
Commit

Merge pull request #27 from ThePirateWhoSmellsOfSunflowers/fix_getuserspns

Use LDAP paged search in GetUserSPNs.py
ShutdownRepo committed Mar 18, 2023
2 parents 8975ed2 + 2261c0a commit 4375a99
Showing 1 changed file with 16 additions and 3 deletions.
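
The gist of the change: the previous code requested everything at once with sizeLimit=100000 and, when the DC answered sizeLimitExceeded, fell back to whichever partial answers it already had. The new code attaches a Simple Paged Results control instead, so the DC returns entries 1000 at a time and the search walks the pages transparently, which is why the sizeLimitExceeded branch below is now expected to be dead code. A minimal sketch of that pattern, assuming an impacket version whose LDAPConnection.search accepts searchControls (host, base DN and credentials are placeholders, not part of the commit):

# Illustration only: the paged-search pattern in isolation. GetUserSPNs.py derives the
# connection details from its command line instead of hardcoding them like this.
from impacket.ldap import ldap, ldapasn1

ldapConnection = ldap.LDAPConnection('ldap://dc01.corp.local', 'dc=corp,dc=local')
ldapConnection.login('jdoe', 'Passw0rd!', 'corp.local')

# Ask the DC to hand results back in pages of 1000 entries (the Active Directory
# per-query ceiling); the search call keeps requesting pages until the server's
# paging cookie is empty, so one call returns the complete result set.
paged_search_control = ldapasn1.SimplePagedResultsControl(criticality=True, size=1000)
resp = ldapConnection.search(searchFilter='(&(objectCategory=person)(servicePrincipalName=*))',
                             attributes=['sAMAccountName', 'servicePrincipalName'],
                             searchControls=[paged_search_control])
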
19 changes: 16 additions & 3 deletions examples/GetUserSPNs.py
@@ -89,6 +89,7 @@ def __init__(self, username, password, user_domain, target_domain, cmdLineOptions):
        self.__kdcHost = cmdLineOptions.dc_host
        self.__saveTGS = cmdLineOptions.save
        self.__requestUser = cmdLineOptions.request_user
        self.__stealth = cmdLineOptions.stealth
        if cmdLineOptions.hashes is not None:
            self.__lmhash, self.__nthash = cmdLineOptions.hashes.split(':')

@@ -294,28 +295,37 @@ def run(self):
                raise

        # Building the search filter
        filter_spn = "servicePrincipalName=*"
        filter_person = "objectCategory=person"
        filter_not_disabled = "!(userAccountControl:1.2.840.113556.1.4.803:=2)"

        searchFilter = "(&"
        searchFilter += "(" + filter_person + ")"
        searchFilter += "(" + filter_not_disabled + ")"

        if self.__stealth is True:
            logging.warning('Stealth option may cause huge memory consumption / out-of-memory errors on very large domains.')
        else:
            searchFilter += "(" + filter_spn + ")"

        if self.__requestUser is not None:
            searchFilter += '(sAMAccountName:=%s)' % self.__requestUser

        searchFilter += ')'

        try:
            # Microsoft Active Directory sets a hard limit of 1000 entries returned by any search
            paged_search_control = ldapasn1.SimplePagedResultsControl(criticality=True, size=1000)

            resp = ldapConnection.search(searchFilter=searchFilter,
                                         attributes=['servicePrincipalName', 'sAMAccountName',
                                                     'pwdLastSet', 'MemberOf', 'userAccountControl', 'lastLogon'],
                                         sizeLimit=100000)
                                         searchControls=[paged_search_control])

        except ldap.LDAPSearchError as e:
            if e.getErrorString().find('sizeLimitExceeded') >= 0:
                # We should never reach this code as we use paged search now
                logging.debug('sizeLimitExceeded exception caught, giving up and processing the data received')
                # We reached the sizeLimit, process the answers we have already and that's it. Until we implement
                # paged queries
                resp = e.getAnswers()
                pass
            else:
@@ -484,6 +494,9 @@ def request_multiple_TGSs(self, usernames):
    parser.add_argument('-target-domain', action='store',
                        help='Domain to query/request if different than the domain of the user. '
                             'Allows for Kerberoasting across trusts.')

    parser.add_argument('-stealth', action='store_true', help='Removes the (servicePrincipalName=*) filter from the LDAP query for added stealth. '
                                                               'May cause huge memory consumption / errors on large domains.')
    parser.add_argument('-no-preauth', action='store', help='account that does not require preauth, to obtain Service Ticket'
                                                            ' through the AS')
    parser.add_argument('-usersfile', help='File with user per line to test')
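
On the -stealth flag introduced above: by default the filter built in the run() hunk only asks the DC for enabled user objects that actually carry a servicePrincipalName, which is a fairly recognisable Kerberoasting query. With -stealth the SPN clause is left out, so the request looks like an ordinary user enumeration, every enabled user comes back, and the SPN sorting has to happen client-side from the returned attributes, hence the memory warning for large domains. Roughly, the two filters the script ends up sending when no -request-user is given (shown for illustration, not part of the commit):

# Default query: enabled person objects holding at least one SPN.
# (The :1.2.840.113556.1.4.803: matching rule is a bitwise AND on userAccountControl;
# bit 2 is ACCOUNTDISABLE, so the negated clause keeps only enabled accounts.)
default_filter = ("(&(objectCategory=person)"
                  "(!(userAccountControl:1.2.840.113556.1.4.803:=2))"
                  "(servicePrincipalName=*))")

# With -stealth: the same query minus the SPN clause, so every enabled user is
# returned and the script sifts for SPNs itself.
stealth_filter = ("(&(objectCategory=person)"
                  "(!(userAccountControl:1.2.840.113556.1.4.803:=2)))")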
