multiple scan feature added #358

Closed
wants to merge 5 commits into from
6 changes: 6 additions & 0 deletions CHANGELOG.md
@@ -1,3 +1,9 @@
### Newly Added Features
You can now scan multiple URLs from a file by using the -list flag.
```
$ python xsstrike.py -list myUrls.txt
```

### 3.1.5
- Fix color bug that resulted in DOM XSS vulnerabilities not
being reported on certain systems (Windows, macOS, iOS)
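For reference, the list file read by the -list flag is plain text with one URL per line; each line's trailing newline is stripped before the URL is scanned. A minimal illustrative example of myUrls.txt, with placeholder targets:
```
http://example.com/search.php?q=query
http://testphp.vulnweb.com/listproducts.php?cat=1
```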
113 changes: 113 additions & 0 deletions modes/multi_scan.py
@@ -0,0 +1,113 @@
import copy
import re
from urllib.parse import urlparse, quote, unquote

from core.checker import checker
from core.colors import end, green, que
import core.config
from core.config import xsschecker, minEfficiency
from core.dom import dom
from core.filterChecker import filterChecker
from core.generator import generator
from core.htmlParser import htmlParser
from core.requester import requester
from core.utils import getUrl, getParams, getVar
from core.wafDetector import wafDetector
from core.log import setup_logger

logger = setup_logger(__name__)



def multi_scan(target, paramData, encoding, headers, delay, timeout, skipDOM, skip):
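    """Scan a single URL from the -list file; mirrors modes/scan.py but is called once per target by xsstrike.py."""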
    GET, POST = (False, True) if paramData else (True, False)

    response = requester(target, {}, headers, GET, delay, timeout).text
    if not skipDOM:
        logger.run('Checking for DOM vulnerabilities')
        highlighted = dom(response)
        if highlighted:
            logger.good('Potentially vulnerable objects found')
            logger.red_line(level='good')
            for line in highlighted:
                logger.no_format(line, level='good')
            logger.red_line(level='good')
    host = urlparse(target).netloc  # Extracts host out of the url
    logger.debug('Host to scan: {}'.format(host))
    url = getUrl(target, GET)
    logger.debug('Url to scan: {}'.format(url))
    params = getParams(target, paramData, GET)
    logger.debug_json('Scan parameters:', params)
    if not params:
        logger.error('No parameters to test.')
        return  # skip this URL instead of quitting so the remaining -list URLs still get scanned
    WAF = wafDetector(
        url, {list(params.keys())[0]: xsschecker}, headers, GET, delay, timeout)
    if WAF:
        logger.error('WAF detected: %s%s%s' % (green, WAF, end))
    else:
        logger.good('WAF Status: %sOffline%s' % (green, end))

    for paramName in params.keys():
        paramsCopy = copy.deepcopy(params)
        logger.info(target)
        logger.info('Testing parameter: %s' % paramName)
        if encoding:
            paramsCopy[paramName] = encoding(xsschecker)
        else:
            paramsCopy[paramName] = xsschecker
        response = requester(url, paramsCopy, headers, GET, delay, timeout)
        occurences = htmlParser(response, encoding)
        positions = occurences.keys()
        logger.debug('Scan occurences: {}'.format(occurences))
        if not occurences:
            logger.error('No reflection found')
            continue
        else:
            logger.info('Reflections found: %i' % len(occurences))

        logger.run('Analysing reflections')
        efficiencies = filterChecker(
            url, paramsCopy, headers, GET, delay, occurences, timeout, encoding)
        logger.debug('Scan efficiencies: {}'.format(efficiencies))
        logger.run('Generating payloads')
        vectors = generator(occurences, response.text)
        total = 0
        for v in vectors.values():
            total += len(v)
        if total == 0:
            logger.error('No vectors were crafted.')
            continue
        logger.info('Payloads generated: %i' % total)
        progress = 0
        for confidence, vects in vectors.items():
            for vect in vects:
                if core.config.globalVariables['path']:
                    vect = vect.replace('/', '%2F')
                loggerVector = vect
                progress += 1
                logger.run('Progress: %i/%i\r' % (progress, total))
                if not GET:
                    vect = unquote(vect)
                efficiencies = checker(
                    url, paramsCopy, headers, GET, delay, vect, positions, timeout, encoding)
                if not efficiencies:
                    for i in range(len(occurences)):
                        efficiencies.append(0)
                bestEfficiency = max(efficiencies)
                if bestEfficiency == 100 or (vect[0] == '\\' and bestEfficiency >= 95):
                    logger.red_line()
                    logger.good('Payload: %s' % loggerVector)
                    logger.info('Efficiency: %i' % bestEfficiency)
                    logger.info('Confidence: %i' % confidence)
                    if not skip:
                        choice = input(
                            '%s Would you like to continue scanning? [y/N] ' % que).lower()
                        if choice != 'y':
                            quit()
                elif bestEfficiency > minEfficiency:
                    logger.red_line()
                    logger.good('Payload: %s' % loggerVector)
                    logger.info('Efficiency: %i' % bestEfficiency)
                    logger.info('Confidence: %i' % confidence)
    logger.no_format('')
2 changes: 2 additions & 0 deletions modes/scan.py
@@ -118,3 +118,5 @@ def scan(target, paramData, encoding, headers, delay, timeout, skipDOM, skip):
                    logger.info('Efficiency: %i' % bestEfficiency)
                    logger.info('Confidence: %i' % confidence)
    logger.no_format('')


36 changes: 28 additions & 8 deletions xsstrike.py
@@ -12,16 +12,18 @@
try:
    import concurrent.futures
    from urllib.parse import urlparse

    try:
        import fuzzywuzzy
    except ImportError:
        import os

        print('%s fuzzywuzzy isn\'t installed, installing now.' % info)
        ret_code = os.system('pip3 install fuzzywuzzy')
        if (ret_code != 0):
            print('%s fuzzywuzzy installation failed.' % bad)
            quit()
        print('%s fuzzywuzzy has been installed, restart XSStrike.' % info)
        quit()
except ImportError:  # throws error in python2
    print('%s XSStrike isn\'t compatible with python2.\n Use python > 3.4 to run XSStrike.' % bad)
@@ -39,6 +41,7 @@
# Processing command line arguments, where dest var names will be mapped to local vars with the same name
parser = argparse.ArgumentParser()
parser.add_argument('-u', '--url', help='url', dest='target')
parser.add_argument('-list', '--list', help='url lists', dest='list') # added by aspasht
parser.add_argument('--data', help='post data', dest='paramData')
parser.add_argument('-e', '--encode', help='encode payloads', dest='encode')
parser.add_argument('--fuzzer', help='fuzzer',
@@ -85,6 +88,7 @@
# Pull all parameter values of dict from argparse namespace into local variables of name == key
# The following works, but the static checkers are too static ;-) locals().update(vars(args))
target = args.target
target_list = args.list
path = args.path
jsonData = args.jsonData
paramData = args.paramData
@@ -122,6 +126,8 @@
from modes.bruteforcer import bruteforcer
from modes.crawl import crawl
from modes.scan import scan
from modes.multi_scan import multi_scan

from modes.singleFuzz import singleFuzz

if type(args.add_headers) == bool:
@@ -161,20 +167,33 @@
    updater()
    quit()  # quitting because files have been changed

if not args_seeds:
    if target_list:
        with open(target_list, 'r') as f:
            urls = []
            for url in f:
                urls.append(url.strip("\n"))
            for line in urls:
                multi_scan(line, paramData, encoding, headers, delay, timeout, skipDOM, skip)
    elif target:
        scan(target, paramData, encoding, headers, delay, timeout, skipDOM, skip)
    else:
        logger.no_format('\n' + parser.format_help().lower())
        quit()

if fuzz:
    singleFuzz(target, paramData, encoding, headers, delay, timeout)
elif not recursive and not args_seeds:
    if args_file:
        bruteforcer(target, paramData, payloadList, encoding, headers, delay, timeout)
    else:
        pass
        # scan(target, paramData, encoding, headers, delay, timeout, skipDOM, skip)
else:
    if target:
        seedList.append(target)

    for target in seedList:
        logger.run('Crawling the target')
        scheme = urlparse(target).scheme
@@ -194,7 +213,8 @@
                domURLs.append(0)
        threadpool = concurrent.futures.ThreadPoolExecutor(max_workers=threadCount)
        futures = (threadpool.submit(crawl, scheme, host, main_url, form,
                                     blindXSS, blindPayload, headers, delay, timeout, encoding) for form, domURL in
                   zip(forms, domURLs))
        for i, _ in enumerate(concurrent.futures.as_completed(futures)):
            if i + 1 == len(forms) or (i + 1) % threadCount == 0:
                logger.info('Progress: %i/%i\r' % (i + 1, len(forms)))
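Since paramData is forwarded to every multi_scan call, the new flag composes with the existing options parsed above; for instance, the same POST data can be applied to every URL in the list (file name and parameter below are illustrative):
```
$ python xsstrike.py -list myUrls.txt --data "q=query"
```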