|
1 |
| -#!/env/python3 |
2 |
| -# Burp URL Feeder Threaded |
3 |
| -# ZephrFish & Mantis 2022 |
4 |
| -# Python 3 Conversion |
5 |
| - |
6 |
| -import urllib3 |
7 |
| -import sys |
8 |
| -import re |
9 |
| -import requests |
10 |
| -import argparse |
11 |
| -from requests.packages.urllib3.exceptions import InsecureRequestWarning |
12 |
| -from multiprocessing import Pool |
13 |
| - |
14 |
| -urllib3.exceptions.InsecureRequestWarning |
15 |
| - |
16 |
| -version = "2.2" |
17 |
| - |
18 |
| -def printBanner(): |
19 |
| - print("""" |
20 |
| - __________ ___________ .___ |
21 |
| - \______ \__ _______________\_ _____/___ ____ __| _/ |
22 |
| - | | _/ | \_ __ \____ \| __)/ __ \_/ __ \ / __ | |
23 |
| - | | \ | /| | \/ |_> > \\ ___/\ ___// /_/ | |
24 |
| - |______ /____/ |__| | __/\___ / \___ >\___ >____ | |
25 |
| - \/ |__| \/ \/ \/ \/ |
26 |
| - Version {0}""".format(version) |
27 |
| - sleep(1) |
28 |
| - |
29 |
| -def fetchUrl(url): |
30 |
| - requests.packages.urllib3.disable_warnings(InsecureRequestWarning) |
31 |
| - |
32 |
| - # Change me to whatever you want, can be IP of host or localhost wherever Burp is listening |
33 |
| - proxy = { |
34 |
| - "http": "http://localhost:8080", |
35 |
| - "https": "https://localhost:8080", |
36 |
| - } |
37 |
| - |
38 |
| - headers = sys.argv[2] |
39 |
| - |
40 |
| - regex=re.compile('^http://|^https://') |
41 |
| - if re.match(regex, url): |
42 |
| - try: |
43 |
| - normalresponse = requests.get(url.rstrip(), proxies=proxy, verify=False, timeout=8, headers=headers) |
44 |
| - print("URL: {0} | Status: {1}".format(url.rstrip(), normalresponse.status_code)) |
45 |
| - except: |
46 |
| - pass |
47 |
| - else: |
48 |
| - HTTPSecure = "https://"+url.rstrip() |
49 |
| - HTTPNot = "http://"+url.rstrip() |
50 |
| - try: |
51 |
| - httpsresponse = requests.get(HTTPSecure, proxies=proxy, verify=False, timeout=8, headers=headers) |
52 |
| - httpresponse = requests.get(HTTPNot, proxies=proxy, verify=False, timeout=8, headers=headers) |
53 |
| - print("URL: {0} | Status: {1}".format(HTTPNot, httpresponse.status_code)) |
54 |
| - print("URL: {0} | Status: {1}".format(HTTPSecure, httpsresponse.status_code)) |
55 |
| - |
56 |
| - except: |
57 |
| - pass |
58 |
| - |
59 |
| -def burpFeed(urls, threads): |
60 |
| - pool = Pool(int(threads)) |
61 |
| - with open(urls, encoding="utf8") as source_file: |
62 |
| - results = pool.map(fetchUrl, source_file, int(threads)) |
63 |
| - print(results) |
64 |
| - |
65 |
| -if __name__ == '__main__': |
66 |
| - try: |
67 |
| - burpFeed(sys.argv[1], sys.argv[2]) |
68 |
| - except: |
69 |
| - print("Not enough arguments! %s <hosts file> <headers>" % sys.argv[0]) |
70 |
| - sys.exit() |
71 |
| - |
#!/usr/bin/env python3
| 2 | +# Burp URL Feeder Threaded |
| 3 | +# ZephrFish & Mantis 2022 |
| 4 | +# Python 3 Conversion |
| 5 | + |
| 6 | +import urllib3 |
| 7 | +import sys |
| 8 | +import re |
| 9 | +import requests |
| 10 | +import argparse |
| 11 | +from requests.packages.urllib3.exceptions import InsecureRequestWarning |
| 12 | +from multiprocessing import Pool |
| 13 | + |
# Actually disable urllib3's insecure-request warnings. The original line was
# a bare attribute access (`urllib3.exceptions.InsecureRequestWarning`) -- a
# no-op expression that silenced nothing.
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

# Tool version, displayed by printBanner().
version = "2.2"
| 17 | + |
| 18 | +def printBanner(): |
| 19 | + print("""" |
| 20 | + __________ ___________ .___ |
| 21 | + \______ \__ _______________\_ _____/___ ____ __| _/ |
| 22 | + | | _/ | \_ __ \____ \| __)/ __ \_/ __ \ / __ | |
| 23 | + | | \ | /| | \/ |_> > \\ ___/\ ___// /_/ | |
| 24 | + |______ /____/ |__| | __/\___ / \___ >\___ >____ | |
| 25 | + \/ |__| \/ \/ \/ \/ |
| 26 | + Version {0}""".format(version) |
| 27 | + sleep(1) |
| 28 | + |
def fetchUrl(url, headers=None):
    """Request *url* through the local Burp proxy and print its status code.

    If *url* already carries an http(s) scheme it is requested as-is;
    otherwise both the http:// and https:// variants are tried, each
    independently, so one failing cannot suppress the other.

    Args:
        url: Host or full URL; a trailing newline from file iteration is
            stripped.
        headers: Optional mapping of extra HTTP headers. Defaults to None.
            (The original read ``sys.argv[2]`` here, which is actually the
            *thread count* handed to ``burpFeed`` -- and a bare string is not
            a valid headers mapping anyway.)
    """
    requests.packages.urllib3.disable_warnings(InsecureRequestWarning)

    # Change me to wherever Burp is listening (IP of host or localhost).
    # NOTE: both schemes must point at the plain-HTTP proxy listener; Burp's
    # listener does not itself speak TLS, so "https://localhost:8080" as the
    # proxy URL (as the original had) fails the handshake.
    proxy = {
        "http": "http://localhost:8080",
        "https": "http://localhost:8080",
    }

    target = url.rstrip()
    if re.match(r'^https?://', target):
        candidates = [target]
    else:
        candidates = ["http://" + target, "https://" + target]

    for candidate in candidates:
        try:
            response = requests.get(candidate, proxies=proxy, verify=False,
                                    timeout=8, headers=headers)
            print("URL: {0} | Status: {1}".format(candidate, response.status_code))
        except requests.exceptions.RequestException:
            # Best-effort sweep: unreachable / misbehaving hosts are skipped
            # silently (narrowed from the original bare `except:`).
            pass
| 58 | + |
def burpFeed(urls, threads):
    """Feed every URL in file *urls* through fetchUrl via a worker pool.

    Args:
        urls: Path to a UTF-8 file with one host/URL per line.
        threads: Worker-process count (string or int; also used as the
            ``map`` chunksize, as in the original).

    Note: fetchUrl returns None, so the printed result list only signals
    completion.
    """
    workers = int(threads)
    # Context managers guarantee the pool is terminated and joined and the
    # file is closed even on error -- the original never closed the Pool.
    with Pool(workers) as pool, open(urls, encoding="utf8") as source_file:
        results = pool.map(fetchUrl, source_file, workers)
    print(results)
| 64 | + |
if __name__ == '__main__':
    # Validate argv explicitly: the original bare `except:` around burpFeed()
    # mis-reported *any* runtime error (missing file, non-numeric thread
    # count, ...) as "Not enough arguments", then exited with status 0.
    # The second argument is the thread count, not headers, so the usage
    # string is corrected too.
    if len(sys.argv) != 3:
        print("Not enough arguments! %s <hosts file> <threads>" % sys.argv[0])
        sys.exit(1)
    burpFeed(sys.argv[1], sys.argv[2])
| 71 | + |
0 commit comments