Skip to content

Commit

Permalink
Merge pull request #2 from Zaeem20/publishOnTestPyPI
Browse files Browse the repository at this point in the history
Changes in API
  • Loading branch information
Zaeem20 authored Oct 15, 2021
2 parents 0de4065 + 04d33c9 commit 9483af0
Show file tree
Hide file tree
Showing 9 changed files with 65 additions and 41 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/pypublish.yml
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,8 @@
name: Upload Python Package

on:
release:
types: [published]
push:
branches: [publishOnTestPyPI]

jobs:
deploy:
Expand Down
3 changes: 2 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
__pycache__
build
*.egg-info
dist
dist
.vscode
4 changes: 4 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ At your Fingertips, just in 3-5 lines of code...
• Pagelinks Extractor<br />
• Shared DNS Scanner<br />
• DNS Lookup<br />
• Whois Lookup <br />
• Geo-IP Lookup<br />

## Installation
Expand Down Expand Up @@ -70,6 +71,9 @@ async def portscan(target):
async def reversedns(target):
await asyncmanner.reversedns(target)

async def whois(target):
await asyncmanner.whois(target)

async def extract_pagelink(target):
await asyncmanner.extract_pagelink(target)

Expand Down
38 changes: 16 additions & 22 deletions setup.py
Original file line number Diff line number Diff line change
@@ -1,33 +1,27 @@
from setuptools import setup, find_packages
import re
import os

version = ''
with open('webeye/__init__.py') as f:
version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]', f.read(), re.MULTILINE).group(1)
def read(rel_path: str) -> str:
    """Return the text of *rel_path*, resolved relative to this file's directory."""
    base_dir = os.path.abspath(os.path.dirname(__file__))
    # intentionally *not* adding an encoding option to open, See:
    # https://github.com/pypa/virtualenv/issues/201#issuecomment-3145690
    with open(os.path.join(base_dir, rel_path)) as fh:
        return fh.read()

if version.endswith(('a', 'b', 'rc')):
# append version identifier based on commit count
try:
import subprocess
p = subprocess.Popen(['git', 'rev-list', '--count', 'HEAD'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = p.communicate()
if out:
version += out.decode('utf-8').strip()
p = subprocess.Popen(['git', 'rev-parse', '--short', 'HEAD'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = p.communicate()
if out:
version += '+g' + out.decode('utf-8').strip()
except Exception:
pass

def get_version(rel_path: str) -> str:
    """Extract the ``__version__`` string from the module file at *rel_path*.

    Raises RuntimeError when no ``__version__`` assignment is found.
    """
    for src_line in read(rel_path).splitlines():
        if not src_line.startswith("__version__"):
            continue
        # Support either quote style around the version value.
        quote = '"' if '"' in src_line else "'"
        return src_line.split(quote)[1]
    raise RuntimeError("Unable to find version string.")

with open("README.md", "r", encoding="utf-8") as f:
readme = f.read()

setup(
name='webeye',
version=version,
version=get_version('webeye/__init__.py'),
long_description=readme,
long_description_content_type="text/markdown",
entry_points={'console_scripts':['webeye=webeye.__main__:main']},
Expand All @@ -46,7 +40,7 @@
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.6"],
python_requires=">=3.6",
install_requires=['requests >= 2','httpx == 0.20.0'],
install_requires=['requests >= 2','httpx == 0.20.0', 'mechanize == 0.4.7', 'beautifulsoup4 == 4.10.0'],
keywords="webeye red_hawk nikto webrecon recondog",
packages=find_packages(exclude=["docs","tests"]),
data_files=None
Expand Down
3 changes: 3 additions & 0 deletions tests/test_asyncwebeye.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,5 +37,8 @@ async def test_is_cloudflare(self):
async def test_is_honeypot(self):
self.assertIsInstance(await a.is_honeypot('google.com'), str)

async def test_whois(self):
    # The async whois() must resolve to the raw record text as a str
    # for a well-known host (network-dependent test).
    self.assertIsInstance(await a.whois('google.com'), str)

if __name__ == '__main__':
unittest.main()
3 changes: 3 additions & 0 deletions tests/test_webeye.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,5 +36,8 @@ def test_is_cloudflare(self):
def test_is_honeypot(self):
self.assertIsInstance(is_honeypot('google.com'), str)

def test_whois(self):
    # The sync whois() must return the raw record text as a str
    # for a well-known host (network-dependent test).
    self.assertIsInstance(whois('google.com'), str)

if __name__ == '__main__':
unittest.main()
2 changes: 1 addition & 1 deletion webeye/__init__.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
from .core import *

__name__ = "webeye"
__version__ = "2.2.1"
__version__ = "2.2.3"
__author__ = "Zaeem Technical"
__helper__ = "Rishi Raj"
__github__ = "https://github.com/Zaeem20/webeye/"
Expand Down
6 changes: 4 additions & 2 deletions webeye/__main__.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@
import os

def main():
__version__= webeye.__version__
__author__ = 'Zaeem Techical'

logo = '''
Expand All @@ -13,13 +12,14 @@ def main():
===================================
'''

parser = argparse.ArgumentParser(description=f'|<――――― Webeye v{__version__} - Help Menu ―――――>|', epilog=f"Author: {__author__} (Zaeem20)")
parser = argparse.ArgumentParser(description=f'|<――――― Webeye v{webeye.__version__} - Help Menu ―――――>|', epilog=f"Author: {__author__} (Zaeem20)")
parser.add_argument('-s', '--scan',action='store_true', help='Scan Open Ports of Given Host')
parser.add_argument('-d', '--dns',action='store_true', help='Do DNS Lookup of Given Host')
parser.add_argument('-hp','--honeypot',action='store_true', help='Find Honeypot Probablity for Given Host')
parser.add_argument('-hs', '--subdomain',action='store_true', help='Enumerate Subdomain for Given Host')
parser.add_argument('-C','--cloud',action='store_true', help='Check Site is protected with Cloudflare or not...')
parser.add_argument('-b', '--grab',action='store_true', help='Grab banner of a Website')
parser.add_argument('-w', '--whois', action='store_true', help='Whois Lookup of Website')
parser.add_argument('-sD', '--shareddns',action='store_true',help='Find Shared DNS Server of a Website')
parser.add_argument('-geo', '--geolookup',action='store_true', help='Find Geolocation and many other info of host')
parser.add_argument('-rdns', '--reversedns',action='store_true',help='Reverse DNS Lookup of a Website')
Expand Down Expand Up @@ -66,6 +66,8 @@ def main():
reverseip(options.target, cli=True)
if options.honeypot: # Honeypot Lookup
print(is_honeypot(options.target))
if options.whois:
print(whois(options.target))
if options.reversedns: # Reverse DNS Lookup
print(reversedns(options.target))
if options.subdomain: # Subdomain Lookup
Expand Down
43 changes: 30 additions & 13 deletions webeye/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,8 @@
import json as _json
from httpx import AsyncClient
from datetime import datetime
import mechanize
from bs4 import BeautifulSoup
from collections.abc import Iterable
from typing import Union
from concurrent.futures import ThreadPoolExecutor
Expand Down Expand Up @@ -63,7 +65,7 @@ def scan(target: str, port: Union[int, Iterable], start: int=0, dev_mode: bool=F
'''
try:
realip = socket.gethostbyname(target)
lists = [f'\nPyPort started at {datetime.utcnow().strftime("%d-%b-%Y %I:%M %p")}<br/>']
lists = [f'\nPyPort started at {datetime.utcnow().strftime("%d-%b-%Y %I:%M %p")}<br/>','PORTS | SERVICE']
on = time.time()
def scan_port(port) -> Union[str,list]:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
Expand All @@ -73,7 +75,7 @@ def scan_port(port) -> Union[str,list]:
if dev_mode:
lists.append(f'{port}/{socket.getservbyport(port)}')
elif api:
lists.append(f'OPEN_PORTS: {port}/{socket.getservbyport(port)}')
lists.append(f'{port}/tcp | {socket.getservbyport(port)}')
else:
print(f'{port}/tcp\t | {socket.getservbyport(port)}\t| open |')
sock.close()
Expand All @@ -92,7 +94,7 @@ def execute():
runner = execute()

if dev_mode:
return runner,lists[1:]
return runner,lists[2:]
elif api:
return runner, lists
else:
Expand Down Expand Up @@ -160,11 +162,17 @@ def grab(host: str, schema='http://', cli=False) -> Union[dict, None]:
except KeyboardInterrupt:
return sys.exit('Stopped, Exiting: 1')

# <----- Closed Whois Lookup ---->

# def whois(host):
# api = requests.get(f"https://api.hackertarget.com/whois/?q={host}")
# return api.text
def whois(target: str) -> str:
    """Whois lookup for *target*, scraped from ipvoid.com.

    Returns the raw whois record text found in the result page's
    <textarea> element.
    """
    lookup_url = 'https://www.ipvoid.com/whois/'
    br = mechanize.Browser()
    br.open(lookup_url)
    # The first form on the page is the whois query form.
    br.select_form(nr=0)
    br['host'] = target
    page = br.submit().read()
    # Scrape the whois record out of the response HTML.
    parsed = BeautifulSoup(page, 'html.parser')
    return parsed.find('textarea').get_text()

def geoip(host: str, cli=False) -> Union[dict, None]:
realip = socket.gethostbyname(host)
Expand Down Expand Up @@ -393,11 +401,20 @@ async def grab(host: str, schema='http://', cli=False, json :bool=False, indent:
except KeyboardInterrupt:
return 'Stopped, Exiting: 1'

# <----- Closed Whois Lookup ---->

# def whois(host):
# api = requests.get(f"https://api.hackertarget.com/whois/?q={host}")
# return api.text
async def whois(target: str) -> str:
    """Asynchronous whois lookup for *target*, scraped from ipvoid.com.

    NOTE(review): mechanize performs blocking network I/O, so despite the
    ``async`` signature this call blocks the event loop while it runs —
    consider an executor or an async HTTP client.

    Returns the raw whois record text from the result page's <textarea>.
    Raises whatever mechanize/BeautifulSoup raise on failure: the previous
    ``except Exception: print(e)`` swallowed every error and implicitly
    returned None, violating the declared ``-> str`` return (and the
    project's own test asserting a str result) and diverging from the
    sync twin, which lets errors propagate.
    """
    try:
        browser = mechanize.Browser()
        url = 'https://www.ipvoid.com/whois/'
        browser.open(url)
        # The first form on the page is the whois query form.
        browser.select_form(nr=0)
        browser['host'] = target
        response = browser.submit().read()
        # Scrape the whois record out of the response HTML.
        soup = BeautifulSoup(response, 'html.parser')
        return soup.find('textarea').get_text()
    except Exception as e:
        # Keep the original diagnostic print, but re-raise so callers
        # actually observe the failure instead of a silent None.
        print(e)
        raise

async def geoip(self, host: str, cli=False) -> Union[dict, None]:
'''Asynchronous GeoLocation Enumerator of given host'''
Expand Down

0 comments on commit 9483af0

Please sign in to comment.