Skip to content

Commit

Permalink
Added findomain for suip :).
Browse files Browse the repository at this point in the history
  • Loading branch information
NotoriousRebel committed Dec 31, 2019
1 parent e5c4c9d commit 30140fc
Showing 1 changed file with 16 additions and 16 deletions.
32 changes: 16 additions & 16 deletions theHarvester/discovery/suip.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,30 +12,33 @@ def __init__(self, word: str):
self.totalhosts: set = set()
self.totalips: set = set()

async def request(self, url, params, findomain=False):
    """POST the target domain to a suip.biz tool endpoint and return the raw response.

    :param url: endpoint to POST to (e.g. https://suip.biz/)
    :param params: query parameters selecting the backend tool (the 'act' key)
    :param findomain: when True, also send 'only_resolved': '1' so the
        findomain backend returns only subdomains that resolve
    :return: the awaited response from AsyncFetcher.post_fetch
    """
    headers = {'User-Agent': Core.get_user_agent()}
    # findomain accepts an extra form field the other tools do not.
    data = {'url': self.word.replace('www.', ''), 'only_resolved': '1', 'Submit1': 'Submit'} if findomain else \
        {'url': self.word.replace('www.', ''), 'Submit1': 'Submit'}
    return await AsyncFetcher.post_fetch(url, headers=headers, params=params, data=data)

async def handler(self, url):
    """Query all three suip.biz backend tools concurrently and gather the responses.

    Fires one request each for subfinder, amass and findomain; only the
    findomain request sets findomain=True so it asks for resolved hosts only.

    :param url: base suip.biz URL shared by all three requests
    :return: list of three responses in [subfinder, amass, findomain] order
    """
    first_param = [url, (('act', 'subfinder'),), False]
    second_param = [url, (('act', 'amass'),), False]
    third_param = [url, (('act', 'findomain'),), True]
    async_requests = [
        self.request(url=url, params=params, findomain=findomain)
        for url, params, findomain in [first_param, second_param, third_param]
    ]
    results = await asyncio.gather(*async_requests)
    return results

async def do_search(self):
    """Fetch all tool responses, parse each <pre> block, and collect hosts.

    The third response (index 2) comes from findomain, which appends summary
    lines such as 'A total of ...' that must be filtered out before parsing.
    Any failure is reported and swallowed (best-effort source).
    """
    try:
        results = await self.handler(url="https://suip.biz/")
        # enumerate instead of range(len(...)) — we need the index only to
        # special-case the findomain response.
        for num, result in enumerate(results):
            soup = BeautifulSoup(str(result), 'html.parser')
            hosts: list = str(soup.find('pre')).splitlines() if num != 2 else \
                [line for line in str(soup.find('pre')).splitlines() if 'A total of' not in line]
            await self.clean_hosts(hosts)
    except Exception as e:
        print(f'An exception has occurred: {e}')
Expand All @@ -50,8 +53,5 @@ async def process(self):
async def clean_hosts(self, soup_hosts):
    """Normalize scraped lines and record those that mention the target domain.

    Strips whitespace, keeps only lines longer than one character that contain
    the (www-stripped) target word, trims a leading '.', and adds the result
    to self.totalhosts.

    :param soup_hosts: iterable of raw lines scraped from a <pre> block
    """
    # Hoist the loop-invariant target string out of the loop.
    domain = self.word.replace('www.', '')
    for host in soup_hosts:
        host = str(host).strip()
        if len(host) > 1 and domain in host:
            self.totalhosts.add(host[1:] if host[0] == '.' else host)

0 comments on commit 30140fc

Please sign in to comment.