
Add parse_MyPage to separate parsing logic and handler #40

Open · wants to merge 3 commits into master
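In short (a reading of the diff below, not wording from the PR itself): HTML parsing of MyPage.aspx moves out of the eviden/core.py handlers into a single generator, parse_MyPage, which yields Project named tuples with id, name and group fields; list_projects and select_project then only iterate over those tuples. A minimal sketch of the intended call pattern, reusing get_with_session and the MyPage URL that appear in core.py:

# Sketch only; mirrors the new list_projects shown in the diff below.
from eviden.connection import get_with_session
from eviden.generator import parse_MyPage

def print_projects():
    url = "https://etrack.timedia.co.jp/EasyTracker/main/MyPage.aspx"
    html = get_with_session(url)
    for project in parse_MyPage(html):            # lazily yields Project tuples
        print(f"{project.name}@{project.group}")  # e.g. my-project@my-group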
21 changes: 12 additions & 9 deletions eviden/core.py
@@ -6,9 +6,8 @@
from .connection import get, get_with_session, post_with_session, authenticate
from .generator import (
generate_hidden_params,
generate_project_info,
generate_issues,
find_board_id
parse_MyPage
)

BASE_URL = "https://etrack.timedia.co.jp/EasyTracker/"
@@ -74,20 +73,24 @@ def login(user_id, password):

def list_projects():
URL = BASE_URL + "main/MyPage.aspx"

html = get_with_session(URL)
projects = parse_MyPage(html)

project_info = generate_project_info(html)

for (group, name) in project_info:
print(f"{name}@{group}")
for project in projects:
print(f"{project.name}@{project.group}")


def select_project(name):
url = BASE_URL + "main/MyPage.aspx"

html = get_with_session(url)
board_id = find_board_id(html, name)
projects = parse_MyPage(html)

for project in projects:
if project.name == name:
board_id = project.id
break
else:
sys.exit("その名前のプロジェクトは存在しません")

status = read_json(STATUS_PATH)
status["paramators"]["board_id"] = board_id
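A note on the new select_project body above: it uses Python's for/else, where the else branch runs only if the loop finishes without hitting break, so the sys.exit call with the Japanese message (roughly "a project with that name does not exist") is reached only when no project name matched. A small self-contained illustration of that control flow (the names here are invented):

# for/else: the else clause runs only when the loop was not ended by break.
def find_board_id_in(pairs, wanted):
    for name, board_id in pairs:
        if name == wanted:
            found = board_id
            break
    else:
        return None  # no break happened, so nothing matched
    return found

print(find_board_id_in([("alpha", "1"), ("beta", "2")], "beta"))   # -> 2
print(find_board_id_in([("alpha", "1"), ("beta", "2")], "gamma"))  # -> None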
35 changes: 12 additions & 23 deletions eviden/generator.py
@@ -1,5 +1,5 @@
from bs4 import BeautifulSoup
import sys
from typing import Generator, NamedTuple

HIDDEN_PARAMS = [
"__VIEWSTATE",
@@ -24,18 +24,6 @@ def generate_hidden_params(html, request="POST"):
return data


def generate_project_info(html):
soup = BeautifulSoup(html, "html.parser")

TABLE_ID = "_ctl0_ContentPlaceHolder1_gridList"
table = soup.find(attrs={"id": TABLE_ID})

rows = table.find_all("tr")[1:]
project_info = [list(map(__to_text, row.find_all("td")[:2])) for row in rows]

return project_info


def generate_issues(html):
soup = BeautifulSoup(html, "html.parser")

@@ -49,16 +37,17 @@ def generate_issues(html):
return issues


def find_board_id(html, name):
soup = BeautifulSoup(html, "html.parser")
Project = NamedTuple('Project', [('id', str),
('name', str),
('group', str)])

TABLE_ID = "_ctl0_ContentPlaceHolder1_gridList"
table = soup.find(attrs={"id": TABLE_ID})
rows = table.find_all("tr")[1:]

for row in rows:
project_name = row.find_all("td")[1]
if project_name.text == name:
return project_name.a.get("href").split("=")[1]
def parse_MyPage(html: str) -> Generator[Project, None, None]:
root = BeautifulSoup(html, 'html.parser')
table = root.find(attrs={'id': '_ctl0_ContentPlaceHolder1_gridList'})
rows = table.find_all('tr')[1:] # skip a table header row

sys.exit("その名前のプロジェクトは存在しません")
for row in rows:
group, name, last_updated, description, status = row.find_all('td', recursive=False)
project_id = name.a.get('href').split('=')[-1]
yield Project(project_id, name.text, group.text)
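To make the expected input concrete, here is a hand-written fragment fed to parse_MyPage (a sketch, not part of the PR; the table id and the five-column row layout mirror the code above, while the href and cell values are invented):

from eviden.generator import parse_MyPage

# Minimal MyPage-like table: one header row plus one project row.
sample_html = """
<table id="_ctl0_ContentPlaceHolder1_gridList">
  <tr><th>Group</th><th>Name</th><th>Updated</th><th>Description</th><th>Status</th></tr>
  <tr>
    <td>my-group</td>
    <td><a href="IssueList.aspx?board_id=42">my-project</a></td>
    <td>2020-01-01</td>
    <td>demo project</td>
    <td>open</td>
  </tr>
</table>
"""

for project in parse_MyPage(sample_html):
    # the id is the text after the last "=" in the link's href
    print(project.id, project.name, project.group)  # -> 42 my-project my-group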
38 changes: 13 additions & 25 deletions tests/test_generator.py
@@ -5,8 +5,7 @@
from eviden.generator import (
generate_hidden_params,
generate_issues,
generate_project_info,
find_board_id,
parse_MyPage,
HIDDEN_PARAMS
)

@@ -71,9 +70,10 @@ def randomstr(b):
project_info_tr_html = """
<tr>
<td>{0[0]}</td>
<td><a href="#">{0[1]}</a></td>
<td>{0[2]}</td>
<td><a href="https://eviden.example.com/IssueList.aspx?board_id={0[2]}">{0[1]}</a></td>
<td>{0[3]}</td>
<td>{0[4]}</td>
<td>{0[5]}</td>
</tr>
"""

@@ -139,19 +139,6 @@ def test_generate_hidden_params(self):

self.assertEqual(data, expected)

def test_generate_project_info(self):
N = random.randint(1, 50)

random_rows = [[randomstr(40) for _ in range(4)] for __ in range(N)]
rows = [project_info_tr_html.format(random_rows[i]) for i in range(N)]

html = generate_project_info_html.format("".join(rows))

data = generate_project_info(html)
expected = [random_rows[i][0:2] for i in range(N)]

self.assertEqual(data, expected)

def test_generate_issues(self):
N = random.randint(1, 50)
random_rows = [[randomstr(40) for _ in range(7)] for __ in range(N)]
@@ -162,18 +149,19 @@ def test_generate_issues(self):

self.assertEqual(data, random_rows)

def test_find_board_id(self):
def test_parse_MyPage(self):
N = random.randint(1, 50)
pares = [[randomstr(40) for _ in range(3)] for __ in range(N)]
rows = [find_board_id_td_html.format(pares[i]) for i in range(N)]

html = find_board_id_html.format("".join(rows))
random_rows = [[randomstr(40) for _ in range(6)] for __ in range(N)]
rows = [project_info_tr_html.format(random_rows[i]) for i in range(N)]

M = random.randint(0, N - 1)
_, board_id, name = pares[M]
data = find_board_id(html, name)
html = generate_project_info_html.format("".join(rows))

self.assertEqual(data, board_id)
projects = parse_MyPage(html)
for i, project in enumerate(projects):
self.assertEqual(project.id, random_rows[i][2])
self.assertEqual(project.name, random_rows[i][1])
self.assertEqual(project.group, random_rows[i][0])


if __name__ == "__main__":
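One possible tightening of test_parse_MyPage above (a suggestion, not part of this PR): because parse_MyPage yields lazily, the for loop also passes if fewer than N rows come back, so materializing the generator and asserting its length first makes the check stricter. Inside the test method this could read:

projects = list(parse_MyPage(html))  # drain the generator so the row count can be checked
self.assertEqual(len(projects), N)

for project, row in zip(projects, random_rows):
    self.assertEqual(project.id, row[2])
    self.assertEqual(project.name, row[1])
    self.assertEqual(project.group, row[0])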