-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathfetch_blogs.py
More file actions
141 lines (124 loc) · 5.75 KB
/
fetch_blogs.py
File metadata and controls
141 lines (124 loc) · 5.75 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
import yaml
import json
from datetime import datetime
from operator import itemgetter
import re
import os
from github import Github
# GitHub API token, supplied via the FETCH_BLOGS environment variable
# (raises KeyError if unset). Authenticating lifts the 60-requests/hour
# unauthenticated rate limit noted in the comments below.
FETCH_BLOGS = os.environ['FETCH_BLOGS']
# Authenticated PyGithub client used by main() for all API calls.
g = Github(FETCH_BLOGS)
#print ('current working dir:', os.getcwd())
#github_api_url = 'https://api.github.com/repos/{}/{}/contents/{}'
# (owner, repository, content directory) triples scanned by main() for
# markdown posts; each repo is checked for both '_blogs' and '_tutorials'.
repos = [
('xrdocs', 'design', '_blogs'), # this repo/dir contains the known YAML-error file skipped in main()
('xrdocs', 'design', '_tutorials'),
('xrdocs', 'virtual-routing', '_blogs'),
('xrdocs', 'virtual-routing', '_tutorials'),
('xrdocs', 'ztp', '_blogs'),
('xrdocs', 'ztp', '_tutorials'),
('xrdocs', 'cisco-service-layer', '_blogs'),
('xrdocs', 'cisco-service-layer', '_tutorials'),
('xrdocs', 'security', '_blogs'),
('xrdocs', 'security', '_tutorials'),
('xrdocs', 'asr9k', '_blogs'),
('xrdocs', 'asr9k', '_tutorials'),
('xrdocs', 'cnbng', '_blogs'),
('xrdocs', 'cnbng', '_tutorials'),
('xrdocs', '8000', '_blogs'),
('xrdocs', '8000', '_tutorials'),
('xrdocs', 'packet-fronthaul', '_blogs'),
('xrdocs', 'packet-fronthaul', '_tutorials'),
('xrdocs', 'multicast', '_blogs'),
('xrdocs', 'multicast', '_tutorials'),
('xrdocs', 'programmability', '_blogs'),
('xrdocs', 'programmability', '_tutorials'),
('xrdocs', 'automation', '_blogs'),
('xrdocs', 'automation', '_tutorials'),
('xrdocs', 'application-hosting', '_blogs'),
('xrdocs', 'application-hosting', '_tutorials'),
('xrdocs', 'cloud-scale-networking', '_blogs'),
('xrdocs', 'cloud-scale-networking', '_tutorials'),
('xrdocs', 'device-lifecycle', '_blogs'),
('xrdocs', 'device-lifecycle', '_tutorials'),
('xrdocs', 'telemetry', '_blogs'),
('xrdocs', 'telemetry', '_tutorials'),
('xrdocs', 'tdm2ip', '_blogs'),
('xrdocs', 'tdm2ip', '_tutorials'),
('xrdocs', 'segment-routing', '_blogs'),
('xrdocs', 'segment-routing', '_tutorials'),
('xrdocs', 'routed-pon', '_blogs'),
('xrdocs', 'routed-pon', '_tutorials'),
('xrdocs', 'ncs5500', '_blogs'),
('xrdocs', 'ncs5500', '_tutorials')
]
def remove_date_from_title(title):
    """Return *title* with a leading 'YYYY-MM-DD-' prefix removed.

    Titles without such a prefix are returned unchanged.
    """
    dated = re.compile(r'^\d{4}-\d{2}-\d{2}-(.*)$')
    m = dated.match(title)
    return m.group(1) if m else title
# An API key is only required above 60 unauthenticated requests per hour;
# authentication is handled by the Github client created above.
#session = requests.Session()
#session.headers.update(headers)
# Parses the YAML front matter regardless of the order of its attributes.
def get_published_info(content, path, user, repo, directory):
    """Parse the YAML front matter at the top of a markdown file.

    Parameters
    ----------
    content : str
        Full text of the markdown file.
    path : str
        File name, stored into the result under the 'path' key.
    user, repo, directory : str
        Accepted for interface compatibility with callers; unused here.

    Returns
    -------
    dict
        The front-matter mapping augmented with 'path' and a stripped
        'description' (taken from the 'excerpt' field), or {} when the
        file has no parseable front matter. Previously some failure
        paths fell off the end and returned None; all paths now return
        a dict so callers can rely on the type.
    """
    # Front matter must be delimited by '---' lines at the very top.
    if not content.startswith('---\n'):
        return {}
    end = content.find('\n---\n', 4)
    if end == -1:
        return {}
    try:
        front_matter = yaml.safe_load(content[4:end])
    except yaml.YAMLError as error_message:
        print(f"Error retrieving first couple of lines: {error_message}")
        return {}
    # A scalar or list at the top level is not usable front matter.
    if not isinstance(front_matter, dict):
        return {}
    front_matter['path'] = path
    excerpt = front_matter.get('excerpt', '')
    # Guard against a missing, null, or non-string 'excerpt' value.
    front_matter['description'] = excerpt.strip() if isinstance(excerpt, str) else ''
    return front_matter
def main() :
    """Collect published, top-positioned posts from every configured xrdocs
    repository and write them, newest first, to latest-articles.json."""
    recent_posts = []
    for user, repo_name, directory in repos :
        repo_obj = g.get_repo(f"{user}/{repo_name}")
        contents = repo_obj.get_contents(directory)
        for content_file in contents :
            # Known-bad file whose YAML front matter fails to parse; skip it.
            if content_file.name == '2019-02-02-modernizing-ixp-design.md' : #file that contains YAML error in design/_blogs repo
                continue
            if content_file.type == 'file' and content_file.name.endswith('.md') :
                # NOTE(review): one extra API request per file — commit history
                # is fetched just to get the most recent commit date.
                commits = list(repo_obj.get_commits(path = content_file.path))
                if commits:
                    last_commit_date = commits[0].commit.committer.date
                    # Strip tzinfo so all dates sort as naive datetimes.
                    date = last_commit_date.replace(tzinfo=None)
                    file_content = content_file.decoded_content.decode()
                    first_lines = get_published_info(file_content, content_file.name, user, repo_name, directory)
                    # Keep only posts flagged published with position 'top' and a title.
                    if first_lines and first_lines.get('published', False) and first_lines.get('position', '') == 'top' and first_lines.get('title') :
                        title = remove_date_from_title(content_file.name[:-3])
                        # Check if permalink exists in front matter, otherwise construct from filename
                        if 'permalink' in first_lines and first_lines['permalink']:
                            # Permalink is relative, prepend repo name
                            repo_url = f"https://xrdocs.io/{repo_name}{first_lines['permalink']}"
                        else:
                            # directory[1:] drops the leading underscore of '_blogs'/'_tutorials';
                            # [:-3] drops the '.md' extension.
                            repo_url = f'https://xrdocs.io/{repo_name}/{directory[1:]}/{content_file.name[:-3]}/'
                        description = first_lines.get('excerpt', '')
                        recent_posts.append({
                            'title' : title,
                            'date' : date,
                            'url' : repo_url,
                            'blog-description' : description
                        })
    # Newest posts first.
    recent_posts.sort(key = itemgetter('date'), reverse = True)
    # Serialize datetimes to ISO-8601 strings for JSON output.
    recent_posts_data = [{
        'title' : file['title'],
        'date' : file['date'].isoformat(),
        'url' : file['url'],
        'blog-description' : file['blog-description']
    } for file in recent_posts]
    with open('latest-articles.json', 'w') as json_file :
        json.dump(recent_posts_data, json_file, indent = 4)
main()