#!/usr/bin/env python3
# NOTE: PYTHONUNBUFFERED is set in cmdlib.sh for unbuffered output
#
# Fetches the bare minimum from external servers to create the next build. May
# require configured AWS credentials if bucket and objects are not public.
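#
# Example invocations (the bucket and URL below are hypothetical):
#   cmd-buildprep s3://my-bucket/builds
#   cmd-buildprep --ostree https://example.com/builds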

import argparse
import os
import subprocess
import sys
import requests
import shutil

from tenacity import retry, retry_if_exception_type

sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))

from cosalib.builds import Builds, BUILDFILES
from cosalib.cmdlib import load_json, rm_allow_noent, retry_stop, retry_callback
from cosalib.s3 import download_file, head_bucket, head_object
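
# Transient network errors worth retrying; tenacity predicates combine with
# `|`, so any one of these exception types triggers a retry.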
retry_requests_exception = (retry_if_exception_type(requests.Timeout) |
                            retry_if_exception_type(requests.ReadTimeout) |
                            retry_if_exception_type(requests.ConnectTimeout) |
                            retry_if_exception_type(requests.ConnectionError))


def main():
    args = parse_args()

    if args.url.startswith("s3://"):
        fetcher = S3Fetcher(args.url)
    elif args.url.startswith("http://") or args.url.startswith("https://"):
        fetcher = HTTPFetcher(args.url)
    elif args.url.startswith("file://") or args.url.startswith("/"):
        fetcher = LocalFetcher(args.url)
    else:
        raise Exception("Invalid scheme: only file://, s3://, and http(s):// supported")
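
    # Only overwrite the local builds list (BUILDFILES['list']) if it still
    # matches the pristine copy from the last fetch (BUILDFILES['sourcedata']);
    # otherwise the user has local builds and must pass --refresh.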
    builds = None
    if fetcher.exists('builds.json'):
        if not args.refresh:
            if os.path.isfile(BUILDFILES['sourcedata']):
                # If we have local builds, don't overwrite that by default.
                if subprocess.call(['cmp', BUILDFILES['sourcedata'], BUILDFILES['list']],
                                   stdout=subprocess.DEVNULL,
                                   stderr=subprocess.DEVNULL) != 0:
                    raise SystemExit(f"{BUILDFILES['list']} modified locally, run with --refresh")
            fetcher.fetch_json('builds.json')
        else:
            fetcher.fetch('builds.json', dest=BUILDFILES['sourcedata'])
            print(f"Updated {BUILDFILES['sourcedata']}")
            return
        # Record the origin and original state
        with open(BUILDFILES['sourceurl'], 'w') as f:
            f.write(args.url + '\n')
        subprocess.check_call(['cp', '-pf', '--reflink=auto',
                               BUILDFILES['list'], BUILDFILES['sourcedata']])
        builds = Builds()

    if not builds or builds.is_empty():
        print("Remote has no builds!")
        return

    # NB: We only buildprep for the arch we're on for now. Could probably make
    # this a switch in the future.
    buildid = builds.get_latest()
    builddir = builds.get_build_dir(buildid)
    os.makedirs(builddir, exist_ok=True)

    # trim out the leading builds/
    assert builddir.startswith("builds/")
    builddir = builddir[len("builds/"):]

    objects = ['meta.json', 'ostree-commit-object']
    for f in objects:
        fetcher.fetch(f'{builddir}/{f}')

    buildmeta = load_json(f'builds/{builddir}/meta.json')
    if args.ostree:
        f = 'ostree-commit.tar'
        if 'ostree' in buildmeta['images']:
            f = buildmeta['images']['ostree']['path']
        fetcher.fetch(f'{builddir}/{f}')

    # and finally the symlink
    rm_allow_noent('builds/latest')
    os.symlink(buildid, 'builds/latest')

    # also nuke any local matching OSTree ref, since we want to build on
    # top of this new one
    if 'ref' in buildmeta and os.path.isdir('tmp/repo'):
        subprocess.check_call(['ostree', 'refs', '--repo', 'tmp/repo',
                               '--delete', buildmeta['ref']],
                              stdout=subprocess.DEVNULL)


def parse_args():
    parser = argparse.ArgumentParser()
    parser.add_argument("url", metavar='URL',
                        help="URL from which to fetch metadata")
    parser.add_argument("--ostree", action='store_true',
                        help="Also download full OSTree commit")
    parser.add_argument("--refresh", action='store_true',
                        help="Assuming local changes, only update "
                             f"{BUILDFILES['sourcedata']}")
    return parser.parse_args()


class Fetcher(object):

    def __init__(self, url_base):
        self.url_base = url_base

    def fetch(self, path, dest=None):
        # if no specific dest given, assume it's a path under builds/
        if dest is None:
            dest = f'builds/{path}'
        # NB: `urllib.parse.urljoin()` does not do what one thinks it does.
        # Using `os.path.join()` is a hack, but eh... we're not planning to run
        # on Windows anytime soon.
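        # (For example, urljoin('http://host/builds', 'builds.json') yields
        # 'http://host/builds.json', silently dropping the last path segment.)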
        url = os.path.join(self.url_base, path)
        print(f"Fetching: {url}")
        # ensure the dir for the dest file exists, otherwise s3 download_file
        # won't be able to write its temp file
        os.makedirs(os.path.dirname(dest), exist_ok=True)
        self.fetch_impl(url, dest)
        return dest
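
    # Subclasses implement these two hooks.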
    def fetch_impl(self, url, dest):
        raise NotImplementedError

    def exists_impl(self, url):
        raise NotImplementedError

    def fetch_json(self, path):
        return load_json(self.fetch(path))

    def exists(self, path):
        url = os.path.join(self.url_base, path)
        return self.exists_impl(url)


class HTTPFetcher(Fetcher):

    def __init__(self, url_base):
        super().__init__(url_base)

    @retry(stop=retry_stop, retry=retry_requests_exception, before_sleep=retry_callback)
    def fetch_impl(self, url, dest):
        # notice we don't use `stream=True` here; the stuff we're fetching for
        # now is super small
        with requests.get(url) as r:
            r.raise_for_status()
            with open(dest, mode='wb') as f:
                f.write(r.content)

    @retry(stop=retry_stop, retry=retry_requests_exception, before_sleep=retry_callback)
    def exists_impl(self, url):
        with requests.head(url) as r:
            if r.status_code == 200:
                return True
            if r.status_code == 404:
                return False
            raise Exception(f"Received rc {r.status_code} for {url}")


class S3Fetcher(Fetcher):

    def fetch_impl(self, url, dest):
        assert url.startswith("s3://")
        bucket, key = url[len("s3://"):].split('/', 1)
        # no retry decorator needed here; download_file already retries
        # automatically based on the s3config settings
        download_file(bucket, key, dest)

    def exists_impl(self, url):
        assert url.startswith("s3://")
        bucket, key = url[len("s3://"):].split('/', 1)
        # sanity check that the bucket exists and we have access to it
        head_bucket(bucket=bucket)
        return head_object(bucket=bucket, key=key)


class LocalFetcher(Fetcher):

    def __init__(self, url_base):
        if url_base.startswith("file://"):
            url_base = url_base[len("file://"):]
        super().__init__(url_base)

    def fetch_impl(self, url, dest):
        shutil.copyfile(url, dest)

    def exists_impl(self, url):
        return os.path.exists(url)


if __name__ == '__main__':
    sys.exit(main())