
Commit 5a7fa80

Merge pull request #2731 from norio-nomura/update-template-debian.sh

Refactor `update-template-ubuntu.sh` and add `update-template-debian.sh`

2 parents: 8fbdf10 + 7ff9f6b

File tree: 4 files changed (+1018, −198 lines)

hack/cache-common-inc.sh (42 additions, 16 deletions)
````diff
@@ -1,5 +1,11 @@
 #!/usr/bin/env bash
 
+# print the error message and exit with status 1
+function error_exit() {
+	echo "Error: $*" >&2
+	exit 1
+}
+
 # e.g.
 # ```console
 # $ download_template_if_needed templates/default.yaml
````
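
The new `error_exit` helper centralizes the print-to-stderr-and-exit pattern used across the script. A minimal sketch of a call site (the `jq` guard here is illustrative, though `jq` is genuinely needed by the refactored code below):

```bash
#!/usr/bin/env bash

# same helper as added above
function error_exit() {
	echo "Error: $*" >&2
	exit 1
}

# illustrative caller: fail fast when a required tool is missing
command -v jq >/dev/null || error_exit "jq not found"
```
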
````diff
@@ -97,7 +103,7 @@ function size_from_location() {
 	)
 }
 
-# Check the remote location and return the http code and size.
+# Check the remote location and print the http code and size.
 # If GITHUB_ACTIONS is true, the result is not cached.
 # e.g.
 # ```console
````
````diff
@@ -113,7 +119,7 @@ function check_location() {
 	fi
 }
 
-# Check the remote location and return the http code and size.
+# Check the remote location and print the http code and size.
 # The result is cached in .check_location-response-cache.yaml
 # e.g.
 # ```console
````
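
These two comment fixes reflect bash semantics: a function cannot return a string; it prints to stdout for the caller to capture, while `return` only carries a numeric exit status. A minimal illustration (the function and values are hypothetical):

```bash
# a bash function "returns" data by printing it
function probe() {
	echo "200 12345" # http code and size, written to stdout
	return 0         # exit status only (0-255), not a value
}

read -r code size <<<"$(probe)"
echo "code=${code} size=${size}"
```
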
````diff
@@ -209,8 +215,7 @@ function location_to_sha256() {
 	elif command -v shasum >/dev/null; then
 		sha256="$(echo -n "${location}" | shasum -a 256 | cut -d' ' -f1)"
 	else
-		echo "sha256sum or shasum not found" >&2
-		exit 1
+		error_exit "sha256sum or shasum not found"
 	fi
 	echo "${sha256}"
 )
````
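
For context, `location_to_sha256` hashes the location string with whichever tool is available: `sha256sum` from GNU coreutils, or `shasum`, which ships with macOS. A self-contained sketch of that fallback (the wrapper name and URL are hypothetical):

```bash
#!/usr/bin/env bash

# prefer sha256sum, fall back to shasum -a 256; both print "<hash> <file>"
function sha256_of_string() {
	local input=$1
	if command -v sha256sum >/dev/null; then
		echo -n "${input}" | sha256sum | cut -d' ' -f1
	elif command -v shasum >/dev/null; then
		echo -n "${input}" | shasum -a 256 | cut -d' ' -f1
	else
		echo "Error: sha256sum or shasum not found" >&2
		return 1
	fi
}

sha256_of_string "https://example.com/image.qcow2"
```
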
````diff
@@ -351,16 +356,32 @@ function hash_file() {
 # /Users/user/Library/Caches/lima/download/by-url-sha256/346ee1ff9e381b78ba08e2a29445960b5cd31c51f896fc346b82e26e345a5b9a/data # on macOS
 # /home/user/.cache/lima/download/by-url-sha256/346ee1ff9e381b78ba08e2a29445960b5cd31c51f896fc346b82e26e345a5b9a/data # on others
 function download_to_cache() {
-	local code_time_type_url
-	code_time_type_url=$(
-		curl -sSLI -w "%{http_code}\t%header{Last-Modified}\t%header{Content-Type}\t%{url_effective}" "$1" -o /dev/null
-	)
+	local cache_path
+	cache_path=$(location_to_cache_path "$1")
+	# before checking remote location, check if the data file is already downloaded and the time file is updated within 10 minutes
+	if [[ -f ${cache_path}/data && -n "$(find "${cache_path}/time" -mmin -10 || true)" ]]; then
+		echo "${cache_path}/data"
+		return
+	fi
+
+	# check the remote location
+	local curl_info_json write_out
+	write_out='{
+		"http_code":%{http_code},
+		"last_modified":"%header{Last-Modified}",
+		"content_type":"%{content_type}",
+		"url":"%{url_effective}",
+		"filename":"%{filename_effective}"
+	}'
+	curl_info_json=$(curl -sSLI -w "${write_out}" "$1" -o /dev/null)
 
 	local code time type url
-	IFS=$'\t' read -r code time type url filename <<<"${code_time_type_url}"
-	[[ ${code} == 200 ]] || exit 1
+	code=$(jq -r '.http_code' <<<"${curl_info_json}")
+	time=$(jq -r '.last_modified' <<<"${curl_info_json}")
+	type=$(jq -r '.content_type' <<<"${curl_info_json}")
+	url=$(jq -r '.url' <<<"${curl_info_json}")
+	[[ ${code} == 200 ]] || error_exit "Failed to download $1"
 
-	local cache_path
 	cache_path=$(location_to_cache_path "${url}")
 	[[ -d ${cache_path} ]] || mkdir -p "${cache_path}"
 
````
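
The core of the refactor: instead of a tab-separated `-w` write-out split positionally with `IFS=$'\t' read`, curl now emits a JSON object that `jq` parses by field name, so each value stays addressable even when a header such as Last-Modified is absent. A standalone sketch of the technique (the URL is a placeholder; `%header{...}` needs curl 7.84 or later):

```bash
#!/usr/bin/env bash
set -eu

# -w prints this template on stdout after the transfer completes;
# %{http_code}, %header{...}, and %{url_effective} are curl write-out variables
write_out='{"http_code":%{http_code},"last_modified":"%header{Last-Modified}","url":"%{url_effective}"}'

# HEAD-style probe (-I) following redirects (-L); the body is discarded
info_json=$(curl -sSLI -w "${write_out}" -o /dev/null "https://example.com")

code=$(jq -r '.http_code' <<<"${info_json}")
url=$(jq -r '.url' <<<"${info_json}")
echo "HTTP ${code} from ${url}"
```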

````diff
@@ -369,18 +390,23 @@ function download_to_cache() {
 	[[ -f ${cache_path}/time && "$(<"${cache_path}/time")" == "${time}" ]] || needs_download=1
 	[[ -f ${cache_path}/type && "$(<"${cache_path}/type")" == "${type}" ]] || needs_download=1
 	if [[ ${needs_download} -eq 1 ]]; then
-		local code_time_type_url_filename
-		code_time_type_url_filename=$(
+		curl_info_json=$(
 			echo "downloading ${url}" >&2
-			curl -SL -w "%{http_code}\t%header{Last-Modified}\t%header{Content-Type}\t%{url_effective}\t%{filename_effective}" --no-clobber -o "${cache_path}/data" "${url}"
+			curl -SL -w "${write_out}" --no-clobber -o "${cache_path}/data" "${url}"
 		)
 		local filename
-		IFS=$'\t' read -r code time type url filename <<<"${code_time_type_url_filename}"
-		[[ ${code} == 200 ]] || exit 1
+		code=$(jq -r '.http_code' <<<"${curl_info_json}")
+		time=$(jq -r '.last_modified' <<<"${curl_info_json}")
+		type=$(jq -r '.content_type' <<<"${curl_info_json}")
+		url=$(jq -r '.url' <<<"${curl_info_json}")
+		filename=$(jq -r '.filename' <<<"${curl_info_json}")
+		[[ ${code} == 200 ]] || error_exit "Failed to download ${url}"
 		[[ "${cache_path}/data" == "${filename}" ]] || mv "${filename}" "${cache_path}/data"
 		# sha256.digest seems existing if expected digest is available. so, not creating it here.
 		# sha256sum "${cache_path}/data" | awk '{print "sha256:"$1}' >"${cache_path}/sha256.digest"
 		echo -n "${time}" >"${cache_path}/time"
+	else
+		touch "${cache_path}/time"
 	fi
 	[[ -f ${cache_path}/type ]] || echo -n "${type}" >"${cache_path}/type"
 	[[ -f ${cache_path}/url ]] || echo -n "${url}" >"${cache_path}/url"
````
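
Combined with the 10-minute freshness check added at the top of `download_to_cache`, the new `else` branch gives the cache a simple TTL: a recent `time` stamp short-circuits the remote probe entirely, and `touch` renews the stamp whenever the remote metadata turns out to be unchanged. A sketch of the idiom (the cache path is hypothetical):

```bash
# treat the cache as fresh if the stamp file changed within the last 10 minutes
cache_path="${HOME}/.cache/lima/download/by-url-sha256/346ee1ff" # hypothetical
if [[ -f ${cache_path}/data && -n "$(find "${cache_path}/time" -mmin -10 2>/dev/null || true)" ]]; then
	echo "${cache_path}/data" # cache hit: skip the remote probe
else
	: # probe the remote; if metadata is unchanged, run: touch "${cache_path}/time"
fi
```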
