
Commit bf33bea

[Sphinx Extension] Add offline build support (#15)
* Add -o/--offline mode, rely on the local lock file
* Fix grammar, update docs, clean up code
* Add named arguments, Remove -o flag
* Change fls.lock to spec.lock
* Remove print()
* Update fls.lock -> spec.lock
* Remove redundant offline configuration in build process
* Enhance build script with timing and docstrings
1 parent 7136e10 commit bf33bea

File tree: 5 files changed, +94 -38 lines


builder/build_cli.py

Lines changed: 42 additions & 4 deletions
```diff
@@ -13,17 +13,43 @@
 import sys
 import requests
 import json
+import time
 
 # Automatically watch the following extra directories when --serve is used.
 EXTRA_WATCH_DIRS = ["exts", "themes"]
 
 SPEC_CHECKSUM_URL = "https://spec.ferrocene.dev/paragraph-ids.json"
 SPEC_LOCKFILE = "spec.lock"
 
-def build_docs(root, builder, clear, serve, debug, spec_lock_consistency_check):
+def build_docs(
+    root: Path,
+    builder: str,
+    clear: bool,
+    serve: bool,
+    debug: bool,
+    offline: bool,
+    spec_lock_consistency_check: bool
+) -> Path:
+    """
+    Builds the Sphinx documentation with the specified options.
+
+    Args:
+        root: The root directory of the documentation.
+        builder: The builder to use (e.g., 'html', 'xml').
+        clear: Whether to disable incremental builds.
+        serve: Whether to start a local server with live reload.
+        debug: Whether to enable debug mode.
+        offline: Whether to build in offline mode.
+        spec_lock_consistency_check: Whether to check spec lock consistency.
+
+    Returns:
+        Path: The path to the generated documentation.
+    """
+
     dest = root / "build"
 
     args = ["-b", builder, "-d", dest / "doctrees"]
+
     if debug:
         # Disable parallel builds and show exceptions in debug mode.
         #
@@ -42,6 +68,8 @@ def build_docs(root, builder, clear, serve, debug, spec_lock_consistency_check):
     # Add configuration options as needed
     if not spec_lock_consistency_check:
         conf_opt_values.append("enable_spec_lock_consistency=0")
+    if offline:
+        conf_opt_values.append("offline=1")
     # Only add the --define argument if there are options to define
     if conf_opt_values:
         args.append("--define")
@@ -58,6 +86,9 @@ def build_docs(root, builder, clear, serve, debug, spec_lock_consistency_check):
         args += ["-W", "--keep-going"]
 
     try:
+
+        # Tracking build time
+        timer_start = time.perf_counter()
         subprocess.run(
             [
                 "sphinx-autobuild" if serve else "sphinx-build",
@@ -73,6 +104,8 @@ def build_docs(root, builder, clear, serve, debug, spec_lock_consistency_check):
         print("\nhint: if you see an exception, pass --debug to see the full traceback")
         exit(1)
 
+    timer_end = time.perf_counter()
+    print(f"\nBuild finished in {timer_end - timer_start:.2f} seconds.")
     return dest / builder
 
 def update_spec_lockfile(spec_checksum_location, lockfile_location):
@@ -110,16 +143,21 @@ def main(root):
     parser.add_argument(
         "-c", "--clear", help="disable incremental builds", action="store_true"
     )
+    parser.add_argument(
+        "--offline",
+        help="build in offline mode",
+        action="store_true",
+    )
     group = parser.add_mutually_exclusive_group()
     parser.add_argument(
         "--ignore-spec-lock-diff",
-        help="ignore fls.lock file differences with live release -- for WIP branches only",
+        help="ignore spec.lock file differences with live release -- for WIP branches only",
         default=False,
         action="store_true"
     )
     parser.add_argument(
         "--update-spec-lock-file",
-        help="update fls.lock file",
+        help="update spec.lock file",
         action="store_true"
     )
     group.add_argument(
@@ -145,6 +183,6 @@ def main(root):
         update_spec_lockfile(SPEC_CHECKSUM_URL, root / "src" / SPEC_LOCKFILE)
 
     rendered = build_docs(
-        root, "xml" if args.xml else "html", args.clear, args.serve, args.debug, not args.ignore_spec_lock_diff
+        root, "xml" if args.xml else "html", args.clear, args.serve, args.debug, args.offline, not args.ignore_spec_lock_diff
    )
 
```
exts/coding_guidelines/README.rst

Lines changed: 1 addition & 1 deletion
```diff
@@ -23,7 +23,7 @@ Coverage of the coding guidlines over the FLS is calculated.
 Each coding guideline has its ``:fls:`` option turned into a hyperlink to the corresponding element
 within the FLS to be able to navigate there directly.
 
-Further an ``fls.lock`` file located at ``root/src/fls.lock`` is validated against the currently
+Further an ``spec.lock`` file located at ``root/src/spec.lock`` is validated against the currently
 deployed version of the Ferrocene Language Spec and the build is failed if there is discrepency.
 
 Links to the Rust standard library
```

exts/coding_guidelines/__init__.py

Lines changed: 5 additions & 0 deletions
```diff
@@ -35,6 +35,11 @@ def merge_domaindata(self, docnames, other):
 def setup(app):
 
     app.add_domain(CodingGuidelinesDomain)
+    app.add_config_value(
+        name = "offline",
+        default=False,
+        rebuild= "env"
+    ) # register the offline option
     app.add_config_value(
         name="spec_std_docs_url",
         default="https://doc.rust-lang.org/stable/std",
```
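Registering the value is what makes `offline` visible on the config object the checks read from. A hedged sketch of that round trip follows; only the `offline` config value is taken from the diff, and the event handler is illustrative rather than part of this commit.

```python
# Sketch only: the "offline" config value mirrors the diff above; the
# env-check-consistency handler is illustrative, not the extension's code.
from sphinx.application import Sphinx

def setup(app: Sphinx):
    # After this, "--define offline=1" on the sphinx-build command line
    # (or "offline = True" in conf.py) shows up as app.config.offline.
    app.add_config_value(name="offline", default=False, rebuild="env")
    app.connect("env-check-consistency", on_check)

def on_check(app: Sphinx, env):
    if env.config.offline:
        print("offline build: reading the spec from spec.lock instead of the network")
```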

exts/coding_guidelines/fls_checks.py

Lines changed: 45 additions & 32 deletions
```diff
@@ -20,28 +20,27 @@ def check_fls(app, env):
     """Main checking function for FLS validation"""
     # First make sure all guidelines have correctly formatted FLS IDs
     check_fls_exists_and_valid_format(app, env)
+    offline_mode = env.config.offline
 
     # Gather all FLS paragraph IDs from the specification and get the raw JSON
-    fls_ids, raw_json_data = gather_fls_paragraph_ids(fls_paragraph_ids_url)
-
+    fls_ids, raw_json_data = gather_fls_paragraph_ids(app, fls_paragraph_ids_url)
     # Error out if we couldn't get the raw JSON data
     if not raw_json_data:
         error_message = f"Failed to retrieve or parse the FLS specification from {fls_paragraph_ids_url}"
         logger.error(error_message)
-        raise FLSValidationError(error_message)
-
-    # Check for differences against lock file
-    has_differences, differences = check_fls_lock_consistency(app, env, raw_json_data)
-    if has_differences:
-        error_message = "The FLS specification has changed since the lock file was created:\n"
-        for diff in differences:
-            error_message += f" - {diff}\n"
-        error_message += "\nPlease manually inspect FLS spec items whose checksums have changed as corresponding guidelines may need to account for these changes."
-        error_message += "\nOnce resolved, you may run the following to update the local spec lock file:"
-        error_message += "\n\t./make.py --update-spec-lock-file"
-        logger.error(error_message)
-        raise FLSValidationError(error_message)
-
+        raise FLSValidationError(error_message)
+    if not offline_mode: # in offline mode, ignore checking against the lock file
+        # Check for differences against lock file
+        has_differences, differences = check_fls_lock_consistency(app, env, raw_json_data)
+        if has_differences:
+            error_message = "The FLS specification has changed since the lock file was created:\n"
+            for diff in differences:
+                error_message += f" - {diff}\n"
+            error_message += "\nPlease manually inspect FLS spec items whose checksums have changed as corresponding guidelines may need to account for these changes."
+            error_message += "\nOnce resolved, you may run the following to update the local spec lock file:"
+            error_message += "\n\t./make.py --update-spec-lock-file"
+            logger.error(error_message)
+            raise FLSValidationError(error_message)
     # Check if all referenced FLS IDs exist
     check_fls_ids_correct(app, env, fls_ids)
 
@@ -154,37 +153,51 @@ def check_fls_ids_correct(app, env, fls_ids):
     logger.info("All FLS references in guidelines are valid")
 
 
-def gather_fls_paragraph_ids(json_url):
+def gather_fls_paragraph_ids(app, json_url):
     """
-    Gather all Ferrocene Language Specification paragraph IDs from the paragraph-ids.json file,
-    including both container section IDs and individual paragraph IDs.
+    Gather all Ferrocene Language Specification paragraph IDs from the paragraph-ids.json file
+    or from the lock file in offline mode, including both container section IDs and individual paragraph IDs.
 
     Args:
+        app: The Sphinx application
         json_url: The URL or path to the paragraph-ids.json file
 
     Returns:
         Dictionary mapping paragraph IDs to metadata AND the complete raw JSON data
     """
-    logger.info("Gathering FLS paragraph IDs from %s", json_url)
+    offline = app.config.offline
+    lock_path = app.confdir / 'spec.lock'
 
     # Dictionary to store all FLS IDs and their metadata
    all_fls_ids = {}
     raw_json_data = None
 
     try:
         # Load the JSON file
-        response = requests.get(json_url)
-        response.raise_for_status()  # Raise exception for HTTP errors
-
-        # Parse the JSON data
-        try:
-            raw_json_data = response.json()
-            data = raw_json_data  # Keep reference to the original data
-            logger.debug("Successfully parsed JSON data")
-        except json.JSONDecodeError as e:
-            logger.error(f"Failed to parse JSON: {e}")
-            logger.debug(f"Response content preview: {response.text[:500]}...")
-            raise
+        if not offline:
+            logger.info("Gathering FLS paragraph IDs from %s", json_url)
+            response = requests.get(json_url)
+            response.raise_for_status()  # Raise exception for HTTP errors
+            # Parse the JSON data
+            try:
+                raw_json_data = response.json()
+                data = raw_json_data  # Keep reference to the original data
+                logger.debug("Successfully parsed JSON data")
+            except json.JSONDecodeError as e:
+                logger.error(f"Failed to parse JSON: {e}")
+                logger.debug(f"Response content preview: {response.text[:500]}...")
+                raise
+
+        else : # if online mode is on read from the lock file
+
+            if not lock_path.exists():
+                logger.warning(f"No FLS lock file found at {lock_path}") # TODO: returns an error
+                return False, []
+            logger.info("Gathering FLS paragraph IDs from lock file: %s", lock_path)
+            with open(lock_path, 'r', encoding='utf-8') as f:
+                raw_json_data=f.read()
+            data = json.loads(raw_json_data)
+
 
 
         if 'documents' not in data:
```
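Stripped of logging and error handling, the new branch in gather_fls_paragraph_ids reduces to a choice between the live paragraph-ids.json and the local lock file. A condensed sketch under those assumptions; the real function also extracts the paragraph IDs and returns them alongside the raw data.

```python
# Sketch only: mirrors the online/offline split above; ID extraction,
# logging, and the (False, []) early return are omitted for brevity.
import json
from pathlib import Path
import requests

def load_raw_spec(offline: bool, json_url: str, lock_path: Path) -> dict:
    if not offline:
        # Online: fetch the live paragraph-ids.json from the spec site.
        response = requests.get(json_url)
        response.raise_for_status()
        return response.json()
    # Offline: the previously fetched spec.lock is the source of truth.
    if not lock_path.exists():
        raise FileNotFoundError(f"No FLS lock file found at {lock_path}")
    return json.loads(lock_path.read_text(encoding="utf-8"))
```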

exts/coding_guidelines/fls_linking.py

Lines changed: 1 addition & 1 deletion
```diff
@@ -26,7 +26,7 @@ def load_fls_ids(app):
     """Load FLS IDs and their URLs."""
     try:
         from . import fls_checks
-        fls_ids, _ = fls_checks.gather_fls_paragraph_ids(app.config.fls_paragraph_ids_url)
+        fls_ids, _ = fls_checks.gather_fls_paragraph_ids(app, app.config.fls_paragraph_ids_url )
         return {fls_id: data['url'] for fls_id, data in fls_ids.items()}
     except Exception as e:
         logger.error(f"Failed to load FLS IDs: {e}")
```
