-
Notifications
You must be signed in to change notification settings - Fork 105
/
Copy pathAutoBuild.jl
1663 lines (1457 loc) · 67.2 KB
/
AutoBuild.jl
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
export build_tarballs, autobuild, print_artifacts_toml, build, get_meta_json
import GitHub: gh_get_json, DEFAULT_API
import SHA: sha256, sha1
using TOML, Dates, UUIDs
using RegistryTools
import LibGit2
import PkgLicenses
# Default `julia` compat bound written into the `[compat]` section of the
# `Project.toml` of generated JLL packages (see `build_tarballs`'s
# `julia_compat` keyword, which defaults to this).
const DEFAULT_JULIA_VERSION_SPEC = "1.0"
# Default JLLWrappers compat bound for generated JLL packages; `register_jll`
# raises it to "1.4.0" when an `augment_platform_block` is used.
const DEFAULT_JLLWRAPPERS_VERSION_SPEC = "1.2.0"
# Where `semver_spec` lives: `Pkg.Versions` on Julia >= 1.7, `Pkg.Types` on
# older Julia (used by `minimum_compat` below).
const PKG_VERSIONS = Base.VERSION >= v"1.7-" ? Pkg.Versions : Pkg.Types
# Wall-clock timestamps bracketing the four phases of a build (setup, build,
# audit, package).  Fields are stamped with `time()` as each phase starts and
# finishes; they begin as `NaN`, and timings are only displayed once every
# field has been set (see the `Base.show` method below).
mutable struct BuildTimer
    begin_setup::Float64      # workspace/shard setup started
    end_setup::Float64        # setup finished
    begin_build::Float64      # build script started
    end_build::Float64        # build script finished
    begin_audit::Float64      # post-build audit started
    end_audit::Float64        # audit finished
    begin_package::Float64    # tarball packaging started
    end_package::Float64      # packaging finished
    # Fresh timer with all timestamps unset.
    BuildTimer() = new(NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN)
end
function Base.show(io::IO, t::BuildTimer)
    # Render the span between two timestamps as e.g. "1m 12.34s" or "12.34s".
    function fmt_span(start, stop)
        mins, secs = divrem(stop - start, 60)
        prefix = mins ≥ 1 ? string(Int(mins), "m ") : ""
        return string(prefix, round(secs; digits=2), "s")
    end
    # Only print timings once every phase has been stamped; any remaining NaN
    # means the build did not complete all phases, so we print nothing.
    stamps = (getfield(t, name) for name in fieldnames(BuildTimer))
    if !any(isnan, stamps)
        print(io, "Timings: ",
              "setup: ", fmt_span(t.begin_setup, t.end_setup), ", ",
              "build: ", fmt_span(t.begin_build, t.end_build), ", ",
              "audit: ", fmt_span(t.begin_audit, t.end_audit), ", ",
              "packaging: ", fmt_span(t.begin_package, t.end_package))
    end
end
# Filter callback for `package`: keep every entry except the "logs" directory.
function exclude_logs(_, name)
    return name != "logs"
end
# Filter callback for `package`: keep only the "logs" directory entry.
function only_logs(_, name)
    return name == "logs"
end
# Smallest version number admitted by a compat specification string,
# e.g. `minimum_compat("1.6")` yields `v"1.6.0"`.
function minimum_compat(compat::String)
    spec = PKG_VERSIONS.semver_spec(compat)
    return minimum(VersionNumber(rng.lower.t) for rng in spec.ranges)
end
# Help text printed by `build_tarballs` when `--help` is passed on the
# command line.  The list of supported platforms is interpolated at load time.
const BUILD_HELP = (
    """
    Usage: build_tarballs.jl [target1,target2,...] [--help]
    [--verbose] [--debug]
    [--deploy] [--deploy-bin] [--deploy-jll]
    [--register] [--meta-json] [--skip-audit]
    Options:
    targets By default `build_tarballs.jl` will build a tarball
    for every target within the `platforms` variable.
    To override this, pass in a list of comma-separated
    target triplets for each target to be built. Note
    that this can be used to build for platforms that
    are not listed in the 'default list' of platforms
    in the build_tarballs.jl script.
    --verbose This streams compiler output to stdout during the
    build which can be very helpful for finding bugs.
    Note that it is colorized if you pass the
    --color=yes option to julia, see examples below.
    --debug=<mode> This causes a failed build to drop into an
    interactive shell for debugging purposes. `<mode>`
    can be one of `error`, `begin` or `end`. `error`
    drops you into the interactive shell only when there
    is an error during the build, this is the default
    when no mode is specified. `begin` forces an error
    at the beginning of the build, before any command in
    the script is run. `end` forces an error at the end
    of the build script, useful to debug a successful
    build for which the auditor would fail.
    --deploy=<repo> Deploy binaries and JLL wrapper code to a github
    release of an autogenerated repository. Uses
    `github.com/JuliaBinaryWrappers/<name>_jll.jl` by
    default, unless `<repo>` is set, in which case it
    should be set as `<owner>/<name>_jll.jl`. Setting
    this option is equivalent to setting `--deploy-bin`
    and `--deploy-jll`. If `<repo>` is set to "local"
    then nothing will be uploaded, but JLL packages
    will still be written out to `~/.julia/dev/`.
    --deploy-bin=<repo> Deploy just the built binaries
    --deploy-jll=<repo> Deploy just the JLL code wrappers
    --register=<depot> Register into the given depot. If no path is
    given, defaults to `~/.julia`. Registration
    requires deployment of the JLL wrapper code,
    so using `--register` without `--deploy` or the
    more specific `--deploy-jll` is an error.
    --meta-json Output a JSON representation of the given build
    instead of actually building. Note that this can
    (and often does) output multiple JSON objects for
    multiple platforms, multi-stage builds, etc...
    --skip-audit Skips auditing of the output products.
    --help Print out this message.
    Examples:
    julia --color=yes build_tarballs.jl --verbose
    This builds all tarballs, with colorized output.
    julia build_tarballs.jl x86_64-linux-gnu,i686-linux-gnu
    This builds two tarballs for the two platforms given, with a
    minimum of output messages.
    Supported Platforms:
    $(join(sort(triplet.(supported_platforms())), "\n "))
    """
)
"""
    build_tarballs(ARGS, src_name, src_version, sources, script, platforms,
                   products, dependencies; kwargs...)

This should be the top-level function called from a `build_tarballs.jl` file.
It takes in the information baked into a `build_tarballs.jl` file such as the
`sources` to download, the `products` to build, etc... and will automatically
download, build and package the tarballs, generating a `build.jl` file when
appropriate.

Generally, `ARGS` should be the top-level Julia `ARGS` command-line arguments
object. `build_tarballs` does some rudimentary parsing of the arguments. To
see what it can do, you can call it with `--help` in the `ARGS` or see the
[Command Line](@ref) section in the manual.

The `kwargs` are passed on to [`autobuild`](@ref), see there for a list of
supported ones. A few additional keyword arguments are accepted:

* `julia_compat` can be set to a version string which is used to set the
  supported Julia version in the `[compat]` section of the `Project.toml` of
  the generated JLL package. The default value is `"1.0"`.

* `lazy_artifacts` sets whether the artifacts should be lazy.

* `init_block` may be set to a string containing Julia code; if present, this
  code will be inserted into the initialization path of the generated JLL
  package. This can for example be used to invoke an initialization API of a
  shared library.

* `augment_platform_block` may be set to a string containing Julia code; if
  present, this code will be inserted into the top-level of the
  generated JLL package. It must define a function `augment_platform!` that
  takes as a single argument, the target platform and returns the target
  platform, with amended tags. This augmented platform will then be used by the
  JLL wrapper to select the artifact. Note that this option requires the Julia
  compatibility `julia_compat` to be 1.6 or higher.

!!! note

    The `init_block` and `augment_platform_block` keyword arguments are
    experimental and may be removed in a future version of this package.
    Please use them sparingly.
"""
function build_tarballs(ARGS, src_name, src_version, sources, script,
                        platforms, products, dependencies;
                        julia_compat::String = DEFAULT_JULIA_VERSION_SPEC,
                        kwargs...)
    @nospecialize

    # See if someone has passed in `--help`, and if so, give them the
    # assistance they so clearly long for
    if "--help" in ARGS
        println(BUILD_HELP)
        return nothing
    end

    # The JLL package name is derived from `src_name`, so it must be a valid
    # Julia identifier.
    if !Base.isidentifier(src_name)
        error("Package name \"$(src_name)\" is not a valid identifier")
    end

    # Throw an error if we're going to build for platforms not supported by Julia v1.5-.
    if any(p -> arch(p) == "armv6l" || (Sys.isapple(p) && arch(p) == "aarch64"), platforms) && minimum_compat(julia_compat) < v"1.6"
        error("Experimental platforms cannot be used with Julia v1.5-.\nChange `julia_compat` to require at least Julia v1.6")
    end

    # XXX: These are needed as long as we support old-style sources and
    # dependencies. Raise a warning for now, deprecate in BB 0.3+
    sources = coerce_source.(sources)
    dependencies = coerce_dependency.(dependencies)

    # Reject user supplied dependencies using a VersionSpec: these should
    # either use compat, or build_version, or both (depending on what they are
    # trying to achieve). We cannot check for this in the Dependency
    # constructor, as there are several valid situations in which we *do* want
    # to store versions here (e.g. after running the dependency through the
    # package resolver).
    for dep in dependencies
        if dep isa Dependency && dep.pkg.version != Pkg.Types.VersionSpec("*")
            error("Dependency $(dep.pkg.name) specifies a version, use build_version and/or compat instead")
        end
    end

    # Do not clobber caller's ARGS
    ARGS = deepcopy(ARGS)

    # This sets whether we should build verbosely or not
    verbose = check_flag!(ARGS, "--verbose")

    # This sets whether auditing should be skipped
    skip_audit = check_flag!(ARGS, "--skip-audit")

    # This sets whether we drop into a debug shell on failure or not
    debug, debug_mode = extract_flag!(ARGS, "--debug", "error")

    # Are we skipping building and just outputting JSON?
    meta_json, meta_json_file = extract_flag!(ARGS, "--meta-json")

    # This sets whether we are going to deploy our binaries/wrapper code to GitHub releases
    deploy, deploy_repo = extract_flag!(ARGS, "--deploy", "JuliaBinaryWrappers/$(src_name)_jll.jl")
    deploy_bin, deploy_bin_repo = extract_flag!(ARGS, "--deploy-bin", "JuliaBinaryWrappers/$(src_name)_jll.jl")
    deploy_jll, deploy_jll_repo = extract_flag!(ARGS, "--deploy-jll", "JuliaBinaryWrappers/$(src_name)_jll.jl")

    # Resolve deploy settings.  NOTE: the combined `deploy_bin && deploy_jll`
    # case must be checked *before* the single-flag cases; placing it after
    # `elseif deploy_bin` made it unreachable, silently accepting mismatched
    # `--deploy-bin`/`--deploy-jll` repositories.
    if deploy
        deploy_bin = true
        deploy_jll = true
        deploy_bin_repo = deploy_repo
        deploy_jll_repo = deploy_repo
    elseif deploy_bin && deploy_jll
        # Both explicitly given: the two repositories must agree.
        if deploy_bin_repo != deploy_jll_repo
            error("Binaries and JLLs must be deployed to the same repositories")
        end
    elseif deploy_bin # make sure bin repo and jll repo match
        deploy_jll_repo = deploy_bin_repo
    elseif deploy_jll
        deploy_bin_repo = deploy_jll_repo
    end

    # This sets whether we are going to register, and if so, where
    register, register_path = extract_flag!(ARGS, "--register", Pkg.depots1())
    if register && !deploy_jll
        error("Cannot register without deploying!")
    end
    if register && deploy_jll_repo == "local"
        error("Cannot register with a local deployment!")
    end

    if deploy_bin || deploy_jll
        code_dir = joinpath(Pkg.devdir(), "$(src_name)_jll")
        # Shove them into `kwargs` so that we are conditionally passing them along
        kwargs = (; kwargs..., code_dir = code_dir)
    end

    # If --meta-json was passed, error out if any confusing options were passed
    meta_json_stream = nothing
    if meta_json
        if deploy || deploy_bin || deploy_jll
            error("Cannot specify --deploy* with --meta-json!")
        end
        if register
            error("Cannot specify --register with --meta-json!")
        end
        if debug
            error("Cannot specify --debug with --meta-json!")
        end

        # Otherwise, check to see if we're spitting it out to stdout or a file:
        if meta_json_file === nothing
            meta_json_stream = stdout
        else
            meta_json_stream = open(meta_json_file, "a")
        end
    end

    # If the user passed in a platform (or a few, comma-separated) on the
    # command-line, use that instead of our default platforms
    if length(ARGS) > 0
        platforms = BinaryBuilderBase.parse_platform.(split(ARGS[1], ","))
    end

    # Check to make sure we have the necessary environment stuff
    if deploy_bin || deploy_jll
        # Check to see if we've already got a wrapper package within the Registry,
        # choose a version number that is greater than anything else existent.
        build_version = get_next_wrapper_version(src_name, src_version)
        if deploy_jll_repo != "local"
            @info("Building and deploying version $(build_version) to $(deploy_jll_repo)")
            # We need to make sure that the JLL repo at least exists, so that we can deploy binaries to it
            # even if we're not planning to register things to it today.
            init_jll_package(code_dir, deploy_jll_repo)
        else
            @info("Building and deploying version $(build_version) to $(code_dir)")
            # XXX: should we initialize the git repository here? The problem is that if we
            # don't clone for the remote we end up with a diverging history.
        end
        tag = "$(src_name)-v$(build_version)"
    end

    # Modify script for debugging: force a failure at the very start or very
    # end of the script so the interactive debug shell triggers there.
    if debug
        if debug_mode == "begin"
            script = "false\n" * script
        elseif debug_mode == "end"
            script = script * "\nfalse"
        end
    end

    args = (
        # Source information
        src_name,
        src_version,
        sources,
        # Build script
        script,
        # Platforms to build for
        platforms,
        # Products we're expecting
        products,
        # Dependencies that must be downloaded
        dependencies,
    )
    extra_kwargs = extract_kwargs(kwargs, (:lazy_artifacts, :init_block, :augment_platform_block))

    if meta_json_stream !== nothing
        # If they've asked for the JSON metadata, by all means, give it to them!
        dict = get_meta_json(args...; extra_kwargs..., julia_compat=julia_compat)
        println(meta_json_stream, JSON.json(dict))
        if meta_json_stream !== stdout
            close(meta_json_stream)
        end
        build_output_meta = Dict()
    else
        # Build the given platforms using the given sources
        build_output_meta = autobuild(
            # Controls output product placement, mount directory placement, etc...
            pwd(),
            args...;
            # Flags
            verbose,
            debug,
            skip_audit,
            kwargs...,
        )
    end

    if deploy_jll
        if verbose
            @info("Committing and pushing $(src_name)_jll.jl wrapper code version $(build_version)...")
        end
        # For deploy discard build-only dependencies
        # and make sure we get a `Vector{Dependency}`
        dependencies = Dependency[dep for dep in dependencies if is_runtime_dependency(dep)]
        # The location the binaries will be available from
        bin_path = "https://github.com/$(deploy_jll_repo)/releases/download/$(tag)"
        build_jll_package(src_name, build_version, sources, code_dir, build_output_meta,
                          dependencies, bin_path; verbose, julia_compat, extra_kwargs...)
        if deploy_jll_repo != "local"
            push_jll_package(src_name, build_version; code_dir=code_dir, deploy_repo=deploy_jll_repo)
        end
        if register
            if verbose
                @info("Registering new wrapper code version $(build_version)...")
            end
            register_jll(src_name, build_version, dependencies, julia_compat;
                         deploy_repo=deploy_jll_repo, code_dir=code_dir, extra_kwargs...)
        end
    end

    if deploy_bin && deploy_bin_repo != "local"
        # Upload the binaries
        if verbose
            @info("Deploying binaries to release $(tag) on $(deploy_bin_repo) via `ghr`...")
        end
        upload_to_github_releases(deploy_bin_repo, tag, joinpath(pwd(), "products"); verbose=verbose)
    end

    return build_output_meta
end
# Report whether `flag` occurs in `ARGS`, removing every occurrence of it.
function check_flag!(ARGS, flag)
    found = any(isequal(flag), ARGS)
    filter!(!isequal(flag), ARGS)
    return found
end
# Look for `flag` (either bare, or in `flag=value` form) in `ARGS`.  Returns
# `(found, value)` where `value` is the `=`-suffix if one was given, and
# otherwise the `val` default; all matching entries are removed from `ARGS`.
function extract_flag!(ARGS, flag, val = nothing)
    assigned_prefix = string(flag, "=")
    idx = findfirst(a -> a == flag || startswith(a, assigned_prefix), ARGS)
    if idx === nothing
        return (false, val)
    end
    hit = ARGS[idx]
    # `--flag=foo` carries a value; bare `--flag` keeps the default.
    if hit != flag
        val = split(hit, '=')[2]
    end
    # Drop this value from our ARGS
    filter!(x -> x != hit, ARGS)
    return (true, val)
end
"""
    get_compilers_versions(; compilers = [:c])

Return the script string that is used to print the versions of the given `compilers`.
"""
function get_compilers_versions(; compilers = [:c])
    # One shell command per line; `set -x` makes the shell echo each command
    # before running it, so the log shows which tool produced which output.
    cmds = ["set -x"]
    if :c in compilers
        append!(cmds, [
            "cc --version",
            "c++ --version",
            "gcc --version",
            "g++ --version",
            "clang --version",
            "clang++ --version",
            "objc --version",
            "f77 --version",
            "gfortran --version",
            "ld -v",
        ])
    end
    if :go in compilers
        push!(cmds, "go version")
    end
    if :rust in compilers
        append!(cmds, [
            "rustc --version",
            "rustup --version",
            "cargo --version",
        ])
    end
    return join(cmds, '\n') * '\n'
end
# Upload everything under `path` to the GitHub release `tag` of `repo` using
# the `ghr` tool, retrying up to `attempts` times since uploads can fail
# transiently.  Errors out if every attempt fails.
function upload_to_github_releases(repo, tag, path; gh_auth=Wizard.github_auth(;allow_anonymous=false),
                                   attempts::Int = 3, verbose::Bool = false)
    for attempt in 1:attempts
        succeeded = try
            ghr() do ghr_path
                run(`$ghr_path -u $(dirname(repo)) -r $(basename(repo)) -t $(gh_auth.token) $(tag) $(path)`)
            end
            true
        catch
            false
        end
        succeeded && return
        if verbose
            @info("`ghr` upload step failed, beginning attempt #$(attempt)...")
        end
    end
    error("Unable to upload $(path) to GitHub repo $(repo) on tag $(tag)")
end
"""
    get_next_wrapper_version(src_name, src_version)

Return the version to use for the JLL wrapper of `src_name`: `src_version`
with a build number strictly greater than any already registered for the same
`major.minor.patch`.  If `src_version` already carries a build number it is
returned unchanged.
"""
function get_next_wrapper_version(src_name::AbstractString, src_version::VersionNumber)
    # If src_version already has a build_number, just return it immediately
    if src_version.build != ()
        return src_version
    end
    ctx = Pkg.Types.Context()

    # Force-update the registry here, since we may have pushed a new version recently
    update_registry(devnull)

    jll_name = "$(src_name)_jll"
    uuid = jll_uuid(jll_name)

    # If the package is already registered, we need to bump the build number
    # past the largest one recorded for this `major.minor.patch`.
    build_number = UInt64(0)
    if uuid in Pkg.Types.registered_uuids(ctx.registries, jll_name)
        # Collect all version numbers of the package across all registries.
        versions = VersionNumber[]
        for reg in ctx.registries
            if !haskey(reg, uuid)
                continue
            end
            pkg_info = Pkg.Registry.registry_info(reg[uuid])
            append!(versions, sort!(collect(keys(pkg_info.version_info))))
        end
        unique!(sort!(versions))

        # Find largest version number that matches ours
        filter!(v -> (v.major == src_version.major) &&
                     (v.minor == src_version.minor) &&
                     (v.patch == src_version.patch) &&
                     (v.build isa Tuple{<:UInt}), versions)

        # Our build number must be larger than the maximum already present in the registry
        if !isempty(versions)
            build_number = first(maximum(versions).build) + 1
        end
    end

    # Construct and explicitly return build_version (src_version + build_number);
    # previously the function relied on the implicit value of the assignment.
    return VersionNumber(src_version.major, src_version.minor,
                         src_version.patch, src_version.prerelease, (build_number,))
end
# Return the names of all packages registered in the registry at
# `registry_url`.  The registry is shallow-state: we clone it into a temporary
# directory, read `Registry.toml`, and throw the clone away.
#
# Uses the `mktempdir(f)` do-block form so the temporary directory is removed
# even when cloning or parsing throws; the previous version only cleaned up on
# the success path (leaving an `atexit`-registered closure per call otherwise).
function _registered_packages(registry_url::AbstractString)
    return mktempdir() do tmp_dir
        registry_dir = joinpath(tmp_dir, "REGISTRY")
        LibGit2.clone(registry_url, registry_dir)
        registry = TOML.parsefile(joinpath(registry_dir, "Registry.toml"))
        # `registry["packages"]` maps UUID => info dict; collect the names.
        return String[p[2]["name"] for p in registry["packages"]]
    end
end
# Whether `package` appears in the registry hosted at `registry_url`
# (fetches the registry's package list fresh each call).
function _package_is_registered(registry_url::AbstractString,
                                package::AbstractString)
    return package in _registered_packages(registry_url)
end
# Whether we are running inside Yggdrasil's CI, signalled via the environment.
function is_yggdrasil()
    return get(ENV, "YGGDRASIL", "false") == "true"
end

# The commit currently being built, from Buildkite's environment; empty string
# when not available.
function yggdrasil_head()
    return get(ENV, "BUILDKITE_COMMIT", "")
end
"""
    register_jll(name, build_version, dependencies, julia_compat; kwargs...)

Register the JLL wrapper package `\$(name)_jll` at `build_version` in the
`General` registry: push a registration branch via RegistryTools, then open
(or update) a pull request against `JuliaRegistries/General`.

NOTE(review): assumes the wrapper code has already been committed in
`code_dir` and that network access to GitHub is available — confirm before
calling outside of a deployment flow.
"""
function register_jll(name, build_version, dependencies, julia_compat;
                      deploy_repo="JuliaBinaryWrappers/$(name)_jll.jl",
                      code_dir=joinpath(Pkg.devdir(), "$(name)_jll"),
                      gh_auth=Wizard.github_auth(;allow_anonymous=false),
                      gh_username=gh_get_json(DEFAULT_API, "/user"; auth=gh_auth)["login"],
                      augment_platform_block::String="",
                      lazy_artifacts::Bool=!isempty(augment_platform_block) && minimum_compat(julia_compat) < v"1.7",
                      kwargs...)
    # Platform augmentation needs Julia >= 1.6 (same check as `build_tarballs`).
    if !isempty(augment_platform_block) && minimum_compat(julia_compat) < v"1.6"
        error("Augmentation blocks cannot be used with Julia v1.5-.\nChange `julia_compat` to require at least Julia v1.6")
    end

    # Calculate tree hash of wrapper code
    wrapper_tree_hash = bytes2hex(Pkg.GitTools.tree_hash(code_dir))
    wrapper_commit_hash = LibGit2.head(code_dir)

    # Use RegistryTools to push up a new `General` branch with this JLL package registered within it
    # TODO: Update our fork periodically from upstream `General`.
    cache = RegistryTools.RegistryCache(joinpath(Pkg.depots1(), "registries_binarybuilder"))
    # Authenticated push URL for our General fork/clone.
    registry_url = "https://$(gh_username):$(gh_auth.token)@github.com/JuliaRegistries/General"
    cache.registries[registry_url] = Base.UUID("23338594-aafe-5451-b93e-139f81909106")
    # Augmentation needs a newer JLLWrappers than the default compat bound.
    jllwrappers_compat = isempty(augment_platform_block) ? DEFAULT_JLLWRAPPERS_VERSION_SPEC : "1.4.0"
    project = Pkg.Types.Project(build_project_dict(name, build_version, dependencies, julia_compat; jllwrappers_compat, lazy_artifacts, augment_platform_block))
    # Registering a version lower than the current maximum is allowed for JLLs,
    # so drop that check from the set of fatal registrator errors.
    errors = setdiff(RegistryTools.registrator_errors, [:version_less_than_all_existing])
    reg_branch = RegistryTools.register(
        "https://github.com/$(deploy_repo).git",
        project,
        wrapper_tree_hash;
        registry=registry_url,
        cache=cache,
        push=true,
        checks_triggering_error = errors,
    )
    if haskey(reg_branch.metadata, "error")
        @error(reg_branch.metadata["error"])
    else
        upstream_registry_url = "https://github.com/JuliaRegistries/General"
        name_jll = "$(name)_jll"
        # Title the PR depending on whether this is a brand-new package or a
        # new version of an already-registered one.
        if _package_is_registered(upstream_registry_url, name_jll)
            pr_title = "New version: $(name_jll) v$(build_version)"
        else
            pr_title = "New package: $(name_jll) v$(build_version)"
        end
        # Open pull request against JuliaRegistries/General
        body = """
        Autogenerated JLL package registration
        * Registering JLL package $(basename(deploy_repo))
        * Repository: https://github.com/$(deploy_repo)
        * Version: v$(build_version)
        * Commit: $(wrapper_commit_hash)
        """
        # When running on Yggdrasil, link back to the triggering commit and
        # credit its author in the PR body.
        if is_yggdrasil()
            commit_hash = yggdrasil_head()
            body *= """
            * Revision on Yggdrasil: https://github.com/JuliaPackaging/Yggdrasil/commit/$commit_hash
            """
            commit_author_login = get_github_author_login("JuliaPackaging/Yggdrasil", commit_hash; gh_auth=gh_auth)
            if commit_author_login !== nothing
                body *= """
                * Created by: @$commit_author_login
                """
            end
        end
        params = Dict(
            "base" => "master",
            "head" => "$(reg_branch.branch)",
            "maintainer_can_modify" => true,
            "title" => pr_title,
            "body" => body,
        )
        Wizard.create_or_update_pull_request("JuliaRegistries/General", params; auth=gh_auth)
    end
end
# Assemble a JSON-serializable dictionary describing this build (used when
# `--meta-json` is passed instead of actually building).
function get_meta_json(
    src_name::AbstractString,
    src_version::VersionNumber,
    sources::Vector{<:AbstractSource},
    script::AbstractString,
    platforms::Vector,
    products::Vector{<:Product},
    dependencies::Vector{<:AbstractDependency};
    julia_compat::String = DEFAULT_JULIA_VERSION_SPEC,
    init_block::String = "",
    augment_platform_block::String = "",
    lazy_artifacts::Bool=!isempty(augment_platform_block) && minimum_compat(julia_compat) < v"1.7",
)
    meta = Dict{String,Any}(
        "name" => src_name,
        "version" => "v$(src_version)",
        "sources" => sources,
        "script" => script,
        "products" => products,
        "dependencies" => dependencies,
        "julia_compat" => julia_compat,
        "lazy_artifacts" => lazy_artifacts,
        "init_block" => init_block,
        "augment_platform_block" => augment_platform_block,
    )
    # Do not write the list of platforms when building only for `AnyPlatform`
    if platforms != [AnyPlatform()]
        meta["platforms"] = map(triplet, platforms)
    end
    return meta
end
# Build the message shown before dropping into the interactive debug shell.
# Lists every `*.log` file found under `<workspace>/srcdir`, with the
# workspace root shown as a literal `${WORKSPACE}` placeholder.
function compose_debug_prompt(workspace)
    logs = String[]
    for (root, _, files) in walkdir(joinpath(workspace, "srcdir"))
        for name in files
            endswith(name, ".log") || continue
            push!(logs, replace(joinpath(root, name), workspace => "\${WORKSPACE}"))
        end
    end
    if isempty(logs)
        return "Build failed, launching debug shell:"
    end
    listing = join(logs, "\n - ")
    return "Build failed, the following log files were generated:\n- " *
           listing * "\nLaunching debug shell:\n"
end
"""
autobuild(dir::AbstractString, src_name::AbstractString,
src_version::VersionNumber, sources::Vector,
script::AbstractString, platforms::Vector,
products::Vector, dependencies::Vector;
verbose = false, debug = false,
skip_audit = false, ignore_audit_errors = true,
autofix = true, code_dir = nothing,
meta_json_file = nothing, require_license = true,
dont_dlopen = false, kwargs...)
Runs the boiler plate code to download, build, and package a source package
for a list of platforms. This method takes a veritable truckload of arguments,
here are the relevant actors, broken down in brief:
* `dir`: the root of the build; products will be placed within `dir`/products,
and mountpoints will be placed within `dir`/build/.
* `src_name`: the name of the source package being built and will set the name
of the built tarballs.
* `src_version`: the version of the source package.
* `platforms`: a list of platforms to build for.
* `sources`: a vector of all sources to download and unpack before building
begins, as [`AbstractSource`](@ref)s.
* `script`: a string representing a shell script to run as the build.
* `products`: the list of `Product`s which shall be built.
* `dependencies`: a vector of JLL dependency packages as
[`AbstractDependency`](@ref) that should be installed before building begins.
* `verbose`: Enable verbose mode. What did you expect?
* `debug`: cause a failed build to drop into an interactive shell so that
the build can be inspected easily.
* `skip_audit`: disable the typical audit that occurs at the end of a build.
* `ignore_audit_errors`: do not kill a build even if a problem is found.
* `autofix`: give `BinaryBuilder` permission to automatically fix issues it
finds during audit passes. Highly recommended.
* `code_dir`: sets where autogenerated JLL packages will be put.
* `require_license` enables a special audit pass that requires licenses to be
installed by all packages.
* `dont_dlopen`: don't try to `dlopen` library products. This is separate from
specifying `dont_dlopen` on a `LibraryProduct` in that it still results in
the generated JLL loading the library at run time, and only prevents
BinaryBuilder from doing so during JLL generation.
"""
function autobuild(dir::AbstractString,
src_name::AbstractString,
src_version::VersionNumber,
sources::Vector{<:AbstractSource},
script::AbstractString,
platforms::Vector,
products::Vector{<:Product},
dependencies::Vector{<:AbstractDependency};
verbose::Bool = false,
debug::Bool = false,
skip_audit::Bool = false,
ignore_audit_errors::Bool = true,
autofix::Bool = true,
code_dir::Union{String,Nothing} = nothing,
require_license::Bool = true,
dont_dlopen::Bool = false,
kwargs...)
@nospecialize
# This is what we'll eventually return
@info("Building for $(join(sort(triplet.(platforms)), ", "))")
build_output_meta = Dict()
# Resolve dependencies into PackageSpecs now, ensuring we have UUIDs for all deps
all_resolved, dependencies = resolve_jlls(dependencies, outs=(verbose ? stdout : devnull))
if !all_resolved
error("Invalid dependency specifications!")
end
# If the user passed in a src_version with a build number, bail out
if any(!isempty, (src_version.prerelease, src_version.build))
error("Will not build with a `src_version` that does not have the format `major.minor.patch`! Do not set prerelease or build numbers.")
end
# We must prepare our sources. Download them, hash them, etc...
source_files = download_source.(sources; verbose=verbose)
# Our build products will go into ./products
out_path = joinpath(dir, "products")
try mkpath(out_path) catch; end
for platform in sort(collect(platforms), by = triplet)
timer = BuildTimer()
timer.begin_setup = time()
# We build in a platform-specific directory
build_path = joinpath(dir, "build", triplet(platform))
mkpath(build_path)
shards = choose_shards(platform; extract_kwargs(kwargs, (:preferred_gcc_version,:preferred_llvm_version,:bootstrap_list,:compilers))...)
concrete_platform = get_concrete_platform(platform, shards)
prefix = setup_workspace(
build_path,
source_files,
concrete_platform,
default_host_platform;
verbose=verbose,
)
setup_deps(f, prefix, dependencies, platform, verbose) =
setup_dependencies(prefix, Pkg.Types.PackageSpec[getpkg(d) for d in filter_platforms(dependencies, platform) if f(d) && is_build_dependency(d)], platform; verbose)
host_artifact_paths = setup_deps(is_host_dependency, prefix, dependencies, default_host_platform, verbose)
target_artifact_paths = setup_deps(is_target_dependency, prefix, dependencies, concrete_platform, verbose)
# Create a runner to work inside this workspace with the nonce built-in
ur = preferred_runner()(
prefix.path;
cwd = "/workspace/srcdir",
platform = concrete_platform,
verbose = verbose,
workspaces = [
joinpath(prefix, "metadir") => "/meta",
],
compiler_wrapper_dir = joinpath(prefix, "compiler_wrappers"),
src_name = src_name,
shards = shards,
extract_kwargs(kwargs, (:preferred_gcc_version,:preferred_llvm_version,:compilers,:allow_unsafe_flags,:lock_microarchitecture))...,
)
# Set up some bash traps
trapper_wrapper = """
# Stop if we hit any errors.
set -e
# If we're running as `bash`, then use the `DEBUG` and `ERR` traps
if [ \$(basename \$0) = "bash" ]; then
trap "RET=\\\$?; \\
trap - DEBUG INT TERM ERR EXIT; \\
set +e +x; \\
auto_install_license; \\
save_env; \\
exit \\\$RET" \\
EXIT
trap "RET=\\\$?; \\
trap - DEBUG INT TERM ERR EXIT; \\
set +e +x; \\
echo Previous command \\\$! exited with \\\$RET >&2; \\
save_env; \\
exit \\\$RET" \\
INT TERM ERR
# Start saving everything into our history
trap save_history DEBUG
else
# If we're running in `sh` or something like that, we need a
# slightly slimmer set of traps. :(
trap "RET=\\\$?; \\
echo Previous command exited with \\\$RET >&2; \\
set +e +x; \\
save_env; \\
exit \\\$RET" \\
EXIT INT TERM
fi
$(script)
"""
dest_prefix = Prefix(BinaryBuilderBase.destdir(prefix.path, concrete_platform))
did_succeed = with_logfile(dest_prefix, "$(src_name).log"; subdir=src_name) do io
# Let's start the presentations with BinaryBuilder.jl
write(io, "BinaryBuilder.jl version: $(get_bb_version())\n\n")
# Get the list of compilers...
compilers = extract_kwargs(kwargs, (:compilers,))
# ...because we want to log all their versions. However, we don't
# want this to be shown in the console, so we first run this without
# teeing to stdout
run(ur, `/bin/bash -l -c $(get_compilers_versions(; compilers...))`, io;
verbose = verbose, tee_stream = devnull)
timer.end_setup = time()
# Run the build script
timer.begin_build = time()
res = run(ur, `/bin/bash -l -c $(trapper_wrapper)`, io; verbose=verbose)
timer.end_build = time()
res
end
if !did_succeed
if debug
# Print debug prompt and paths to any generated log files
debug_shell_prompt = compose_debug_prompt(prefix.path)
@warn(debug_shell_prompt)
run_interactive(ur, `/bin/bash -l -i`)
end
msg = "Build for $(src_name) on $(triplet(platform)) did not complete successfully\n"
error(msg)
end
# Run an audit of the prefix to ensure it is properly relocatable
timer.begin_audit = time()
if !skip_audit
audit_result = audit(dest_prefix, src_name;
platform=platform, verbose=verbose,
has_csl = any(getname.(dependencies) .== "CompilerSupportLibraries_jll"),
autofix=autofix, require_license=require_license)
if !audit_result && !ignore_audit_errors
msg = replace("""
Audit failed for $(dest_prefix.path).
Address the errors above to ensure relocatability.
To override this check, set `ignore_audit_errors = true`.
""", '\n' => ' ')
error(strip(msg))
end
end
timer.end_audit = time()
# Finally, error out if something isn't satisfied
unsatisfied_so_die = false
for p in products
if platform isa AnyPlatform && !(p isa FileProduct)
# `AnyPlatform` is by design platform-independent, so we allow
# only `FileProduct`s.
error("Cannot have $(typeof(p)) for AnyPlatform")
end
if !satisfied(p, dest_prefix; verbose=verbose, platform=platform,
skip_dlopen=dont_dlopen)
if !verbose
# If we never got a chance to see the verbose output, give it here:
locate(p, dest_prefix; verbose=true, platform=platform,
skip_dlopen=dont_dlopen)
end
@error("Built $(src_name) but $(variable_name(p)) still unsatisfied:")
unsatisfied_so_die = true
end
end
if unsatisfied_so_die
error("Cannot continue with unsatisfied build products!")
end
# We also need to capture some info about each product
products_info = Dict{Product,Any}()
for p in products
product_path = locate(p, dest_prefix; platform=platform, skip_dlopen=dont_dlopen)
products_info[p] = Dict("path" => relpath(product_path, dest_prefix.path))
if p isa LibraryProduct || p isa FrameworkProduct
products_info[p]["soname"] = something(
Auditor.get_soname(product_path),
basename(product_path),
)
end
end
# Unsymlink all the deps from the dest_prefix
cleanup_dependencies(prefix, host_artifact_paths, default_host_platform)
cleanup_dependencies(prefix, target_artifact_paths, concrete_platform)
# Search for dead links in dest_prefix; raise warnings about them.
Auditor.warn_deadlinks(dest_prefix.path)
# Cull empty directories, for neatness' sake, unless auditing is disabled
if !skip_audit
for (root, dirs, files) = walkdir(dest_prefix.path; topdown=false)
# We do readdir() here because `walkdir()` does not do a true in-order traversal
if isempty(readdir(root))
rm(root)
end
end
end
# Compress log files
compress_dir(logdir(dest_prefix; subdir=src_name); verbose)
# Once we're built up, go ahead and package this dest_prefix out
timer.begin_package = time()
tarball_path, tarball_hash, git_hash = package(
dest_prefix,
joinpath(out_path, src_name),
src_version;
platform=platform,
verbose=verbose,
force=true,
# Do not include logs into the main tarball
filter=exclude_logs,
)
# Create another tarball only for the logs
package(
dest_prefix,
joinpath(out_path, src_name * "-logs"),
src_version;
platform=platform,
verbose=verbose,
force=true,
filter=only_logs,
)
timer.end_package = time()
build_output_meta[platform] = (
tarball_path,
tarball_hash,
git_hash,
products_info,
)
# Destroy the workspace, taking care to make sure that we don't run into any
# permissions errors while we do so.
Base.Filesystem.prepare_for_deletion(prefix.path)
rm(prefix.path; recursive=true)
# If the whole build_path is empty, then remove it too. If it's not, it's probably
# because some other build is doing something simultaneously with this target, and we
# don't want to mess with their stuff.
if isempty(readdir(build_path))
rm(build_path; recursive=true)