1 change: 1 addition & 0 deletions hw5/xzgao/.gitignore
@@ -0,0 +1 @@
Manifest.toml
10 changes: 10 additions & 0 deletions hw5/xzgao/Project.toml
@@ -0,0 +1,10 @@
[deps]
CSV = "336ed68f-0bac-5ca0-87d4-7b16caf5d00b"
CairoMakie = "13f3f980-e62b-5c42-98c6-ff1f3baf88f0"
DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0"
GenericMessagePassing = "09522ef3-fe1e-4698-a056-61d75781c616"
GenericTensorNetworks = "3521c873-ad32-4bb4-b63d-f4f178f42b49"
ProblemReductions = "899c297d-f7d2-4ebf-8815-a35996def416"
Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
TensorInference = "c2297e78-99bd-40ad-871d-f50e56b81012"
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
11 changes: 11 additions & 0 deletions hw5/xzgao/README.md
@@ -0,0 +1,11 @@
# BP for K-SAT

Backend: [GenericMessagePassing.jl](https://github.com/ArrogantGao/GenericMessagePassing.jl), which implements the BP algorithm, random K-SAT instance generation, and the corresponding tensor network model.
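
A minimal single-instance sketch, mirroring the calls used in `entropy_density.jl` below (no API beyond that script is assumed):

```julia
using GenericMessagePassing

n, k, m = 100, 3, 200                            # variables, clause size, clauses (α = m/n = 2.0)
k_sat = random_k_sat(n, k, m)                    # draw a random K-SAT instance
tn = tn_model(k_sat)                             # build its tensor network model
S = entropy_bp(tn.code, tn.tensors, BPConfig())  # run BP to estimate the entropy
@show S / n                                      # entropy density
```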

In `entropy_density.jl`, we run BP on random K-SAT instances with n = 100 and k = 3; `plot.jl` then plots the resulting entropy density:

![](data/entropy_density_n100_k3.png)
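
To reproduce, a rough sketch (assumptions: the dependencies in `Project.toml` can be resolved; since `Manifest.toml` is not committed, the backend may first need to be added from its GitHub URL):

```julia
using Pkg
Pkg.activate(@__DIR__)   # the hw5/xzgao environment described by Project.toml
# Pkg.add(url="https://github.com/ArrogantGao/GenericMessagePassing.jl")  # if not resolvable from a registry
Pkg.instantiate()

include("entropy_density.jl")  # writes data/entropy_density_n100_k3.csv
include("plot.jl")             # writes data/entropy_density_n100_k3.png
```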

For comparison, the corresponding figure from the book:

![](data/entropy_density_book.png)
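
As a sanity check: at α = 0 there are no clauses, so all 2^n assignments are satisfying and the entropy density is s = (1/n) ln 2^n = ln 2 ≈ 0.6931, which matches the first row of the CSV (`alpha = 0.0, s = 0.69314…`).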
Binary file added hw5/xzgao/data/entropy_density_book.png
274 changes: 274 additions & 0 deletions hw5/xzgao/data/entropy_density_n100_k3.csv
@@ -0,0 +1,274 @@
alpha,i,s
0.0,1,0.6931471805599453
0.1,1,0.6790999467837726
0.1,2,0.6800251804727442
0.1,3,0.6801899288447792
0.1,4,0.6795836826266146
0.1,5,0.6792935596747252
0.1,6,0.6803815452719987
0.1,7,0.6801618783355029
0.1,8,0.6790480481973937
0.1,9,0.6800503978197058
0.1,10,0.6803447416634999
0.2,1,0.6673829480080138
0.2,2,0.6670434230897822
0.2,3,0.6648712310302326
0.2,4,0.6646276394453658
0.2,5,0.6659300576623509
0.2,6,0.667085229771226
0.2,7,0.6659916616533295
0.2,8,0.6679906996620106
0.2,9,0.6647895324233575
0.2,10,0.6637528576653087
0.3,1,0.6525456697897927
0.3,2,0.654299490163617
0.3,3,0.6530088444872716
0.3,4,0.6507246565988851
0.3,5,0.6485257913830038
0.3,6,0.6549636690866625
0.3,7,0.6539124260317021
0.3,8,0.6513289698961374
0.3,9,0.6544591938584727
0.3,10,0.6536911660090448
0.4,1,0.6387054866324502
0.4,2,0.6423047839673345
0.4,3,0.6347521429057291
0.4,4,0.642619872891665
0.4,5,0.6379122701668761
0.4,6,0.63799669602225
0.4,7,0.6408790452687119
0.4,8,0.6377405291194315
0.4,9,0.6381629668182108
0.4,10,0.6448644540965185
0.5,1,0.6265623099720838
0.5,2,0.623927875967623
0.5,3,0.6251328867267543
0.5,4,0.6294577914305831
0.5,5,0.6247898422978961
0.5,6,0.6268226999573523
0.5,7,0.6293872638700229
0.5,8,0.6286583893390294
0.5,9,0.6262841180985075
0.5,10,0.6297481736014915
0.6,1,0.6114486172292307
0.6,2,0.6111951298387873
0.6,3,0.6126552389564782
0.6,4,0.6109492730546948
0.6,5,0.6178605246324542
0.6,6,0.6158778442640866
0.6,7,0.6154299221920287
0.6,8,0.6133573692976563
0.6,9,0.6121883190652502
0.6,10,0.6093772877343056
0.7,1,0.5912635581435949
0.7,2,0.6021092850389396
0.7,3,0.5944559657500806
0.7,4,0.5957982833129788
0.7,5,0.6035457190744373
0.7,6,0.6052223422785058
0.7,7,0.5983146867926818
0.7,8,0.5939132188742859
0.7,9,0.5993457733985194
0.7,10,0.5978126999998632
0.8,1,0.584252643437818
0.8,2,0.5808696455334558
0.8,3,0.5807135210774176
0.8,4,0.5828554569386983
0.8,5,0.5872187472347321
0.8,6,0.5875513664075709
0.8,7,0.587169057188139
0.8,8,0.5904828218903777
0.8,9,0.5842345454220221
0.8,10,0.5858498383573942
0.9,1,0.5739300071913969
0.9,2,0.5703809670872193
0.9,3,0.5724697157585872
0.9,4,0.5725680074881494
0.9,5,0.5728637077179407
0.9,6,0.5681119832541914
0.9,7,0.5671616075447853
0.9,8,0.5763220706562293
0.9,9,0.5714168975661065
0.9,10,0.5765423814373886
1.0,1,0.5548571268150341
1.0,2,0.5587804301673123
1.0,3,0.5593362870439788
1.0,4,0.5621929682657698
1.0,5,0.5614602364661441
1.0,6,0.5611640372233383
1.0,7,0.5710435974168447
1.0,8,0.5636294226323793
1.0,9,0.5527885527346308
1.0,10,0.5643153196540523
1.1,1,0.5398708384951282
1.1,2,0.5473373733521184
1.1,3,0.5470407644328801
1.1,4,0.5441801074454662
1.1,5,0.5420161002250731
1.1,6,0.5463893901313917
1.1,7,0.5470108458884015
1.1,8,0.537856387987518
1.1,9,0.5486751729834962
1.1,10,0.5421044436875438
1.2,1,0.5323499184341108
1.2,2,0.5331653899532108
1.2,3,0.5406454933383777
1.2,4,0.5386484475022736
1.2,5,0.5306032015650721
1.2,6,0.5372851004377622
1.2,7,0.5315864408051287
1.2,8,0.5364444528710947
1.2,9,0.53125920248462
1.2,10,0.5234734114167605
1.3,1,0.5106135903095841
1.3,2,0.5229121247337315
1.3,3,0.5177335378744238
1.3,4,0.5089306541048023
1.3,5,0.5158560829916774
1.3,6,0.5256370833261577
1.3,7,0.5153625526708594
1.3,8,0.5219495178383968
1.3,9,0.5056866506923129
1.3,10,0.5190417266699571
1.4,1,0.49934741078339256
1.4,2,0.49076389715995944
1.4,3,0.5094040106163116
1.4,4,0.48945464810369516
1.4,5,0.5104885839111089
1.4,6,0.5009997254404321
1.4,7,0.49492411788946555
1.4,8,0.5032272217870457
1.4,9,0.49849360087814093
1.4,10,0.5013925496720927
1.5,1,0.48728086654203523
1.5,2,0.4926943440235831
1.5,3,0.4908907922053655
1.5,4,0.4952785252205426
1.5,5,0.48601376629319076
1.5,6,0.4874022064379095
1.5,7,0.48988129089293975
1.5,8,0.5003835494426216
1.5,9,0.49266334435984704
1.5,10,0.49408034528986355
1.6,1,0.4835377784768341
1.6,2,0.4685377215579908
1.6,3,0.4786363712865587
1.6,4,0.4593144077645966
1.6,5,0.46608260691082637
1.6,6,0.4767872083502502
1.6,7,0.4755367849374978
1.6,8,0.4754268190522978
1.6,9,0.47340748619320594
1.6,10,0.4821446153215031
1.7,1,0.4708466762650886
1.7,2,0.441606714764976
1.7,3,0.45209962277215804
1.7,4,0.4730663034610707
1.7,5,0.4567936834899476
1.7,6,0.4583116454343104
1.7,7,0.4491132560109192
1.7,8,0.47205612205584946
1.7,9,0.4496554299666957
1.7,10,0.46681171267342286
1.8,1,0.4514875850814667
1.8,2,0.4515027632880637
1.8,3,0.43445219445119326
1.8,4,0.4341022066100708
1.8,5,0.45717403492633535
1.8,6,0.44517118943122225
1.8,7,0.44851458227336494
1.8,8,0.4636913870450634
1.8,9,0.4468698084302742
1.8,10,0.448620017547144
1.9,1,0.42234052869286365
1.9,2,0.4307660795850879
1.9,3,0.4188742370557052
1.9,4,0.43912425930305865
1.9,5,0.43499659312357397
1.9,6,0.4157837049014153
1.9,7,0.4452215824430968
1.9,8,0.4301823016840852
1.9,9,0.42962412854431475
1.9,10,0.42768640340421016
2.0,1,0.4355209608039018
2.0,2,0.41744631471869625
2.0,3,0.42592169226548504
2.0,4,0.43563667796999667
2.0,5,0.4152686180725072
2.0,6,0.41748740851884336
2.0,7,0.4214598050809417
2.0,8,0.42952243111888627
2.0,9,0.42466668366538296
2.0,10,0.426074851983079
2.1,1,0.41707688651724206
2.1,2,0.41062288992853696
2.1,3,0.4054184679809699
2.1,4,0.4132443522914631
2.1,5,0.39480199365134055
2.1,6,0.40570746787395245
2.1,7,0.40200378360208633
2.1,8,0.41578600645170255
2.1,9,0.41423265741067355
2.1,10,0.4075042303162798
2.2,1,0.39985853317694386
2.2,2,0.39169550427656985
2.2,3,0.3907998829837967
2.2,4,0.3848135146381096
2.2,5,0.41147161218804096
2.2,6,0.38670313516012933
2.2,7,0.36302293486321496
2.2,8,0.4032965161302723
2.2,9,0.3817160043875977
2.2,10,0.3987166952058528
2.3,1,0.35171364408759503
2.3,2,0.39147397330665273
2.3,3,0.38479424502692533
2.3,4,0.39206352711514697
2.3,5,0.37777586777704536
2.3,6,0.36541925753819626
2.3,7,0.3795943406135836
2.3,8,0.3742656741118522
2.3,9,0.40564121109827833
2.3,10,0.3788564880164945
2.4,1,0.3774607042776892
2.4,2,0.36609126605080244
2.4,3,0.37870833631054246
2.4,4,0.3697542173826738
2.4,5,0.3434891497861705
2.4,6,0.3686407746556351
2.4,7,0.3782787262899543
2.4,8,0.3875361282228726
2.4,9,0.347727551803425
2.4,10,0.3433190015413281
2.5,1,0.3547987670529708
2.5,2,0.3482528446092057
2.5,3,0.36116703403676587
2.5,4,0.35113680430324
2.5,5,0.37448954512660165
2.5,6,0.34150675376786616
2.5,7,0.3428279123402617
2.5,8,0.3544982389575442
2.5,9,0.35881892452176906
2.5,10,0.35716394464281004
2.6,1,0.3363127583214849
2.6,2,0.31390989862854013
2.6,3,0.35846021187084753
2.6,4,0.3141142268145741
2.6,5,0.31055982782638547
2.6,6,0.31930866646854184
2.6,7,0.34298013188399573
2.6,8,0.33939549621213033
2.6,9,0.3317609835271241
2.6,10,0.34537249532666814
2.7,1,0.29467733874929825
2.7,2,0.3363573026852739
2.7,3,0.31526877629403277
2.7,4,0.32437642800199995
2.7,5,0.3111740210534709
2.7,6,0.3142513099333933
2.7,7,0.3378583666336558
2.7,8,0.3440543054333602
2.7,9,0.3613402761371557
2.7,10,0.3138528042501571
2.8,1,0.3101174536605548
2.8,2,0.2903377189138998
Binary file added hw5/xzgao/data/entropy_density_n100_k3.png
16 changes: 16 additions & 0 deletions hw5/xzgao/entropy_density.jl
@@ -0,0 +1,16 @@
using GenericMessagePassing
using CSV, DataFrames

function main(n, k, ms, n_samples)
    # Create the output CSV with a header; CSV.write returns the file path,
    # so the later calls can append rows to it.
    df = CSV.write("data/entropy_density_n$(n)_k$(k).csv", DataFrame(alpha=Float64[], i=Int[], s=Float64[]))
    for m in ms
        for i in 1:n_samples
            k_sat = random_k_sat(n, k, m)                    # random K-SAT instance with m clauses
            tn = tn_model(k_sat)                             # tensor network model of the instance
            S = entropy_bp(tn.code, tn.tensors, BPConfig())  # BP estimate of the entropy
            CSV.write(df, DataFrame(alpha=m/n, i=i, s=S/n), append=true)  # record entropy density s = S/n
        end
    end
end

main(100, 3, 10:10:450, 10)
15 changes: 15 additions & 0 deletions hw5/xzgao/plot.jl
@@ -0,0 +1,15 @@
using CSV, DataFrames, CairoMakie
using Statistics

df = CSV.read("data/entropy_density_n100_k3.csv", DataFrame)

fig = Figure()
ax = Axis(fig[1, 1], xlabel="α", ylabel="s")

# Average the entropy density over the samples at each clause density α.
alphas = unique(df.alpha)
lines!(ax, alphas, [mean(df.s[df.alpha .== alpha]) for alpha in alphas], label="BP entropy density")

xlims!(ax, 0.0, 5.0)
ylims!(ax, 0.0, 0.7)

save("data/entropy_density_n100_k3.png", fig)