Skip to content

Commit 6b5a82e

Browse files
authored
Upgrade-OMEinsum, File IO (#101)
* upgrade-OMEinsum, new-fileio * update docstrings * bump version
1 parent 0942d7a commit 6b5a82e

File tree

9 files changed

+131
-24
lines changed

9 files changed

+131
-24
lines changed

Makefile

Lines changed: 2 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -3,14 +3,10 @@ JL = julia --project
33
default: init test
44

55
init:
6-
$(JL) -e 'using Pkg; Pkg.precompile()'
7-
init-docs:
8-
$(JL) -e 'using Pkg; Pkg.activate("docs"); Pkg.develop(path="."), Pkg.precompile()'
6+
$(JL) -e 'using Pkg; Pkg.precompile(); Pkg.activate("docs"); Pkg.develop(path=".")'
97

108
update:
11-
$(JL) -e 'using Pkg; Pkg.update(); Pkg.precompile()'
12-
update-docs:
13-
$(JL) -e 'using Pkg; Pkg.activate("docs"); Pkg.update(); Pkg.precompile()'
9+
$(JL) -e 'using Pkg; Pkg.update(); Pkg.activate("docs"); Pkg.update()'
1410

1511
test:
1612
$(JL) -e 'using Pkg; Pkg.test("GenericTensorNetworks")'

Project.toml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
name = "GenericTensorNetworks"
22
uuid = "3521c873-ad32-4bb4-b63d-f4f178f42b49"
33
authors = ["GiggleLiu <cacate0129@gmail.com> and contributors"]
4-
version = "4.0.1"
4+
version = "4.1.0"
55

66
[deps]
77
AbstractTrees = "1520ce14-60c1-5f80-bbc7-55ef81b5835c"
@@ -38,7 +38,7 @@ FFTW = "1.4"
3838
Graphs = "1.7"
3939
LinearAlgebra = "1"
4040
LuxorGraphPlot = "0.5"
41-
OMEinsum = "0.8"
41+
OMEinsum = "0.9.1"
4242
Polynomials = "4"
4343
Primes = "0.5"
4444
ProblemReductions = "0.3"

docs/src/performancetips.md

Lines changed: 4 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@ using GenericTensorNetworks, Graphs, Random
2020
graph = random_regular_graph(120, 3)
2121
iset = IndependentSet(graph)
2222
problem = GenericTensorNetwork(iset; optimizer=TreeSA(
23-
sc_target=20, sc_weight=1.0, rw_weight=3.0, ntrials=10, βs=0.01:0.1:15.0, niters=20))
23+
score=ScoreFunction(sc_target=20, sc_weight=1.0, rw_weight=3.0), ntrials=10, βs=0.01:0.1:15.0, niters=20))
2424
```
2525

2626
The `GenericTensorNetwork` constructor maps a problem to a tensor network with an optimized contraction order. The `optimizer` parameter specifies the algorithm to use:
@@ -75,20 +75,19 @@ The finite field approach requires only 298 KB, while using the `Polynomial` typ
7575
## 2. Slicing Technique for Large Problems
7676

7777
For large-scale applications, you can slice over certain degrees of freedom to reduce space complexity. This approach loops and accumulates over selected degrees of freedom, resulting in smaller tensor networks inside the loop.
78-
79-
In the `TreeSA` optimizer, set `nslices` to a value greater than zero:
78+
This can be achieved by setting the `slicer` parameter of the `GenericTensorNetwork` constructor.
8079

8180
```julia
8281
# Without slicing
8382
problem = GenericTensorNetwork(iset; optimizer=TreeSA(βs=0.01:0.1:25.0, ntrials=10, niters=10))
8483
contraction_complexity(problem)
8584

8685
# With slicing over 5 degrees of freedom
87-
problem = GenericTensorNetwork(iset; optimizer=TreeSA(βs=0.01:0.1:25.0, ntrials=10, niters=10, nslices=5))
86+
problem = GenericTensorNetwork(iset; optimizer=TreeSA(βs=0.01:0.1:25.0, ntrials=10, niters=10), slicer=TreeSASlicer(score=ScoreFunction(sc_target=10)))
8887
contraction_complexity(problem)
8988
```
9089

91-
In this example, slicing over 5 degrees of freedom reduces space complexity by a factor of 32 (2^5), while increasing computation time by less than a factor of 2.
90+
In this example, slicing with the `TreeSASlicer` reduces the space complexity to 2^10, at the cost of increased time complexity.
9291

9392
## 3. Accelerating Tropical Number Operations
9493

docs/src/ref.md

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -144,6 +144,14 @@ SABipartite
144144
KaHyParBipartite
145145
MergeVectors
146146
MergeGreedy
147+
TreeSASlicer
148+
ScoreFunction
149+
```
150+
151+
## FileIO
152+
```@docs
153+
save_tensor_network
154+
load_tensor_network
147155
```
148156

149157
## Others

src/GenericTensorNetworks.jl

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@ using Core: Argument
44
using TropicalNumbers
55
using OMEinsum
66
using OMEinsum: contraction_complexity, timespace_complexity, timespacereadwrite_complexity, getixsv, NestedEinsum, getixs, getiy, DynamicEinCode
7+
using OMEinsum.OMEinsumContractionOrders.JSON
78
using Graphs, Random
89
using DelimitedFiles, Serialization
910
using LuxorGraphPlot
@@ -26,7 +27,7 @@ import StatsBase
2627

2728
# OMEinsum
2829
export timespace_complexity, timespacereadwrite_complexity, contraction_complexity, @ein_str, getixsv, getiyv
29-
export GreedyMethod, TreeSA, SABipartite, KaHyParBipartite, MergeVectors, MergeGreedy
30+
export GreedyMethod, TreeSA, SABipartite, KaHyParBipartite, MergeVectors, MergeGreedy, TreeSASlicer, ScoreFunction
3031

3132
# estimate memory
3233
export estimate_memory
@@ -80,6 +81,9 @@ export read_size, read_count, read_config, read_size_count, read_size_config
8081
export show_graph, show_configs, show_einsum, GraphDisplayConfig, render_locs, show_landscape
8182
export AbstractLayout, SpringLayout, StressLayout, SpectralLayout, Layered, LayeredSpringLayout, LayeredStressLayout
8283

84+
# FileIO
85+
export save_tensor_network, load_tensor_network
86+
8387
project_relative_path(xs...) = normpath(joinpath(dirname(dirname(pathof(@__MODULE__))), xs...))
8488

8589
# Mods.jl fixed to v1.3.4

src/fileio.jl

Lines changed: 66 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -123,3 +123,69 @@ function dict_deserialize_tree(id::UInt, d::Dict)
123123
end
124124
end
125125

126+
"""
127+
save_tensor_network(tn::GenericTensorNetwork; folder::String)
128+
129+
Serialize a tensor network to disk for storage/reloading. Creates three structured files:
130+
- `code.json`: OMEinsum contraction code (tree structure and contraction order)
131+
- `fixedvertices.json`: JSON-serialized Dict of pinned vertex configurations
132+
- `problem.json`: Problem specification using ProblemReductions serialization
133+
134+
The target folder will be created recursively if it doesn't exist. Files are overwritten
135+
if they already exist. Uses JSON for human-readable serialization with type preservation.
136+
137+
The saved files can be loaded using [`load_tensor_network`](@ref).
138+
139+
# Arguments
140+
- `tn::GenericTensorNetwork`: a [`GenericTensorNetwork`](@ref) instance to serialize. Must contain valid code, problem, and fixedvertices fields.
141+
- `folder::String`: Destination directory path. Parent directories will be created as needed.
142+
"""
143+
function save_tensor_network(tn::GenericTensorNetwork; folder::String)
144+
!isdir(folder) && mkpath(folder)
145+
146+
OMEinsum.writejson(joinpath(folder, "code.json"), tn.code)
147+
148+
open(joinpath(folder, "fixedvertices.json"), "w") do io
149+
JSON.print(io, tn.fixedvertices, 2)
150+
end
151+
152+
ProblemReductions.writejson(joinpath(folder, "problem.json"), tn.problem)
153+
return nothing
154+
end
155+
156+
"""
157+
load_tensor_network(folder::String) -> GenericTensorNetwork
158+
159+
Load a tensor network from disk that was previously saved using [`save_tensor_network`](@ref).
160+
Reconstructs the network from three required files: contraction code, fixed vertices mapping, and problem specification.
161+
162+
# Arguments
163+
- `folder::String`: Path to directory containing saved network files. Must contain:
164+
- `code.json`: Contraction order/structure from OMEinsum
165+
- `fixedvertices.json`: Dictionary of pinned vertex states
166+
- `problem.json`: Problem specification and parameters
167+
168+
# Returns
169+
- `GenericTensorNetwork`: Reconstructed tensor network.
170+
"""
171+
function load_tensor_network(folder::String)
172+
!isdir(folder) && throw(SystemError("Folder not found: $folder"))
173+
174+
code_path = joinpath(folder, "code.json")
175+
fixed_path = joinpath(folder, "fixedvertices.json")
176+
problem_path = joinpath(folder, "problem.json")
177+
178+
!isfile(code_path) && throw(SystemError("Code file not found: $code_path"))
179+
!isfile(fixed_path) && throw(SystemError("Fixedvertices file not found: $fixed_path"))
180+
!isfile(problem_path) && throw(SystemError("Problem file not found: $problem_path"))
181+
182+
code = OMEinsum.readjson(code_path)
183+
184+
fixed_dict = JSON.parsefile(fixed_path)
185+
fixedvertices = Dict{labeltype(code),Int}(parse(Int, k) => v for (k, v) in fixed_dict)
186+
187+
problem = ProblemReductions.readjson(problem_path)
188+
189+
return GenericTensorNetwork(problem, code, fixedvertices)
190+
end
191+

src/networks.jl

Lines changed: 15 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -16,29 +16,36 @@ end
1616

1717
"""
1818
$TYPEDEF
19-
GenericTensorNetwork(problem::ConstraintSatisfactionProblem; openvertices=(), fixedvertices=Dict(), optimizer=GreedyMethod())
19+
GenericTensorNetwork(problem::ConstraintSatisfactionProblem; openvertices=(), fixedvertices=Dict(), optimizer=GreedyMethod(), slicer=nothing)
2020
2121
The generic tensor network that generated from a [`ConstraintSatisfactionProblem`](@ref).
2222
2323
Positional arguments
2424
-------------------------------
25-
* `problem` is the graph problem.
26-
* `code` is the tensor network contraction code.
27-
* `fixedvertices` is a dictionary specifying the fixed dimensions.
25+
- `problem` is the constraint satisfaction problem.
26+
27+
Keyword arguments
28+
-------------------------------
29+
- `openvertices` is a vector of open indices, which are the degrees of freedom that appear in the output tensor.
30+
- `fixedvertices` is a dictionary specifying the fixed degrees of freedom. For example, to fix variable `5` to the value 0, set `fixedvertices = Dict(5 => 0)`.
31+
- `optimizer` is the contraction order optimizer for the generated tensor network.
32+
- `slicer` is the slicer for the tensor network; it can reduce memory usage at the cost of additional computing time.
33+
34+
For more information about contraction order optimization and slicing, please refer to the [OMEinsumContractionOrders documentation](https://tensorbfs.github.io/OMEinsumContractionOrders.jl/dev/).
2835
"""
2936
struct GenericTensorNetwork{CFG, CT, LT}
3037
problem::CFG
3138
code::CT
3239
fixedvertices::Dict{LT,Int}
3340
end
34-
function GenericTensorNetwork(problem::ConstraintSatisfactionProblem; openvertices=(), fixedvertices=Dict(), optimizer=GreedyMethod())
41+
function GenericTensorNetwork(problem::ConstraintSatisfactionProblem; openvertices=(), fixedvertices=Dict(), optimizer=GreedyMethod(), slicer=nothing)
3542
rcode = rawcode(problem; openvertices)
36-
code = _optimize_code(rcode, uniformsize_fix(rcode, num_flavors(problem), fixedvertices), optimizer, MergeVectors())
43+
code = _optimize_code(rcode, uniformsize_fix(rcode, num_flavors(problem), fixedvertices), optimizer, MergeVectors(), slicer)
3744
return GenericTensorNetwork(problem, code, Dict{labeltype(code),Int}(fixedvertices))
3845
end
3946
# a unified interface to optimize the contraction code
40-
_optimize_code(code, size_dict, optimizer::Nothing, simplifier) = code
41-
_optimize_code(code, size_dict, optimizer, simplifier) = optimize_code(code, size_dict, optimizer, simplifier)
47+
_optimize_code(code, size_dict, optimizer::Nothing, simplifier, slicer) = code
48+
_optimize_code(code, size_dict, optimizer, simplifier, slicer) = optimize_code(code, size_dict, optimizer; simplifier, slicer)
4249

4350
function Base.show(io::IO, tn::GenericTensorNetwork)
4451
println(io, "$(typeof(tn))")

test/fileio.jl

Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -51,3 +51,30 @@ end
5151
@test ma == tree
5252
end
5353

54+
@testset "save load GenericTensorNetwork" begin
55+
g = smallgraph(:petersen)
56+
problem = IndependentSet(g, UnitWeight(10))
57+
tn = GenericTensorNetwork(problem; fixedvertices=Dict(1=>0, 2=>1))
58+
folder = tempname()
59+
save_tensor_network(tn; folder=folder)
60+
tn2 = load_tensor_network(folder)
61+
@test tn.problem == tn2.problem
62+
@test tn.code == tn2.code
63+
@test tn.fixedvertices == tn2.fixedvertices
64+
@test solve(tn, SizeMax()) == solve(tn2, SizeMax())
65+
66+
# test with empty fixedvertices
67+
tn3 = GenericTensorNetwork(problem)
68+
folder2 = tempname()
69+
save_tensor_network(tn3; folder=folder2)
70+
tn4 = load_tensor_network(folder2)
71+
@test tn3.problem == tn4.problem
72+
@test tn3.code == tn4.code
73+
@test tn3.fixedvertices == tn4.fixedvertices
74+
75+
# test error cases
76+
empty_folder = tempname()
77+
mkpath(empty_folder)
78+
@test_throws SystemError load_tensor_network(empty_folder)
79+
end
80+

test/interfaces.jl

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -86,7 +86,7 @@ end
8686

8787
@testset "slicing" begin
8888
g = Graphs.smallgraph("petersen")
89-
gp = GenericTensorNetwork(IndependentSet(g), optimizer=TreeSA(nslices=5, ntrials=1))
89+
gp = GenericTensorNetwork(IndependentSet(g), optimizer=TreeSA(ntrials=1), slicer=TreeSASlicer(score=ScoreFunction(sc_target=2)))
9090
res1 = solve(gp, SizeMax())[]
9191
res2 = solve(gp, CountingAll())[]
9292
res3 = solve(gp, CountingMax(Single))[]
@@ -278,4 +278,4 @@ end
278278
graph = UnitDiskGraph(fullerene(), sqrt(5))
279279
spin_glass = SpinGlass(graph, UnitWeight(ne(graph)), zeros(Int, nv(graph)))
280280
@test log(solve(spin_glass, PartitionFunction(1.0))[])/nv(graph) 1.3073684577607942
281-
end
281+
end

0 commit comments

Comments
 (0)