diff --git a/.gitignore b/.gitignore index d9695cb..f1cbd1f 100644 --- a/.gitignore +++ b/.gitignore @@ -22,3 +22,8 @@ docs/src_stash/test1_files/figure-commonmark/fig-limits-output-1.svg docs/src_stash/*_files docs/src_stash/*.md docs/src_stash/*.html +docs/src/tutorials/intermediate/clustered_sites_probo_allsites.jld2 + +projects/clustered_sites/intermediate/* + +build/* diff --git a/Project.toml b/Project.toml index 7a5a80d..e738ec1 100644 --- a/Project.toml +++ b/Project.toml @@ -7,15 +7,18 @@ version = "0.2.0" Bijectors = "76274a88-744f-5084-9051-94815aaf08c4" BlockDiagonals = "0a1fb500-61f7-11e9-3c65-f5ef3456f9f0" ChainRulesCore = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4" +Clustering = "aaaa29a8-35af-508c-8bc3-b662a17a0fe5" Combinatorics = "861a8166-3701-5b0c-9a16-15d98fcdc6aa" CommonSolve = "38540f10-b2f7-11e9-35d8-d573e4eb0ff2" ComponentArrays = "b0b7db55-cfe3-40fc-9ded-d10e2dbeff66" DifferentiationInterface = "a0c0ee7d-e4b9-4e03-894e-1c5f64a51d63" +Distances = "b4f34e82-e78d-54a5-968a-f98e89d6e8f7" DistributionFits = "45214091-1ed4-4409-9bcf-fdb48a05e921" Distributions = "31c24e10-a181-5473-b8eb-7969acd0382f" FillArrays = "1a297f60-69ca-5386-bcde-b61e274b549b" Functors = "d9f16b24-f501-4c13-a1f2-28368ffc5196" GPUArraysCore = "46192b85-c4d5-4398-a991-12ede77f4527" +IterTools = "c8e1da08-722c-5040-9ed9-7db0dc04731e" KernelAbstractions = "63c18a36-062a-441e-b654-da1e3ab1ce7c" LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" LogExpFunctions = "2ab3a3ac-af41-5b50-aa03-7779005ae688" @@ -25,6 +28,7 @@ Missings = "e1d29d7a-bbdc-5cf2-9ac0-f12de2c33e28" NaNMath = "77ba4419-2d1f-58cd-9bb1-8ffee604a2e3" Optimisers = "3bd65402-5787-11e9-1adc-39752487f4e2" Optimization = "7f7a1694-90dd-40f0-9382-eb1efda571ba" +PDMats = "90014a1f-27ba-587c-ab20-58faa44d9150" Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" StableRNGs = "860ef19b-820b-49d6-a774-d7a799459cd3" StaticArrays = "90137ffa-7385-5640-81b9-e52037218182" @@ -39,9 +43,12 @@ CUDA = 
"052768ef-5323-5732-b1bb-66c8b64840ba" Flux = "587475ba-b771-5e3f-ad9e-33799f191a9c" Lux = "b2108857-7c20-44ae-9111-449ecde12c47" SimpleChains = "de6bee2f-e2f4-4ec7-b6ed-219cc6f6e9e5" +DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0" + [extensions] HybridVariationalInferenceCUDAExt = "CUDA" +HybridVariationalInferenceDataFramesExt = "DataFrames" HybridVariationalInferenceFluxExt = "Flux" HybridVariationalInferenceLuxExt = "Lux" HybridVariationalInferenceSimpleChainsExt = "SimpleChains" @@ -51,16 +58,20 @@ Bijectors = "0.14, 0.15" BlockDiagonals = "0.1.42, 0.2" CUDA = "5.7" ChainRulesCore = "1.25" +Clustering = "0.15.8" Combinatorics = "1.0.2" CommonSolve = "0.2.4" ComponentArrays = "0.15.19" +DataFrames = "1.8.2" DifferentiationInterface = "0.6.54, 0.7" +Distances = "0.10.12" DistributionFits = "0.3.9" Distributions = "0.25.117" FillArrays = "1.13.0" Flux = "0.14, 0.15, 0.16" Functors = "0.4, 0.5" GPUArraysCore = "0.1, 0.2" +IterTools = "1.10.0" KernelAbstractions = "0.9.34" LinearAlgebra = "1.10" LogExpFunctions = "0.3.29" @@ -70,9 +81,10 @@ MLUtils = "0.4.5" Missings = "1.2.0" NaNMath = "1.1.3" Optimisers = "0.4.6" -Optimization = "3.11, 4" +Optimization = "5.3" +PDMats = "0.11.37" Random = "1.10.0" -SimpleChains = "0.4.8" +SimpleChains = "0.4.7" StableRNGs = "1.0.2" StaticArrays = "1.9.13" StatsBase = "0.34.4" diff --git a/_typos.toml b/_typos.toml index e6619d1..ff0a740 100644 --- a/_typos.toml +++ b/_typos.toml @@ -5,3 +5,6 @@ extend-exclude = ["docs/src_stash/"] SOM = "SOM" negLogLik = "negLogLik" Missings = "Missings" + +[default.extend-identifiers] +bse = "bse" diff --git a/docs/make.jl b/docs/make.jl index 9ff370c..5f3e884 100644 --- a/docs/make.jl +++ b/docs/make.jl @@ -25,6 +25,7 @@ makedocs(; "How to" => [ ".. use GPU" => "tutorials/lux_gpu.md", ".. specify log-Likelihood" => "tutorials/logden_user.md", + ".. specify penalties" => "tutorials/penalty.md", ".. model independent parameters" => "tutorials/blocks_corr.md", ".. 
model site-global corr" => "tutorials/corr_site_global.md", ], diff --git a/docs/src/tutorials/Manifest-v1.11.toml b/docs/src/tutorials/Manifest-v1.11.toml index 4f4e554..403e39b 100644 --- a/docs/src/tutorials/Manifest-v1.11.toml +++ b/docs/src/tutorials/Manifest-v1.11.toml @@ -26,6 +26,30 @@ weakdeps = ["ChainRulesCore", "Test"] AbstractFFTsChainRulesCoreExt = "ChainRulesCore" AbstractFFTsTestExt = "Test" +[[deps.AbstractMCMC]] +deps = ["BangBang", "ConsoleProgressMonitor", "Dates", "Distributed", "LogDensityProblems", "Logging", "LoggingExtras", "ProgressLogging", "Random", "StatsBase", "TerminalLoggers", "UUIDs"] +git-tree-sha1 = "8ac6182431567907e0d5170bcac6dd48fa541f78" +uuid = "80f14c24-f653-4e6a-9b94-39d6b0f70001" +version = "5.15.1" + + [deps.AbstractMCMC.extensions] + AbstractMCMCOnlineStatsExt = "OnlineStats" + AbstractMCMCTensorBoardLoggerExt = "TensorBoardLogger" + + [deps.AbstractMCMC.weakdeps] + OnlineStats = "a15396b6-48d5-5d58-9928-6d29437db91e" + TensorBoardLogger = "899adc3e-224a-11e9-021f-63837185c80f" + +[[deps.AbstractPPL]] +deps = ["AbstractMCMC", "Accessors", "BangBang", "DensityInterface", "JSON", "LinearAlgebra", "MacroTools", "OrderedCollections", "Random", "StatsBase"] +git-tree-sha1 = "e7be2de9646c1f61332de9f1e32c7dedf1e00831" +uuid = "7a57a42e-76ec-4ea3-a279-07e840d6d9cf" +version = "0.14.2" +weakdeps = ["Distributions"] + + [deps.AbstractPPL.extensions] + AbstractPPLDistributionsExt = ["Distributions", "LinearAlgebra"] + [[deps.AbstractTrees]] git-tree-sha1 = "2d9c9a55f9c93e8887ad391fbae72f8ef55e1177" uuid = "1520ce14-60c1-5f80-bbc7-55ef81b5835c" @@ -33,9 +57,9 @@ version = "0.4.5" [[deps.Accessors]] deps = ["CompositionsBase", "ConstructionBase", "Dates", "InverseFunctions", "MacroTools"] -git-tree-sha1 = "856ecd7cebb68e5fc87abecd2326ad59f0f911f3" +git-tree-sha1 = "2eeb2c9bef11013efc6f8f97f32ee59b146b09fb" uuid = "7d9f7c33-5ae7-4f3b-8dc6-eff91059b697" -version = "0.1.43" +version = "0.1.44" [deps.Accessors.extensions] AxisKeysExt = 
"AxisKeys" @@ -57,9 +81,9 @@ version = "0.1.43" [[deps.Adapt]] deps = ["LinearAlgebra", "Requires"] -git-tree-sha1 = "7e35fca2bdfba44d797c53dfe63a51fabf39bfc0" +git-tree-sha1 = "35ea197a51ce46fcd01c4a44befce0578a1aaeca" uuid = "79e6a3ab-5dfb-504d-930d-738a2a938a0e" -version = "4.4.0" +version = "4.5.0" weakdeps = ["SparseArrays", "StaticArrays"] [deps.Adapt.extensions] @@ -94,11 +118,12 @@ version = "1.1.2" [[deps.ArrayInterface]] deps = ["Adapt", "LinearAlgebra"] -git-tree-sha1 = "d81ae5489e13bc03567d4fbbb06c546a5e53c857" +git-tree-sha1 = "78b3a7a536b4b0a747a0f296ea77091ca0a9f9a3" uuid = "4fba245c-0d91-5ea0-9b3e-6abc04ee57a9" -version = "7.22.0" +version = "7.23.0" [deps.ArrayInterface.extensions] + ArrayInterfaceAMDGPUExt = "AMDGPU" ArrayInterfaceBandedMatricesExt = "BandedMatrices" ArrayInterfaceBlockBandedMatricesExt = "BlockBandedMatrices" ArrayInterfaceCUDAExt = "CUDA" @@ -113,6 +138,7 @@ version = "7.22.0" ArrayInterfaceTrackerExt = "Tracker" [deps.ArrayInterface.weakdeps] + AMDGPU = "21141c5a-9bdb-4563-92ae-f87d6854732e" BandedMatrices = "aae01518-5342-5314-be14-df237901396f" BlockBandedMatrices = "ffab5731-97b5-5995-9138-79e8c1846df0" CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba" @@ -132,9 +158,9 @@ version = "1.11.0" [[deps.Atomix]] deps = ["UnsafeAtomics"] -git-tree-sha1 = "29bb0eb6f578a587a49da16564705968667f5fa8" +git-tree-sha1 = "b8651b2eb5796a386b0398a20b519a6a6150f75c" uuid = "a9b6321e-bd34-4604-b9c9-b65b8de01458" -version = "1.1.2" +version = "1.1.3" [deps.Atomix.extensions] AtomixCUDAExt = "CUDA" @@ -174,9 +200,9 @@ version = "0.6.1" [[deps.BangBang]] deps = ["Accessors", "ConstructionBase", "InitialValues", "LinearAlgebra"] -git-tree-sha1 = "308d82aa3d83140909590aa5a7824540944f110f" +git-tree-sha1 = "cceb62468025be98d42a5dc581b163c20896b040" uuid = "198e06fe-97b7-11e9-32a5-e1d131e6ad66" -version = "0.4.8" +version = "0.4.9" [deps.BangBang.extensions] BangBangChainRulesCoreExt = "ChainRulesCore" @@ -209,14 +235,13 @@ uuid = 
"9718e550-a3fa-408a-8086-8db961cd8217" version = "0.1.1" [[deps.Bijectors]] -deps = ["ArgCheck", "ChainRulesCore", "ChangesOfVariables", "Distributions", "DocStringExtensions", "Functors", "InverseFunctions", "IrrationalConstants", "LinearAlgebra", "LogExpFunctions", "MappedArrays", "Random", "Reexport", "Roots", "SparseArrays", "Statistics"] -git-tree-sha1 = "52f3f101c0c541145da25fba9805f3ef076f2d96" +deps = ["AbstractPPL", "ArgCheck", "ChainRulesCore", "ChangesOfVariables", "DifferentiationInterface", "Distributions", "DocStringExtensions", "EnzymeCore", "FillArrays", "Functors", "InverseFunctions", "IrrationalConstants", "LinearAlgebra", "LogExpFunctions", "MappedArrays", "Random", "Reexport", "Roots", "SparseArrays", "Statistics", "Test"] +git-tree-sha1 = "d6ee8f89dd20f933fbdad578a798e32babb617ee" uuid = "76274a88-744f-5084-9051-94815aaf08c4" -version = "0.15.16" +version = "0.15.20" [deps.Bijectors.extensions] BijectorsDistributionsADExt = "DistributionsAD" - BijectorsEnzymeCoreExt = "EnzymeCore" BijectorsForwardDiffExt = "ForwardDiff" BijectorsLazyArraysExt = "LazyArrays" BijectorsMooncakeExt = "Mooncake" @@ -226,7 +251,6 @@ version = "0.15.16" [deps.Bijectors.weakdeps] ChainRules = "082447d4-558c-5d27-93f4-14fc19e9eca2" DistributionsAD = "ced4e74d-a319-5a8a-b0ac-84af2272839c" - EnzymeCore = "f151be2c-9106-41f4-ab19-57ee4f262869" ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210" LazyArrays = "5078a376-72f3-5289-bfd5-ec5146d43c02" Mooncake = "da2b9cff-9c12-43a0-ae48-6db2b0edb7d6" @@ -287,10 +311,10 @@ uuid = "4e9b3aee-d8a1-5a3d-ad8b-7d824db253f0" version = "1.0.1+0" [[deps.CUDA]] -deps = ["AbstractFFTs", "Adapt", "BFloat16s", "CEnum", "CUDA_Compiler_jll", "CUDA_Driver_jll", "CUDA_Runtime_Discovery", "CUDA_Runtime_jll", "Crayons", "DataFrames", "ExprTools", "GPUArrays", "GPUCompiler", "GPUToolbox", "KernelAbstractions", "LLVM", "LLVMLoopInfo", "LazyArtifacts", "Libdl", "LinearAlgebra", "Logging", "NVTX", "Preferences", "PrettyTables", "Printf", "Random", 
"Random123", "RandomNumbers", "Reexport", "SparseArrays", "StaticArrays", "Statistics", "demumble_jll"] -git-tree-sha1 = "3fe1fb600b6ec029697416d5851ef0661c538f20" +deps = ["AbstractFFTs", "Adapt", "BFloat16s", "CEnum", "CUDA_Compiler_jll", "CUDA_Driver_jll", "CUDA_Runtime_Discovery", "CUDA_Runtime_jll", "Crayons", "ExprTools", "GPUArrays", "GPUCompiler", "GPUToolbox", "KernelAbstractions", "LLVM", "LLVMLoopInfo", "LazyArtifacts", "Libdl", "LinearAlgebra", "Logging", "NVTX", "Preferences", "PrettyTables", "Printf", "Random", "Random123", "RandomNumbers", "Reexport", "SparseArrays", "StaticArrays", "Statistics", "demumble_jll"] +git-tree-sha1 = "ea6a2ab8307059b6c9ea186ff7dfcd032a13b731" uuid = "052768ef-5323-5732-b1bb-66c8b64840ba" -version = "5.9.6" +version = "5.11.0" [deps.CUDA.extensions] ChainRulesCoreExt = "ChainRulesCore" @@ -306,15 +330,15 @@ version = "5.9.6" [[deps.CUDA_Compiler_jll]] deps = ["Artifacts", "CUDA_Driver_jll", "CUDA_Runtime_jll", "JLLWrappers", "LazyArtifacts", "Libdl", "TOML"] -git-tree-sha1 = "e547b2202721853ec06c6d9a71c87426419ba765" +git-tree-sha1 = "8c19e97de5b7574672e4a7a3abd55714ad66d59a" uuid = "d1e2174e-dfdc-576e-b43e-73b79eb1aca8" -version = "0.4.1+1" +version = "0.4.2+0" [[deps.CUDA_Driver_jll]] -deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] -git-tree-sha1 = "23bf4e60006b78544f753880fbcf1aa158a7669c" +deps = ["Artifacts", "JLLWrappers", "Libdl", "TOML"] +git-tree-sha1 = "061f39cc84e99928830aa1005d79f7e99097ba28" uuid = "4ee394cb-3365-5eb0-8335-949819d2adfc" -version = "13.1.0+2" +version = "13.2.0+0" [[deps.CUDA_Runtime_Discovery]] deps = ["Libdl"] @@ -324,15 +348,15 @@ version = "1.0.0" [[deps.CUDA_Runtime_jll]] deps = ["Artifacts", "CUDA_Driver_jll", "JLLWrappers", "LazyArtifacts", "Libdl", "TOML"] -git-tree-sha1 = "92cd84e2b760e471d647153ea5efc5789fc5e8b2" +git-tree-sha1 = "af17d37b5b8b4d7525f8902eba1ef6141a9a7d3b" uuid = "76a88914-d11a-5bdc-97e0-2f5a05c973a2" -version = "0.19.2+0" +version = "0.21.0+0" [[deps.CUDNN_jll]] 
deps = ["Artifacts", "CUDA_Runtime_jll", "JLLWrappers", "LazyArtifacts", "Libdl", "TOML"] -git-tree-sha1 = "dff9a69017686c95f1fd601c63c088244b3db75b" +git-tree-sha1 = "70dea6a7133d2100a143b515a00d6d887e208500" uuid = "62b44479-cb7b-5706-934f-f13b2eb2e645" -version = "9.17.1+0" +version = "9.20.0+0" [[deps.Cairo]] deps = ["Cairo_jll", "Colors", "Glib_jll", "Graphics", "Libdl", "Pango_jll"] @@ -342,15 +366,15 @@ version = "1.1.1" [[deps.CairoMakie]] deps = ["CRC32c", "Cairo", "Cairo_jll", "Colors", "FileIO", "FreeType", "GeometryBasics", "LinearAlgebra", "Makie", "PrecompileTools"] -git-tree-sha1 = "5017d6849aff775febd36049f7d926a5fb6677ec" +git-tree-sha1 = "fa072933899aae6dc61dde934febed8254e66c6a" uuid = "13f3f980-e62b-5c42-98c6-ff1f3baf88f0" -version = "0.15.8" +version = "0.15.9" [[deps.Cairo_jll]] -deps = ["Artifacts", "Bzip2_jll", "CompilerSupportLibraries_jll", "Fontconfig_jll", "FreeType2_jll", "Glib_jll", "JLLWrappers", "LZO_jll", "Libdl", "Pixman_jll", "Xorg_libXext_jll", "Xorg_libXrender_jll", "Zlib_jll", "libpng_jll"] -git-tree-sha1 = "a21c5464519504e41e0cbc91f0188e8ca23d7440" +deps = ["Artifacts", "Bzip2_jll", "CompilerSupportLibraries_jll", "Fontconfig_jll", "FreeType2_jll", "Glib_jll", "JLLWrappers", "Libdl", "Pixman_jll", "Xorg_libXext_jll", "Xorg_libXrender_jll", "Zlib_jll", "libpng_jll"] +git-tree-sha1 = "d0efe2c6fdcdaa1c161d206aa8b933788397ec71" uuid = "83423d85-b0ee-5818-9007-b63ccbeb887a" -version = "1.18.5+1" +version = "1.18.6+0" [[deps.ChainRules]] deps = ["Adapt", "ChainRulesCore", "Compat", "Distributed", "GPUArraysCore", "IrrationalConstants", "LinearAlgebra", "Random", "RealDot", "SparseArrays", "SparseInverseSubset", "Statistics", "StructArrays", "SuiteSparse"] @@ -360,9 +384,9 @@ version = "1.73.0" [[deps.ChainRulesCore]] deps = ["Compat", "LinearAlgebra"] -git-tree-sha1 = "e4c6a16e77171a5f5e25e9646617ab1c276c5607" +git-tree-sha1 = "12177ad6b3cad7fd50c8b3825ce24a99ad61c18f" uuid = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4" -version = 
"1.26.0" +version = "1.26.1" weakdeps = ["SparseArrays"] [deps.ChainRulesCore.extensions] @@ -402,6 +426,12 @@ git-tree-sha1 = "05ba0d07cd4fd8b7a39541e31a7b0254704ea581" uuid = "fb6a15b2-703c-40df-9091-08a04967cfa9" version = "0.1.13" +[[deps.Clustering]] +deps = ["Distances", "LinearAlgebra", "NearestNeighbors", "Printf", "Random", "SparseArrays", "Statistics", "StatsBase"] +git-tree-sha1 = "3e22db924e2945282e70c33b75d4dde8bfa44c94" +uuid = "aaaa29a8-35af-508c-8bc3-b662a17a0fe5" +version = "0.15.8" + [[deps.CodecZlib]] deps = ["TranscodingStreams", "Zlib_jll"] git-tree-sha1 = "962834c22b66e32aa10f7611c08c8ca4e20749a9" @@ -484,13 +514,14 @@ version = "1.1.1+0" [[deps.ComponentArrays]] deps = ["Adapt", "ArrayInterface", "ChainRulesCore", "ConstructionBase", "Functors", "LinearAlgebra", "StaticArrayInterface", "StaticArraysCore"] -git-tree-sha1 = "0d1b8b3d556d70a29ad515325cd2f5f4ed703e09" +git-tree-sha1 = "f9873e13c3f89808e518118951eb6b244f7af735" uuid = "b0b7db55-cfe3-40fc-9ded-d10e2dbeff66" -version = "0.15.32" +version = "0.15.33" [deps.ComponentArrays.extensions] ComponentArraysGPUArraysExt = "GPUArrays" ComponentArraysKernelAbstractionsExt = "KernelAbstractions" + ComponentArraysMooncakeExt = "Mooncake" ComponentArraysOptimisersExt = "Optimisers" ComponentArraysReactantExt = "Reactant" ComponentArraysRecursiveArrayToolsExt = "RecursiveArrayTools" @@ -502,6 +533,7 @@ version = "0.15.32" [deps.ComponentArrays.weakdeps] GPUArrays = "0c68f7d7-f131-5f86-a1c3-88cf8149b2d7" KernelAbstractions = "63c18a36-062a-441e-b654-da1e3ab1ce7c" + Mooncake = "da2b9cff-9c12-43a0-ae48-6db2b0edb7d6" Optimisers = "3bd65402-5787-11e9-1adc-39752487f4e2" Reactant = "3c362404-f566-11ee-1572-e11a4b42c853" RecursiveArrayTools = "731186ca-8d62-57ce-b412-fbd966d074cd" @@ -521,9 +553,9 @@ weakdeps = ["InverseFunctions"] [[deps.ComputePipeline]] deps = ["Observables", "Preferences"] -git-tree-sha1 = "76dab592fa553e378f9dd8adea16fe2591aa3daa" +git-tree-sha1 = 
"3b4be73db165146d8a88e47924f464e55ab053cd" uuid = "95dc2771-c249-4cd0-9c9f-1f3b4330693c" -version = "0.1.6" +version = "0.1.7" [[deps.ConcreteStructs]] git-tree-sha1 = "f749037478283d372048690eb3b5f92a79432b34" @@ -532,9 +564,9 @@ version = "0.2.3" [[deps.ConcurrentUtilities]] deps = ["Serialization", "Sockets"] -git-tree-sha1 = "d9d26935a0bcffc87d2613ce14c527c99fc543fd" +git-tree-sha1 = "21d088c496ea22914fe80906eb5bce65755e5ec8" uuid = "f0e56b4a-5159-44fe-b623-3e5288b988bb" -version = "2.5.0" +version = "2.5.1" [[deps.ConsoleProgressMonitor]] deps = ["Logging", "ProgressMeter"] @@ -564,6 +596,18 @@ git-tree-sha1 = "439e35b0b36e2e5881738abc8857bd92ad6ff9a8" uuid = "d38c429a-6771-53c6-b99e-75d170b6e991" version = "0.6.3" +[[deps.CoreMath]] +deps = ["CoreMath_jll"] +git-tree-sha1 = "8c0480f92b1b1796239156a1b9b1bfb1b39499b4" +uuid = "b7a15901-be09-4a0e-87d2-2e66b0e09b5a" +version = "0.1.0" + +[[deps.CoreMath_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl"] +git-tree-sha1 = "a692a4c1dc59a4b8bc0b6403876eb3250fde2bc3" +uuid = "a38c48d9-6df1-5ac9-9223-b6ada3b5572b" +version = "0.1.0+0" + [[deps.CpuId]] deps = ["Markdown"] git-tree-sha1 = "fcbb72b032692610bfbdb15018ac16a36cf2e406" @@ -580,17 +624,11 @@ git-tree-sha1 = "abe83f3a2f1b857aac70ef8b269080af17764bbe" uuid = "9a962f9c-6df0-11e9-0e5d-c546b8b5ee8a" version = "1.16.0" -[[deps.DataFrames]] -deps = ["Compat", "DataAPI", "DataStructures", "Future", "InlineStrings", "InvertedIndices", "IteratorInterfaceExtensions", "LinearAlgebra", "Markdown", "Missings", "PooledArrays", "PrecompileTools", "PrettyTables", "Printf", "Random", "Reexport", "SentinelArrays", "SortingAlgorithms", "Statistics", "TableTraits", "Tables", "Unicode"] -git-tree-sha1 = "d8928e9169ff76c6281f39a659f9bca3a573f24c" -uuid = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0" -version = "1.8.1" - [[deps.DataStructures]] deps = ["OrderedCollections"] -git-tree-sha1 = "e357641bb3e0638d353c4b29ea0e40ea644066a6" +git-tree-sha1 = 
"e86f4a2805f7f19bec5129bc9150c38208e5dc23" uuid = "864edb3b-99cc-5e75-8d2d-829cb0a9cfe8" -version = "0.19.3" +version = "0.19.4" [[deps.DataValueInterfaces]] git-tree-sha1 = "bfc1187b79289637fa0ef6d4436ebdfe6905cbd6" @@ -625,6 +663,12 @@ git-tree-sha1 = "9e2f36d3c96a820c678f2f1f1782582fcf685bae" uuid = "8bb1440f-4735-579b-a4ab-409b98df4dab" version = "1.9.1" +[[deps.DensityInterface]] +deps = ["InverseFunctions", "Test"] +git-tree-sha1 = "80c3e8639e3353e5d2912fb3a1916b8455e2494b" +uuid = "b429d917-457f-4dbc-8f4c-0cc954292b1d" +version = "0.4.0" + [[deps.DiffResults]] deps = ["StaticArraysCore"] git-tree-sha1 = "782dd5f4561f5d267313f23853baaaa4c52ea621" @@ -698,6 +742,17 @@ weakdeps = ["ChainRulesCore", "EnzymeCore"] DispatchDoctorChainRulesCoreExt = "ChainRulesCore" DispatchDoctorEnzymeCoreExt = "EnzymeCore" +[[deps.Distances]] +deps = ["LinearAlgebra", "Statistics", "StatsAPI"] +git-tree-sha1 = "c7e3a542b999843086e2f29dac96a618c105be1d" +uuid = "b4f34e82-e78d-54a5-968a-f98e89d6e8f7" +version = "0.10.12" +weakdeps = ["ChainRulesCore", "SparseArrays"] + + [deps.Distances.extensions] + DistancesChainRulesCoreExt = "ChainRulesCore" + DistancesSparseArraysExt = "SparseArrays" + [[deps.Distributed]] deps = ["Random", "Serialization", "Sockets"] uuid = "8ba89e20-285c-5b6f-9357-94700520ee1b" @@ -720,17 +775,13 @@ deps = ["AliasTables", "FillArrays", "LinearAlgebra", "PDMats", "Printf", "QuadG git-tree-sha1 = "fbcc7610f6d8348428f722ecbe0e6cfe22e672c6" uuid = "31c24e10-a181-5473-b8eb-7969acd0382f" version = "0.25.123" +weakdeps = ["ChainRulesCore", "DensityInterface", "Test"] [deps.Distributions.extensions] DistributionsChainRulesCoreExt = "ChainRulesCore" DistributionsDensityInterfaceExt = "DensityInterface" DistributionsTestExt = "Test" - [deps.Distributions.weakdeps] - ChainRulesCore = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4" - DensityInterface = "b429d917-457f-4dbc-8f4c-0cc954292b1d" - Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" - [[deps.DocStringExtensions]] 
git-tree-sha1 = "7442a5dfe1ebb773c29cc2962a8980f47221d76c" uuid = "ffbed154-4ef7-542d-bbb7-c09d3a79fcae" @@ -748,14 +799,14 @@ uuid = "5ae413db-bbd1-5e63-b57d-d24a61df00f5" version = "2.2.4+0" [[deps.EnumX]] -git-tree-sha1 = "7bebc8aad6ee6217c78c5ddcf7ed289d65d0263e" +git-tree-sha1 = "c49898e8438c828577f04b92fc9368c388ac783c" uuid = "4e289a0a-7415-4d19-859d-a7e5c4648b56" -version = "1.0.6" +version = "1.0.7" [[deps.EnzymeCore]] -git-tree-sha1 = "990991b8aa76d17693a98e3a915ac7aa49f08d1a" +git-tree-sha1 = "24bbb6fc8fb87eb71c1f8d00184a60fc22c63903" uuid = "f151be2c-9106-41f4-ab19-57ee4f262869" -version = "0.8.18" +version = "0.8.19" weakdeps = ["Adapt", "ChainRulesCore"] [deps.EnzymeCore.extensions] @@ -813,10 +864,10 @@ uuid = "c87230d0-a227-11e9-1b43-d7ebe4e7570a" version = "0.4.5" [[deps.FFMPEG_jll]] -deps = ["Artifacts", "Bzip2_jll", "FreeType2_jll", "FriBidi_jll", "JLLWrappers", "LAME_jll", "Libdl", "Ogg_jll", "OpenSSL_jll", "Opus_jll", "PCRE2_jll", "Zlib_jll", "libaom_jll", "libass_jll", "libfdk_aac_jll", "libvorbis_jll", "x264_jll", "x265_jll"] -git-tree-sha1 = "01ba9d15e9eae375dc1eb9589df76b3572acd3f2" +deps = ["Artifacts", "Bzip2_jll", "FreeType2_jll", "FriBidi_jll", "JLLWrappers", "LAME_jll", "Libdl", "Ogg_jll", "OpenSSL_jll", "Opus_jll", "PCRE2_jll", "Zlib_jll", "libaom_jll", "libass_jll", "libfdk_aac_jll", "libva_jll", "libvorbis_jll", "x264_jll", "x265_jll"] +git-tree-sha1 = "66381d7059b5f3f6162f28831854008040a4e905" uuid = "b22a6f82-2f65-5046-a5b2-351ab43fb4e5" -version = "8.0.1+0" +version = "8.0.1+1" [[deps.FFTA]] deps = ["AbstractFFTs", "DocStringExtensions", "LinearAlgebra", "MuladdMacro", "Primes", "Random", "Reexport"] @@ -914,9 +965,9 @@ version = "1.3.7" [[deps.ForwardDiff]] deps = ["CommonSubexpressions", "DiffResults", "DiffRules", "LinearAlgebra", "LogExpFunctions", "NaNMath", "Preferences", "Printf", "Random", "SpecialFunctions"] -git-tree-sha1 = "eef4c86803f47dcb61e9b8790ecaa96956fdd8ae" +git-tree-sha1 = 
"cddeab6487248a39dae1a960fff0ac17b2a28888" uuid = "f6369f11-7733-5829-9624-2563aa707210" -version = "1.3.2" +version = "1.3.3" weakdeps = ["StaticArrays"] [deps.ForwardDiff.extensions] @@ -930,9 +981,9 @@ version = "4.1.1" [[deps.FreeType2_jll]] deps = ["Artifacts", "Bzip2_jll", "JLLWrappers", "Libdl", "Zlib_jll"] -git-tree-sha1 = "2c5512e11c791d1baed2049c5652441b28fc6a31" +git-tree-sha1 = "70329abc09b886fd2c5d94ad2d9527639c421e3e" uuid = "d7e528f0-a631-5988-bf34-fe36492bcfd7" -version = "2.13.4+0" +version = "2.14.3+1" [[deps.FreeTypeAbstraction]] deps = ["BaseDirs", "ColorVectorSpace", "Colors", "FreeType", "GeometryBasics", "Mmap"] @@ -952,10 +1003,17 @@ uuid = "069b7b12-0de2-55c6-9aab-29f3d0a68a2e" version = "1.1.3" [[deps.FunctionWrappersWrappers]] -deps = ["FunctionWrappers"] -git-tree-sha1 = "b104d487b34566608f8b4e1c39fb0b10aa279ff8" +deps = ["FunctionWrappers", "PrecompileTools", "TruncatedStacktraces"] +git-tree-sha1 = "5201523536a43bf8aef3914b7f60b552b098ef8e" uuid = "77dc65aa-8811-40c2-897b-53d922fa7daf" -version = "0.1.3" +version = "1.1.0" + + [deps.FunctionWrappersWrappers.extensions] + FunctionWrappersWrappersEnzymeExt = ["Enzyme", "EnzymeCore"] + + [deps.FunctionWrappersWrappers.weakdeps] + Enzyme = "7da242da-08ed-463a-9acd-ee780be4f1d9" + EnzymeCore = "f151be2c-9106-41f4-ab19-57ee4f262869" [[deps.Functors]] deps = ["Compat", "ConstructionBase", "LinearAlgebra", "Random"] @@ -992,21 +1050,21 @@ version = "0.2.0" [[deps.GPUCompiler]] deps = ["ExprTools", "InteractiveUtils", "LLVM", "Libdl", "Logging", "PrecompileTools", "Preferences", "Scratch", "Serialization", "TOML", "Tracy", "UUIDs"] -git-tree-sha1 = "966946d226e8b676ca6409454718accb18c34c54" +git-tree-sha1 = "fedfe5e7db7035271c3f58359007f971da1dde87" uuid = "61eb1bfa-7361-4325-ad38-22787b887f55" -version = "1.8.2" +version = "1.9.1" [[deps.GPUToolbox]] deps = ["LLVM"] -git-tree-sha1 = "9e9186b09a13b7f094f87d1a9bb266d8780e1b1c" +git-tree-sha1 = "a589b6c1a0eff953571f5d8b0474f5020831114d" uuid = 
"096a3bc2-3ced-46d0-87f4-dd12716f4bfc" -version = "1.0.0" +version = "1.1.1" [[deps.GR]] deps = ["Artifacts", "Base64", "DelimitedFiles", "Downloads", "GR_jll", "HTTP", "JSON", "Libdl", "LinearAlgebra", "Preferences", "Printf", "Qt6Wayland_jll", "Random", "Serialization", "Sockets", "TOML", "Tar", "Test", "p7zip_jll"] -git-tree-sha1 = "ee0585b62671ce88e48d3409733230b401c9775c" +git-tree-sha1 = "44716a1a667cb867ee0e9ec8edc31c3e4aa5afdc" uuid = "28b8d3ca-fb5f-59d9-8090-bfdbd6d07a71" -version = "0.73.22" +version = "0.73.24" [deps.GR.extensions] IJuliaExt = "IJulia" @@ -1016,9 +1074,9 @@ version = "0.73.22" [[deps.GR_jll]] deps = ["Artifacts", "Bzip2_jll", "Cairo_jll", "FFMPEG_jll", "Fontconfig_jll", "FreeType2_jll", "GLFW_jll", "JLLWrappers", "JpegTurbo_jll", "Libdl", "Libtiff_jll", "Pixman_jll", "Qt6Base_jll", "Zlib_jll", "libpng_jll"] -git-tree-sha1 = "7dd7173f7129a1b6f84e0f03e0890cd1189b0659" +git-tree-sha1 = "be8a1b8065959e24fdc1b51402f39f3b6f0f6653" uuid = "d2c73de3-f751-5644-a686-071e5b155ba9" -version = "0.73.22+0" +version = "0.73.24+0" [[deps.GeometryBasics]] deps = ["EarCut_jll", "Extents", "IterTools", "LinearAlgebra", "PrecompileTools", "Random", "StaticArrays"] @@ -1081,9 +1139,9 @@ version = "1.0.2" [[deps.HTTP]] deps = ["Base64", "CodecZlib", "ConcurrentUtilities", "Dates", "ExceptionUnwrapping", "Logging", "LoggingExtras", "MbedTLS", "NetworkOptions", "OpenSSL", "PrecompileTools", "Random", "SimpleBufferStream", "Sockets", "URIs", "UUIDs"] -git-tree-sha1 = "5e6fe50ae7f23d171f44e311c2960294aaa0beb5" +git-tree-sha1 = "51059d23c8bb67911a2e6fd5130229113735fc7e" uuid = "cd3eb016-35fb-5094-929b-558a96fad6f3" -version = "1.10.19" +version = "1.11.0" [[deps.HarfBuzz_jll]] deps = ["Artifacts", "Cairo_jll", "Fontconfig_jll", "FreeType2_jll", "Glib_jll", "Graphite2_jll", "JLLWrappers", "Libdl", "Libffi_jll"] @@ -1103,7 +1161,7 @@ uuid = "3e5b6fbb-0976-4d2c-9146-d79de83f2fb0" version = "0.1.18" [[deps.HybridVariationalInference]] -deps = ["Bijectors", 
"BlockDiagonals", "ChainRulesCore", "Combinatorics", "CommonSolve", "ComponentArrays", "DifferentiationInterface", "DistributionFits", "Distributions", "FillArrays", "Functors", "GPUArraysCore", "KernelAbstractions", "LinearAlgebra", "LogExpFunctions", "MLDataDevices", "MLUtils", "Missings", "NaNMath", "Optimisers", "Optimization", "Random", "StableRNGs", "StaticArrays", "StatsBase", "StatsFuns", "Test", "UnPack", "Zygote"] +deps = ["Bijectors", "BlockDiagonals", "ChainRulesCore", "Clustering", "Combinatorics", "CommonSolve", "ComponentArrays", "DifferentiationInterface", "Distances", "DistributionFits", "Distributions", "FillArrays", "Functors", "GPUArraysCore", "IterTools", "KernelAbstractions", "LinearAlgebra", "LogExpFunctions", "MLDataDevices", "MLUtils", "Missings", "NaNMath", "Optimisers", "Optimization", "PDMats", "Random", "StableRNGs", "StaticArrays", "StatsBase", "StatsFuns", "Test", "UnPack", "Zygote"] path = "../../.." uuid = "a108c475-a4e2-4021-9a84-cfa7df242f64" version = "0.2.0" @@ -1188,19 +1246,6 @@ git-tree-sha1 = "4da0f88e9a39111c2fa3add390ab15f3a44f3ca3" uuid = "22cec73e-a1b8-11e9-2c92-598750a2cf9c" version = "0.3.1" -[[deps.InlineStrings]] -git-tree-sha1 = "8f3d257792a522b4601c24a577954b0a8cd7334d" -uuid = "842dd82b-1e85-43dc-bf29-5d0ee9dffc48" -version = "1.4.5" - - [deps.InlineStrings.extensions] - ArrowTypesExt = "ArrowTypes" - ParsersExt = "Parsers" - - [deps.InlineStrings.weakdeps] - ArrowTypes = "31f734f8-188a-4ce0-8406-c8a06bd891cd" - Parsers = "69de0a69-1ddd-5017-9359-2bf0b02dc9f0" - [[deps.IntegerMathUtils]] git-tree-sha1 = "4c1acff2dc6b6967e7e750633c50bc3b8d83e617" uuid = "18e54dd8-cb9d-406c-a71d-865a43cbb235" @@ -1223,16 +1268,17 @@ weakdeps = ["ForwardDiff", "Unitful"] InterpolationsUnitfulExt = "Unitful" [[deps.IntervalArithmetic]] -deps = ["CRlibm", "MacroTools", "OpenBLASConsistentFPCSR_jll", "Printf", "Random", "RoundingEmulator"] -git-tree-sha1 = "02b61501dbe6da3b927cc25dacd7ce32390ee970" +deps = ["CRlibm", "CoreMath", 
"MacroTools", "OpenBLASConsistentFPCSR_jll", "Printf", "Random", "RoundingEmulator"] +git-tree-sha1 = "f1c42fcaca2d8034fe392f3e86c2e0809f75b2a1" uuid = "d1acc4aa-44c8-5952-acd4-ba5d80a2a253" -version = "1.0.2" +version = "1.0.6" [deps.IntervalArithmetic.extensions] IntervalArithmeticArblibExt = "Arblib" IntervalArithmeticDiffRulesExt = "DiffRules" IntervalArithmeticForwardDiffExt = "ForwardDiff" IntervalArithmeticIntervalSetsExt = "IntervalSets" + IntervalArithmeticIrrationalConstantsExt = "IrrationalConstants" IntervalArithmeticLinearAlgebraExt = "LinearAlgebra" IntervalArithmeticRecipesBaseExt = "RecipesBase" IntervalArithmeticSparseArraysExt = "SparseArrays" @@ -1242,14 +1288,15 @@ version = "1.0.2" DiffRules = "b552c78f-8df3-52c6-915a-8e097449b14b" ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210" IntervalSets = "8197267c-284f-5f27-9208-e0e47529a953" + IrrationalConstants = "92d709cd-6900-40b7-9082-c6be49f344b6" LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" RecipesBase = "3cdcf5f2-1ef4-517c-9805-6587b60abb01" SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf" [[deps.IntervalSets]] -git-tree-sha1 = "d966f85b3b7a8e49d034d27a189e9a4874b4391a" +git-tree-sha1 = "79d6bd28c8d9bccc2229784f1bd637689b256377" uuid = "8197267c-284f-5f27-9208-e0e47529a953" -version = "0.7.13" +version = "0.7.14" weakdeps = ["Random", "RecipesBase", "Statistics"] [deps.IntervalSets.extensions] @@ -1267,11 +1314,6 @@ weakdeps = ["Dates", "Test"] InverseFunctionsDatesExt = "Dates" InverseFunctionsTestExt = "Test" -[[deps.InvertedIndices]] -git-tree-sha1 = "6da3c4316095de0f5ee2ebd875df8721e7e0bdbe" -uuid = "41ab1584-1d38-5bbf-9106-f11c6c58b48f" -version = "1.3.1" - [[deps.IrrationalConstants]] git-tree-sha1 = "b2d91fe939cae05960e760110b328288867b5758" uuid = "92d709cd-6900-40b7-9082-c6be49f344b6" @@ -1295,9 +1337,9 @@ version = "1.0.0" [[deps.JLD2]] deps = ["ChunkCodecLibZlib", "ChunkCodecLibZstd", "FileIO", "MacroTools", "Mmap", "OrderedCollections", "PrecompileTools", 
"ScopedValues"] -git-tree-sha1 = "8f8ff711442d1f4cfc0d86133e7ee03d62ec9b98" +git-tree-sha1 = "941f87a0ae1b14d1ac2fa57245425b23a9d7a516" uuid = "033835bb-8acc-5ee8-8aae-3f567f8a3819" -version = "0.6.3" +version = "0.6.4" weakdeps = ["UnPack"] [deps.JLD2.extensions] @@ -1317,9 +1359,9 @@ version = "1.7.1" [[deps.JSON]] deps = ["Dates", "Logging", "Parsers", "PrecompileTools", "StructUtils", "UUIDs", "Unicode"] -git-tree-sha1 = "b3ad4a0255688dcb895a52fafbaae3023b588a90" +git-tree-sha1 = "67c6f1f085cb2671c93fe34244c9cccde30f7a26" uuid = "682c06a0-de6a-54ab-a142-c8b1cf79cde6" -version = "1.4.0" +version = "1.5.0" [deps.JSON.extensions] JSONArrowExt = ["ArrowTypes"] @@ -1359,9 +1401,9 @@ version = "0.2.4" [[deps.KernelAbstractions]] deps = ["Adapt", "Atomix", "InteractiveUtils", "MacroTools", "PrecompileTools", "Requires", "StaticArrays", "UUIDs"] -git-tree-sha1 = "fb14a863240d62fbf5922bf9f8803d7df6c62dc8" +git-tree-sha1 = "f2e76d3ced51a2a9e185abc0b97494c7273f649f" uuid = "63c18a36-062a-441e-b654-da1e3ab1ce7c" -version = "0.9.40" +version = "0.9.41" weakdeps = ["EnzymeCore", "LinearAlgebra", "SparseArrays"] [deps.KernelAbstractions.extensions] @@ -1381,12 +1423,6 @@ git-tree-sha1 = "059aabebaa7c82ccb853dd4a0ee9d17796f7e1bc" uuid = "c1c5ebd0-6772-5130-a774-d5fcae4a789d" version = "3.100.3+0" -[[deps.LBFGSB]] -deps = ["L_BFGS_B_jll"] -git-tree-sha1 = "e2e6f53ee20605d0ea2be473480b7480bd5091b5" -uuid = "5be7bae1-8223-5378-bac3-9e7378a2f6e6" -version = "0.4.1" - [[deps.LERC_jll]] deps = ["Artifacts", "JLLWrappers", "Libdl"] git-tree-sha1 = "aaafe88dccbd957a8d82f7d05be9b69172e0cee3" @@ -1420,18 +1456,6 @@ git-tree-sha1 = "eb62a3deb62fc6d8822c0c4bef73e4412419c5d8" uuid = "1d63c593-3942-5779-bab2-d838dc0a180e" version = "18.1.8+0" -[[deps.LZO_jll]] -deps = ["Artifacts", "JLLWrappers", "Libdl"] -git-tree-sha1 = "1c602b1127f4751facb671441ca72715cc95938a" -uuid = "dd4b983a-f0e5-5f8d-a1b7-129d4a5fb1ac" -version = "2.10.3+0" - -[[deps.L_BFGS_B_jll]] -deps = ["Artifacts", 
"CompilerSupportLibraries_jll", "JLLWrappers", "Libdl", "Pkg"] -git-tree-sha1 = "77feda930ed3f04b2b0fbb5bea89e69d3677c6b0" -uuid = "81d17ec3-03a1-5e46-b53e-bddc35a13473" -version = "3.0.1+0" - [[deps.LaTeXStrings]] git-tree-sha1 = "dda21b8cbd6a6c40d9d02a73230f9d70fed6918c" uuid = "b964fa9f-0449-5b57-a5c2-d3ea65f4040f" @@ -1553,6 +1577,12 @@ deps = ["Libdl", "OpenBLAS_jll", "libblastrampoline_jll"] uuid = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" version = "1.11.0" +[[deps.LogDensityProblems]] +deps = ["ArgCheck", "DocStringExtensions", "Random"] +git-tree-sha1 = "d9625f27ded4ad726ceca7819394a4cc77ed25b3" +uuid = "6fdf6af0-433a-55f7-b3ed-c6c6e0b8df7c" +version = "2.2.0" + [[deps.LogExpFunctions]] deps = ["DocStringExtensions", "IrrationalConstants", "LinearAlgebra"] git-tree-sha1 = "13ca9e2586b89836fd20cccf56e57e2b9ae7f38f" @@ -1659,9 +1689,9 @@ version = "1.5.3" [[deps.LuxLib]] deps = ["ArrayInterface", "CPUSummary", "ChainRulesCore", "DispatchDoctor", "EnzymeCore", "FastClosures", "Functors", "KernelAbstractions", "LinearAlgebra", "LuxCore", "MLDataDevices", "Markdown", "NNlib", "Preferences", "Random", "Reexport", "SciMLPublic", "Static", "StaticArraysCore", "Statistics", "UUIDs"] -git-tree-sha1 = "d93ed9031e8609a63dcd7f158f8565f93a0ab61e" +git-tree-sha1 = "77f3257b18e9fedd39b7b7990f0d3a0800a834ae" uuid = "82251201-b29d-42c6-8e01-566dec8acb11" -version = "1.15.4" +version = "1.15.6" [deps.LuxLib.extensions] AppleAccelerateExt = "AppleAccelerate" @@ -1702,9 +1732,9 @@ version = "1.15.4" [[deps.MCMCDiagnosticTools]] deps = ["AbstractFFTs", "DataAPI", "DataStructures", "Distributions", "LinearAlgebra", "MLJModelInterface", "Random", "SpecialFunctions", "Statistics", "StatsBase", "StatsFuns", "Tables"] -git-tree-sha1 = "f90494689e927268dec7bbd1ece64f134ad251f4" +git-tree-sha1 = "2f464b68e84673727b4e4216a6254fba7da5cf4e" uuid = "be115224-59cd-429b-ad48-344e309966f0" -version = "0.3.16" +version = "0.3.17" [[deps.MLCore]] deps = ["DataAPI", "SimpleTraits", "Tables"] @@ 
-1714,9 +1744,9 @@ version = "1.0.0" [[deps.MLDataDevices]] deps = ["Adapt", "Functors", "Preferences", "Random", "SciMLPublic"] -git-tree-sha1 = "d8ab79840174b85db64214d4140d4be0a9270210" +git-tree-sha1 = "39a69ca451c3e78b9a6a2e42ef894fdf7505e629" uuid = "7e8f7934-dd98-4c1a-8fe8-92b47a384d40" -version = "1.17.4" +version = "1.17.5" [deps.MLDataDevices.extensions] AMDGPUExt = "AMDGPU" @@ -1784,9 +1814,9 @@ version = "0.5.16" [[deps.Makie]] deps = ["Animations", "Base64", "CRC32c", "ColorBrewer", "ColorSchemes", "ColorTypes", "Colors", "ComputePipeline", "Contour", "Dates", "DelaunayTriangulation", "Distributions", "DocStringExtensions", "Downloads", "FFMPEG_jll", "FileIO", "FilePaths", "FixedPointNumbers", "Format", "FreeType", "FreeTypeAbstraction", "GeometryBasics", "GridLayoutBase", "ImageBase", "ImageIO", "InteractiveUtils", "Interpolations", "IntervalSets", "InverseFunctions", "Isoband", "KernelDensity", "LaTeXStrings", "LinearAlgebra", "MacroTools", "Markdown", "MathTeXEngine", "Observables", "OffsetArrays", "PNGFiles", "Packing", "Pkg", "PlotUtils", "PolygonOps", "PrecompileTools", "Printf", "REPL", "Random", "RelocatableFolders", "Scratch", "ShaderAbstractions", "Showoff", "SignedDistanceFields", "SparseArrays", "Statistics", "StatsBase", "StatsFuns", "StructArrays", "TriplotBase", "UnicodeFun", "Unitful"] -git-tree-sha1 = "d1b974f376c24dad02c873e951c5cd4e351cd7c2" +git-tree-sha1 = "68af66ec16af8b152309310251ecb4fbfe39869f" uuid = "ee78f7c6-11fb-53f2-987a-cfe4a2b5a57a" -version = "0.24.8" +version = "0.24.9" [deps.Makie.extensions] MakieDynamicQuantitiesExt = "DynamicQuantities" @@ -1817,9 +1847,9 @@ version = "0.6.7" [[deps.MbedTLS]] deps = ["Dates", "MbedTLS_jll", "MozillaCACerts_jll", "NetworkOptions", "Random", "Sockets"] -git-tree-sha1 = "c067a280ddc25f196b5e7df3877c6b226d390aaf" +git-tree-sha1 = "8785729fa736197687541f7053f6d8ab7fc44f92" uuid = "739be429-bea8-5141-9913-cc70e7f3736d" -version = "1.1.9" +version = "1.1.10" [[deps.MbedTLS_jll]] deps = 
["Artifacts", "Libdl"] @@ -1922,6 +1952,12 @@ git-tree-sha1 = "1a0fa0e9613f46c9b8c11eee38ebb4f590013c5e" uuid = "71a1bf82-56d0-4bbc-8a3c-48b961074391" version = "0.1.5" +[[deps.NearestNeighbors]] +deps = ["AbstractTrees", "Distances", "StaticArrays"] +git-tree-sha1 = "e2c3bba08dd6dedfe17a17889131b885b8c082f0" +uuid = "b8a86587-4115-5ab1-83bc-aa920d37bbce" +version = "0.4.27" + [[deps.Netpbm]] deps = ["FileIO", "ImageCore", "ImageMetadata"] git-tree-sha1 = "d92b107dbb887293622df7697a2223f9f8176fcd" @@ -1971,9 +2007,9 @@ version = "0.3.3" [[deps.OpenEXR_jll]] deps = ["Artifacts", "Imath_jll", "JLLWrappers", "Libdl", "Zlib_jll"] -git-tree-sha1 = "df9b7c88c2e7a2e77146223c526bf9e236d5f450" +git-tree-sha1 = "135492b7e97fc86d9b132b96a54d2d3dd3e0c6a8" uuid = "18a262bb-aa17-5467-a713-aee519bc75cb" -version = "3.4.4+0" +version = "3.4.8+0" [[deps.OpenLibm_jll]] deps = ["Artifacts", "Libdl"] @@ -1988,9 +2024,9 @@ version = "1.6.1" [[deps.OpenSSL_jll]] deps = ["Artifacts", "JLLWrappers", "Libdl"] -git-tree-sha1 = "c9cbeda6aceffc52d8a0017e71db27c7a7c0beaf" +git-tree-sha1 = "2ac022577e5eac7da040de17776d51bb770cd895" uuid = "458c3c95-2e84-50aa-8efc-19380b2a3a95" -version = "3.5.5+0" +version = "3.5.6+0" [[deps.OpenSpecFun_jll]] deps = ["Artifacts", "CompilerSupportLibraries_jll", "JLLWrappers", "Libdl"] @@ -2015,44 +2051,46 @@ version = "0.4.7" Reactant = "3c362404-f566-11ee-1572-e11a4b42c853" [[deps.Optimization]] -deps = ["ADTypes", "ArrayInterface", "ConsoleProgressMonitor", "DocStringExtensions", "LBFGSB", "LinearAlgebra", "Logging", "LoggingExtras", "OptimizationBase", "Printf", "ProgressLogging", "Random", "Reexport", "SciMLBase", "SparseArrays", "TerminalLoggers"] -git-tree-sha1 = "fa2449ce34cc1d7b2191bad4d3356a5376412288" +deps = ["ADTypes", "ArrayInterface", "ConsoleProgressMonitor", "DocStringExtensions", "LinearAlgebra", "Logging", "LoggingExtras", "OptimizationBase", "Printf", "Reexport", "SciMLBase", "SparseArrays", "TerminalLoggers"] +git-tree-sha1 = 
"2c409c814c2d745620fdd55391a66ee514561146" uuid = "7f7a1694-90dd-40f0-9382-eb1efda571ba" -version = "4.8.0" +version = "5.5.0" [[deps.OptimizationBase]] -deps = ["ADTypes", "ArrayInterface", "DifferentiationInterface", "DocStringExtensions", "FastClosures", "LinearAlgebra", "PDMats", "Reexport", "SciMLBase", "SparseArrays", "SparseConnectivityTracer", "SparseMatrixColorings"] -git-tree-sha1 = "a4d72b85ec4b0ef7aca1a1e067406fcc673addae" +deps = ["ADTypes", "ArrayInterface", "DifferentiationInterface", "DocStringExtensions", "FastClosures", "LinearAlgebra", "PDMats", "PrecompileTools", "Reexport", "SciMLBase", "SciMLLogging", "SparseArrays", "SparseConnectivityTracer", "SparseMatrixColorings", "SymbolicIndexingInterface"] +git-tree-sha1 = "a3d7837832e515111c95a02df7dc55edbdf17d8a" uuid = "bca83a33-5cc9-4baa-983d-23429ab6bcbb" -version = "2.14.0" +version = "5.1.0" [deps.OptimizationBase.extensions] - OptimizationEnzymeExt = "Enzyme" + OptimizationChainRulesCoreExt = "ChainRulesCore" + OptimizationEnzymeExt = ["ChainRulesCore", "Enzyme"] OptimizationFiniteDiffExt = "FiniteDiff" OptimizationForwardDiffExt = "ForwardDiff" OptimizationMLDataDevicesExt = "MLDataDevices" OptimizationMLUtilsExt = "MLUtils" - OptimizationMTKExt = "ModelingToolkit" + OptimizationMooncakeExt = "Mooncake" OptimizationReverseDiffExt = "ReverseDiff" OptimizationSymbolicAnalysisExt = "SymbolicAnalysis" OptimizationZygoteExt = "Zygote" [deps.OptimizationBase.weakdeps] + ChainRulesCore = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4" Enzyme = "7da242da-08ed-463a-9acd-ee780be4f1d9" FiniteDiff = "6a86dc24-6348-571c-b903-95158fe2bd41" ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210" MLDataDevices = "7e8f7934-dd98-4c1a-8fe8-92b47a384d40" MLUtils = "f1d291b0-491e-4a28-83b9-f70985020b54" - ModelingToolkit = "961ee093-0014-501f-94e3-6117800e7a78" + Mooncake = "da2b9cff-9c12-43a0-ae48-6db2b0edb7d6" ReverseDiff = "37e2e3b7-166d-5795-8a7a-e32c996b4267" SymbolicAnalysis = "4297ee4d-0239-47d8-ba5d-195ecdf594fe" 
Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f" [[deps.OptimizationOptimisers]] -deps = ["Optimisers", "Optimization", "Printf", "ProgressLogging", "Reexport"] -git-tree-sha1 = "ea6169605fe93e02df87874f388279ae766175d9" +deps = ["Logging", "Optimisers", "OptimizationBase", "Reexport", "SciMLBase"] +git-tree-sha1 = "7caf4c41e3ee6d348381228b6517decea28867e3" uuid = "42dfb2eb-d2b4-4451-abcd-913932933ac1" -version = "0.3.11" +version = "0.3.16" [[deps.Opus_jll]] deps = ["Artifacts", "JLLWrappers", "Libdl"] @@ -2161,9 +2199,9 @@ version = "1.4.4" [[deps.Plots]] deps = ["Base64", "Contour", "Dates", "Downloads", "FFMPEG", "FixedPointNumbers", "GR", "JLFzf", "JSON", "LaTeXStrings", "Latexify", "LinearAlgebra", "Measures", "NaNMath", "Pkg", "PlotThemes", "PlotUtils", "PrecompileTools", "Printf", "REPL", "Random", "RecipesBase", "RecipesPipeline", "Reexport", "RelocatableFolders", "Requires", "Scratch", "Showoff", "SparseArrays", "Statistics", "StatsBase", "TOML", "UUIDs", "UnicodeFun", "Unzip"] -git-tree-sha1 = "1cc8ad0762e59e713ee3ef28f9b78b2c9f4ca078" +git-tree-sha1 = "cb20a4eacda080e517e4deb9cfb6c7c518131265" uuid = "91a5bcdd-55d7-5caf-9e0b-520d859cae80" -version = "1.41.5" +version = "1.41.6" [deps.Plots.extensions] FileIOExt = "FileIO" @@ -2196,17 +2234,11 @@ git-tree-sha1 = "77b3d3605fc1cd0b42d95eba87dfcd2bf67d5ff6" uuid = "647866c9-e3ac-4575-94e7-e3d426903924" version = "0.1.2" -[[deps.PooledArrays]] -deps = ["DataAPI", "Future"] -git-tree-sha1 = "36d8b4b899628fb92c2749eb488d884a926614d3" -uuid = "2dfb63ee-cc39-5dd5-95bd-886bf059d720" -version = "1.4.3" - [[deps.PreallocationTools]] deps = ["Adapt", "ArrayInterface", "PrecompileTools"] -git-tree-sha1 = "dc8d6bde5005a0eac05ae8faf1eceaaca166cfa4" +git-tree-sha1 = "e16b73bf892c55d16d53c9c0dbd0fb31cb7e25da" uuid = "d236fae5-4411-538c-8e31-a6e3d9e00b46" -version = "1.1.2" +version = "1.2.0" [deps.PreallocationTools.extensions] PreallocationToolsForwardDiffExt = "ForwardDiff" @@ -2226,9 +2258,9 @@ version = "1.2.1" 
[[deps.Preferences]] deps = ["TOML"] -git-tree-sha1 = "522f093a29b31a93e34eaea17ba055d850edea28" +git-tree-sha1 = "8b770b60760d4451834fe79dd483e318eee709c4" uuid = "21216c6a-2e73-6563-6e65-726566657250" -version = "1.5.1" +version = "1.5.2" [[deps.PrettyPrint]] git-tree-sha1 = "632eb4abab3449ab30c5e1afaa874f0b98b586e4" @@ -2237,9 +2269,9 @@ version = "0.2.0" [[deps.PrettyTables]] deps = ["Crayons", "LaTeXStrings", "Markdown", "PrecompileTools", "Printf", "REPL", "Reexport", "StringManipulation", "Tables"] -git-tree-sha1 = "211530a7dc76ab59087f4d4d1fc3f086fbe87594" +git-tree-sha1 = "624de6279ab7d94fc9f672f0068107eb6619732c" uuid = "08abe8d2-0d0c-5749-adfa-8a2ac140af0d" -version = "3.2.3" +version = "3.3.2" [deps.PrettyTables.extensions] PrettyTablesTypstryExt = "Typstry" @@ -2271,9 +2303,9 @@ uuid = "92933f4c-e287-5a05-a399-4b506db050ca" version = "1.11.0" [[deps.PtrArrays]] -git-tree-sha1 = "1d36ef11a9aaf1e8b74dacc6a731dd1de8fd493d" +git-tree-sha1 = "4fbbafbc6251b883f4d2705356f3641f3652a7fe" uuid = "43287f4e-b6f4-7ad1-bb20-aadabca52c3d" -version = "1.3.0" +version = "1.4.0" [[deps.QOI]] deps = ["ColorTypes", "FileIO", "FixedPointNumbers"] @@ -2283,33 +2315,39 @@ version = "1.0.2" [[deps.Qt6Base_jll]] deps = ["Artifacts", "CompilerSupportLibraries_jll", "Fontconfig_jll", "Glib_jll", "JLLWrappers", "Libdl", "Libglvnd_jll", "OpenSSL_jll", "Vulkan_Loader_jll", "Xorg_libSM_jll", "Xorg_libXext_jll", "Xorg_libXrender_jll", "Xorg_libxcb_jll", "Xorg_xcb_util_cursor_jll", "Xorg_xcb_util_image_jll", "Xorg_xcb_util_keysyms_jll", "Xorg_xcb_util_renderutil_jll", "Xorg_xcb_util_wm_jll", "Zlib_jll", "libinput_jll", "xkbcommon_jll"] -git-tree-sha1 = "34f7e5d2861083ec7596af8b8c092531facf2192" +git-tree-sha1 = "d7a4bff94f42208ce3cf6bc8e4e7d1d663e7ee8b" uuid = "c0090381-4147-56d7-9ebc-da0b1113ec56" -version = "6.8.2+2" +version = "6.10.2+1" [[deps.Qt6Declarative_jll]] -deps = ["Artifacts", "JLLWrappers", "Libdl", "Qt6Base_jll", "Qt6ShaderTools_jll"] -git-tree-sha1 = 
"da7adf145cce0d44e892626e647f9dcbe9cb3e10" +deps = ["Artifacts", "JLLWrappers", "Libdl", "Qt6Base_jll", "Qt6ShaderTools_jll", "Qt6Svg_jll"] +git-tree-sha1 = "d5b7dd0e226774cbd87e2790e34def09245c7eab" uuid = "629bc702-f1f5-5709-abd5-49b8460ea067" -version = "6.8.2+1" +version = "6.10.2+1" [[deps.Qt6ShaderTools_jll]] deps = ["Artifacts", "JLLWrappers", "Libdl", "Qt6Base_jll"] -git-tree-sha1 = "9eca9fc3fe515d619ce004c83c31ffd3f85c7ccf" +git-tree-sha1 = "4d85eedf69d875982c46643f6b4f66919d7e157b" uuid = "ce943373-25bb-56aa-8eca-768745ed7b5a" -version = "6.8.2+1" +version = "6.10.2+1" + +[[deps.Qt6Svg_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Qt6Base_jll"] +git-tree-sha1 = "81587ff5ff25a4e1115ce191e36285ede0334c9d" +uuid = "6de9746b-f93d-5813-b365-ba18ad4a9cf3" +version = "6.10.2+0" [[deps.Qt6Wayland_jll]] deps = ["Artifacts", "JLLWrappers", "Libdl", "Qt6Base_jll", "Qt6Declarative_jll"] -git-tree-sha1 = "8f528b0851b5b7025032818eb5abbeb8a736f853" +git-tree-sha1 = "672c938b4b4e3e0169a07a5f227029d4905456f2" uuid = "e99dba38-086e-5de3-a5b1-6e4c66e897c3" -version = "6.8.2+2" +version = "6.10.2+1" [[deps.QuadGK]] deps = ["DataStructures", "LinearAlgebra"] -git-tree-sha1 = "9da16da70037ba9d701192e27befedefb91ec284" +git-tree-sha1 = "5e8e8b0ab68215d7a2b14b9921a946fee794749e" uuid = "1fd47b50-473d-5c70-9696-f719f8f3bcdc" -version = "2.11.2" +version = "2.11.3" [deps.QuadGK.extensions] QuadGKEnzymeExt = "Enzyme" @@ -2356,9 +2394,9 @@ weakdeps = ["FixedPointNumbers"] [[deps.ReactantCore]] deps = ["ExpressionExplorer", "MacroTools"] -git-tree-sha1 = "f3e31b90afcd152578a6c389eae46dd38b9a4f38" +git-tree-sha1 = "5b9e0fe7fb2cf3794fd96ac32bf2732aa4bb9776" uuid = "a3311ec8-5e00-46d5-b541-4f83e724a433" -version = "0.1.16" +version = "0.1.19" [[deps.RealDot]] deps = ["LinearAlgebra"] @@ -2380,12 +2418,13 @@ version = "0.6.12" [[deps.RecursiveArrayTools]] deps = ["Adapt", "ArrayInterface", "DocStringExtensions", "GPUArraysCore", "LinearAlgebra", "PrecompileTools", "RecipesBase", 
"StaticArraysCore", "SymbolicIndexingInterface"] -git-tree-sha1 = "18d2a6fd1ea9a8205cadb3a5704f8e51abdd748b" +git-tree-sha1 = "d0282d612f22dcad7b81cf487b746e63aa2a6709" uuid = "731186ca-8d62-57ce-b412-fbd966d074cd" -version = "3.48.0" +version = "3.54.0" [deps.RecursiveArrayTools.extensions] RecursiveArrayToolsFastBroadcastExt = "FastBroadcast" + RecursiveArrayToolsFastBroadcastPolyesterExt = ["FastBroadcast", "Polyester"] RecursiveArrayToolsForwardDiffExt = "ForwardDiff" RecursiveArrayToolsKernelAbstractionsExt = "KernelAbstractions" RecursiveArrayToolsMeasurementsExt = "Measurements" @@ -2404,6 +2443,7 @@ version = "3.48.0" KernelAbstractions = "63c18a36-062a-441e-b654-da1e3ab1ce7c" Measurements = "eff96d63-e80a-5855-80a2-b1b0885c5ab7" MonteCarloMeasurements = "0987c9cc-fe09-11e8-30f0-b96dd679fdca" + Polyester = "f517fe37-dbe3-4b94-8317-1923a5111588" ReverseDiff = "37e2e3b7-166d-5795-8a7a-e32c996b4267" SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf" Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2" @@ -2443,9 +2483,9 @@ version = "0.5.1+0" [[deps.Roots]] deps = ["Accessors", "CommonSolve", "Printf"] -git-tree-sha1 = "8a433b1ede5e9be9a7ba5b1cc6698daa8d718f1d" +git-tree-sha1 = "b2f70f34eb9973572d55c332933c6a04c911f549" uuid = "f2b01f46-fcfa-551c-844a-d8ac1e96c665" -version = "2.2.10" +version = "2.2.14" [deps.Roots.extensions] RootsChainRulesCoreExt = "ChainRulesCore" @@ -2497,9 +2537,9 @@ version = "0.6.43" [[deps.SciMLBase]] deps = ["ADTypes", "Accessors", "Adapt", "ArrayInterface", "CommonSolve", "ConstructionBase", "Distributed", "DocStringExtensions", "EnumX", "FunctionWrappersWrappers", "IteratorInterfaceExtensions", "LinearAlgebra", "Logging", "Markdown", "Moshi", "PreallocationTools", "PrecompileTools", "Preferences", "Printf", "RecipesBase", "RecursiveArrayTools", "Reexport", "RuntimeGeneratedFunctions", "SciMLLogging", "SciMLOperators", "SciMLPublic", "SciMLStructures", "StaticArraysCore", "Statistics", "SymbolicIndexingInterface"] -git-tree-sha1 
= "f2c5ddf74a49c1fd74e511d31168f0354c6c1e19" +git-tree-sha1 = "908c0bf271604d09393a21c142116ab26f66f67c" uuid = "0bca4576-84f4-4d90-8ffe-ffa030f20462" -version = "2.138.1" +version = "2.154.0" [deps.SciMLBase.extensions] SciMLBaseChainRulesCoreExt = "ChainRulesCore" @@ -2542,9 +2582,9 @@ version = "2.138.1" [[deps.SciMLLogging]] deps = ["Logging", "LoggingExtras", "Preferences"] -git-tree-sha1 = "a51104fa06a784c5ddf4a9937c5e0c03de0c6878" +git-tree-sha1 = "0161be062570af4042cf6f69e3d5d0b0555b6927" uuid = "a6db7da4-7206-11f0-1eab-35f2a5dbe1d1" -version = "1.9.0" +version = "1.9.1" weakdeps = ["Tracy"] [deps.SciMLLogging.extensions] @@ -2552,9 +2592,9 @@ weakdeps = ["Tracy"] [[deps.SciMLOperators]] deps = ["Accessors", "ArrayInterface", "DocStringExtensions", "LinearAlgebra"] -git-tree-sha1 = "794c760e6aafe9f40dcd7dd30526ea33f0adc8b7" +git-tree-sha1 = "234869cf9fee9258a95464b7a7065cc7be84db00" uuid = "c0aeaf25-5076-4817-a8d5-81caf7dfa961" -version = "1.15.1" +version = "1.16.0" weakdeps = ["SparseArrays", "StaticArraysCore"] [deps.SciMLOperators.extensions] @@ -2580,9 +2620,9 @@ version = "3.1.0" [[deps.ScopedValues]] deps = ["HashArrayMappedTries", "Logging"] -git-tree-sha1 = "c3b2323466378a2ba15bea4b2f73b081e022f473" +git-tree-sha1 = "ac4b837d89a58c848e85e698e2a2514e9d59d8f6" uuid = "7e506255-f358-4e82-b7e4-beb19740aa63" -version = "1.5.0" +version = "1.6.0" [[deps.Scratch]] deps = ["Dates"] @@ -2694,9 +2734,9 @@ version = "0.1.2" [[deps.SparseMatrixColorings]] deps = ["ADTypes", "DocStringExtensions", "LinearAlgebra", "PrecompileTools", "Random", "SparseArrays"] -git-tree-sha1 = "6ed48d9a3b22417c765dc273ae3e1e4de035e7c8" +git-tree-sha1 = "1c1be8c6fdfaf9b6c9e156c509e672953b8e6af7" uuid = "0a514795-09f3-496d-8182-132a7b665d35" -version = "0.4.23" +version = "0.4.26" [deps.SparseMatrixColorings.extensions] SparseMatrixColoringsCUDAExt = "CUDA" @@ -2713,9 +2753,9 @@ version = "0.4.23" [[deps.SpecialFunctions]] deps = ["IrrationalConstants", "LogExpFunctions", 
"OpenLibm_jll", "OpenSpecFun_jll"] -git-tree-sha1 = "5acc6a41b3082920f79ca3c759acbcecf18a8d78" +git-tree-sha1 = "2700b235561b0335d5bef7097a111dc513b8655e" uuid = "276daf66-3868-5448-9aa4-cd146d93841b" -version = "2.7.1" +version = "2.7.2" weakdeps = ["ChainRulesCore"] [deps.SpecialFunctions.extensions] @@ -2758,9 +2798,9 @@ weakdeps = ["OffsetArrays", "StaticArrays"] [[deps.StaticArrays]] deps = ["LinearAlgebra", "PrecompileTools", "Random", "StaticArraysCore"] -git-tree-sha1 = "eee1b9ad8b29ef0d936e3ec9838c7ec089620308" +git-tree-sha1 = "246a8bb2e6667f832eea063c3a56aef96429a3db" uuid = "90137ffa-7385-5640-81b9-e52037218182" -version = "1.9.16" +version = "1.9.18" weakdeps = ["ChainRulesCore", "Statistics"] [deps.StaticArrays.extensions] @@ -2819,15 +2859,15 @@ version = "0.5.8" [[deps.StringManipulation]] deps = ["PrecompileTools"] -git-tree-sha1 = "a3c1536470bf8c5e02096ad4853606d7c8f62721" +git-tree-sha1 = "d05693d339e37d6ab134c5ab53c29fce5ee5d7d5" uuid = "892a3eda-7b42-436c-8928-eab12a02cf0e" -version = "0.4.2" +version = "0.4.4" [[deps.StructArrays]] deps = ["ConstructionBase", "DataAPI", "Tables"] -git-tree-sha1 = "a2c37d815bf00575332b7bd0389f771cb7987214" +git-tree-sha1 = "ad8002667372439f2e3611cfd14097e03fa4bccd" uuid = "09ab397b-f2b6-538f-b94a-2f83cf4a842a" -version = "0.7.2" +version = "0.7.3" weakdeps = ["Adapt", "GPUArraysCore", "KernelAbstractions", "LinearAlgebra", "SparseArrays", "StaticArrays"] [deps.StructArrays.extensions] @@ -2839,16 +2879,18 @@ weakdeps = ["Adapt", "GPUArraysCore", "KernelAbstractions", "LinearAlgebra", "Sp [[deps.StructUtils]] deps = ["Dates", "UUIDs"] -git-tree-sha1 = "28145feabf717c5d65c1d5e09747ee7b1ff3ed13" +git-tree-sha1 = "fa95b3b097bcef5845c142ea2e085f1b2591e92c" uuid = "ec057cc2-7a8d-4b58-b3b3-92acb9f63b42" -version = "2.6.3" +version = "2.7.1" [deps.StructUtils.extensions] StructUtilsMeasurementsExt = ["Measurements"] + StructUtilsStaticArraysCoreExt = ["StaticArraysCore"] StructUtilsTablesExt = ["Tables"] 
[deps.StructUtils.weakdeps] Measurements = "eff96d63-e80a-5855-80a2-b1b0885c5ab7" + StaticArraysCore = "1e83bf80-4336-4d27-bf5d-d5a4f845583c" Tables = "bd369af6-aec1-5ad0-b16a-f7cc5008161c" [[deps.StyledStrings]] @@ -2927,9 +2969,9 @@ version = "0.5.5" [[deps.TiffImages]] deps = ["ColorTypes", "DataStructures", "DocStringExtensions", "FileIO", "FixedPointNumbers", "IndirectArrays", "Inflate", "Mmap", "OffsetArrays", "PkgVersion", "PrecompileTools", "ProgressMeter", "SIMD", "UUIDs"] -git-tree-sha1 = "98b9352a24cb6a2066f9ababcc6802de9aed8ad8" +git-tree-sha1 = "08c10bc34f4e7743f530793d0985bf3c254e193d" uuid = "731e570b-9d59-4bfa-96dc-6df516fadf69" -version = "0.11.6" +version = "0.11.8" [[deps.Tracy]] deps = ["ExprTools", "LibTracyClient_jll", "Libdl"] @@ -2975,6 +3017,12 @@ git-tree-sha1 = "4d4ed7f294cda19382ff7de4c137d24d16adc89b" uuid = "981d1d27-644d-49a2-9326-4793e63143c3" version = "0.1.0" +[[deps.TruncatedStacktraces]] +deps = ["InteractiveUtils", "MacroTools", "Preferences"] +git-tree-sha1 = "ea3e54c2bdde39062abf5a9758a23735558705e1" +uuid = "781d530d-4396-4725-bb49-402e4bee1e77" +version = "1.4.0" + [[deps.URIs]] git-tree-sha1 = "bef26fb046d031353ef97a82e3fdb6afe7f21b1a" uuid = "5c2747f8-b7ea-4ff2-ba2e-563bfd36b1d4" @@ -3016,9 +3064,9 @@ weakdeps = ["ConstructionBase", "ForwardDiff", "InverseFunctions", "LaTeXStrings PrintfExt = "Printf" [[deps.UnsafeAtomics]] -git-tree-sha1 = "b13c4edda90890e5b04ba24e20a310fbe6f249ff" +git-tree-sha1 = "0f30765c32d66d58e41f4cb5624d4fc8a82ec13b" uuid = "013be700-e6cd-48c3-b4a1-df204f14c38f" -version = "0.3.0" +version = "0.3.1" weakdeps = ["LLVM"] [deps.UnsafeAtomics.extensions] @@ -3091,9 +3139,9 @@ version = "1.1.0" [[deps.XZ_jll]] deps = ["Artifacts", "JLLWrappers", "Libdl"] -git-tree-sha1 = "9cce64c0fdd1960b597ba7ecda2950b5ed957438" +git-tree-sha1 = "b29c22e245d092b8b4e8d3c09ad7baa586d9f573" uuid = "ffd25f8a-64ca-5728-b0f7-c24cf3aae800" -version = "5.8.2+0" +version = "5.8.3+0" [[deps.Xorg_libICE_jll]] deps = ["Artifacts", 
"JLLWrappers", "Libdl"] @@ -3167,6 +3215,12 @@ git-tree-sha1 = "7ed9347888fac59a618302ee38216dd0379c480d" uuid = "ea2f1a96-1ddc-540d-b46f-429655e07cfa" version = "0.9.12+0" +[[deps.Xorg_libpciaccess_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Zlib_jll"] +git-tree-sha1 = "4909eb8f1cbf6bd4b1c30dd18b2ead9019ef2fad" +uuid = "a65dc6b1-eb27-53a1-bb3e-dea574b5389e" +version = "0.18.1+0" + [[deps.Xorg_libxcb_jll]] deps = ["Artifacts", "JLLWrappers", "Libdl", "Xorg_libXau_jll", "Xorg_libXdmcp_jll"] git-tree-sha1 = "bfcaf7ec088eaba362093393fe11aa141fa15422" @@ -3270,9 +3324,9 @@ version = "0.2.7" [[deps.cuDNN]] deps = ["CEnum", "CUDA", "CUDA_Runtime_Discovery", "CUDNN_jll"] -git-tree-sha1 = "c1e756c5b075d06f19595ac0bc6388ab2973237a" +git-tree-sha1 = "5494b0ae3ddc5ca0f64159d5ed3a396f36e0fcfe" uuid = "02a925ec-e4fe-4b08-9a7e-0d78e3d38ccd" -version = "1.4.6" +version = "1.4.7" [[deps.demumble_jll]] deps = ["Artifacts", "JLLWrappers", "Libdl"] @@ -3321,6 +3375,12 @@ git-tree-sha1 = "9bf7903af251d2050b467f76bdbe57ce541f7f4f" uuid = "1183f4f0-6f2a-5f1a-908b-139f9cdfea6f" version = "0.2.2+0" +[[deps.libdrm_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Xorg_libpciaccess_jll"] +git-tree-sha1 = "63aac0bcb0b582e11bad965cef4a689905456c03" +uuid = "8e53e030-5e6c-5a89-a30b-be5b7263a166" +version = "2.4.125+1" + [[deps.libevdev_jll]] deps = ["Artifacts", "JLLWrappers", "Libdl"] git-tree-sha1 = "56d643b57b188d30cccc25e331d416d3d358e557" @@ -3341,9 +3401,9 @@ version = "1.28.1+0" [[deps.libpng_jll]] deps = ["Artifacts", "JLLWrappers", "Libdl", "Zlib_jll"] -git-tree-sha1 = "e015f211ebb898c8180887012b938f3851e719ac" +git-tree-sha1 = "e2a7072fc0cdd7949528c1455a3e5da4122e1153" uuid = "b53b4c65-9356-5827-b1ea-8c7a1a84506f" -version = "1.6.55+0" +version = "1.6.56+0" [[deps.libsixel_jll]] deps = ["Artifacts", "JLLWrappers", "JpegTurbo_jll", "Libdl", "libpng_jll"] @@ -3351,6 +3411,12 @@ git-tree-sha1 = "c1733e347283df07689d71d61e14be986e49e47a" uuid = 
"075b6546-f08a-558a-be8f-8157d0f608a5" version = "1.10.5+0" +[[deps.libva_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Xorg_libX11_jll", "Xorg_libXext_jll", "Xorg_libXfixes_jll", "libdrm_jll"] +git-tree-sha1 = "7dbf96baae3310fe2fa0df0ccbb3c6288d5816c9" +uuid = "9a156e7d-b971-5f62-b2c9-67348b8fb97c" +version = "2.23.0+0" + [[deps.libvorbis_jll]] deps = ["Artifacts", "JLLWrappers", "Libdl", "Ogg_jll"] git-tree-sha1 = "11e1772e7f3cc987e9d3de991dd4f6b2602663a5" diff --git a/docs/src/tutorials/basic_cpu.md b/docs/src/tutorials/basic_cpu.md index 18d8b38..ce97015 100644 --- a/docs/src/tutorials/basic_cpu.md +++ b/docs/src/tutorials/basic_cpu.md @@ -35,7 +35,8 @@ $$ function f_doubleMM(θc::CA.ComponentVector{ET}, x) where ET # extract parameters not depending on order, i.e whether they are in θP or θM @unpack r0, r1, K1, K2 = θc - r0 .+ r1 .* x.S1 ./ (K1 .+ x.S1) .* x.S2 ./ (K2 .+ x.S2) + y = r0 .+ r1 .* x.S1 ./ (K1 .+ x.S1) .* x.S2 ./ (K2 .+ x.S2) + (y, y[1:0]) end ``` @@ -44,6 +45,10 @@ or fixed during the model inversion. However, it cannot assume an ordering in the parameters, but needs to access the components by its symbolic names in the provided `ComponentArray`. +In addition to the predictions, the PBM returns additional quantities that may +be used in penalizing unrealistic conditions independent of observations. +In this simple example, just an empty vector is returned. + ## Likelihood function HVI requires the evaluation of the likelihood of the predictions. @@ -181,8 +186,11 @@ PBM parameters, given the covariates. Here, we specify a 3-layer feed-forward neural network using the `SimpleChains` framework which works efficiently on CPU. +The ML model predicts the components of θM and an additional uncertainty +factor per site. 
+ ``` julia -n_out = length(θM) # number of individuals to predict +n_out = length(θM) + 1 # number of individuals to predict, and uncertainty factor n_input = n_covar = size(xM,1) g_chain = SimpleChain( @@ -216,10 +224,16 @@ specific prior distribution forms. However, for simplicity, a [`NormalScalingModelApplicator`](@ref) is fitted to the transformed 5% and 95% quantiles of the original prior. +All raw ML outputs are in the range (0,1). We want to scale the predictions +of the mean parameters to the likely range on unconstrained scale, but do +not scale the output for the uncertainty factor. This is taken care of +by the `range_scaled` keyword argument. + ``` julia priorsM = Tuple(priors_dict[k] for k in keys(θM)) lowers, uppers = get_quantile_transformed(priorsM, transM) -g_chain_scaled = NormalScalingModelApplicator(g_chain_app, lowers, uppers, FT) +range_scaled = 1:length(lowers) # do only scale means, but not the uncertainty factor +g_chain_scaled = NormalScalingModelApplicator(g_chain_app, lowers, uppers, FT; range_scaled) ``` The `g_chain_scaled` `ModelApplicator` now predicts in unconstrained scale, @@ -227,22 +241,44 @@ transforms logistic predctions around 0.5 to the range of high prior probability of the parameters, and transforms ML predictions near 0 or 1 towards the outer lower probability ranges. +## Setting up the Approximation strategy and the initial parameters + +Here, we are using the `MeanScalingHVIApproximation` approximation of the posterior +density, where the ML model predicts the means of the model at unconstrained scale +and a multiplier, i.e. an offset at log scale, of the main diagonal of the +covariance matrix for each site. + +We need to specify the structure of scaling blocks (here we use one block for +all parameters), the magnitude of the variance (that will be multiplied by the +site factor) at log-scale. + +Given this information, the initial values to be optimized and some information +in the approximator can be initialized.
+ +``` julia +block_ends = [length(θM)] # one scaling factor of all parameters +σ = FT(0.1) .* θM[block_ends] # standard deviation of 10% of values of template +logσ2 = FT(2) .* log.(σ) # transform to log_var scale +approx = MeanScalingHVIApproximation(block_ends, logσ2) + +(;ϕqc, approx) = init_hybrid_ϕq(approx, θP, θM, transP; n_site, transM) +``` + ## Assembling the information All the specifications above are stored in a [`HybridProblem`](@ref) structure. -Before, a [`PBMSiteApplicator`](@ref) is constructed that translates an invocation -given a vector of global parameters, and a matrix of site parameters to +Before, a [`PBMSiteApplicator`](@ref) is constructed that efficiently +translates an invocation given +a vector of global parameters, and a matrix of site parameters to invocation of the process based model (PBM), defined at the beginning. ``` julia -approx = MeanHVIApproximation() f_batch = PBMSiteApplicator(f_doubleMM; θP, θM, θFix, xPvec=xP[:,1]) -ϕq0 = init_hybrid_ϕq(approx, θP, θM, transP; n_site, transM) -prob = HybridProblem(θM, ϕq0, g_chain_scaled, ϕg0, +prob = HybridProblem(θM, ϕqc, g_chain_scaled, ϕg0, f_batch, priors_dict, py, - transM, transP, train_dataloader, test_data,n_covar, n_site, n_batch; approx) + transM, transP, train_dataloader, test_data, n_site, n_batch; approx) ``` ## Perform the inversion @@ -311,7 +347,8 @@ function f_doubleMM_sites(θc_tr::CA.ComponentMatrix, xPc::CA.ComponentMatrix) K1 = is_valid .* CA.getdata(θc_tr[:, Val(:K1)])' K2 = is_valid .* CA.getdata(θc_tr[:, Val(:K2)])' # each variable is a matrix (n_obs x n_site) - r0 .+ r1 .* S1 ./ (K1 .+ S1) .* S2 ./ (K2 .+ S2) + y = r0 .+ r1 .* S1 ./ (K1 .+ S1) .* S2 ./ (K2 .+ S2) + (y, y[1:0,:]) end ``` @@ -347,8 +384,8 @@ in the following [Inspect results of fitted problem](@ref) tutorial. In order to use the results from this tutorial in other tutorials, the updated `probo` `HybridProblem` and the interpreters are saved to a JLD2 file. 
-Before the problem is updated, so that it uses the redefinition [`DoubleMM.f_doubleMM_sites`](@ref) -of the PBM in module `DoubleMM` rather than +Before the problem is updated, it uses the redefinition [`DoubleMM.f_doubleMM_sites`](@ref) +of the PBM in module `DoubleMM` rather than what we defined in module `Main` to allow for easier reloading with JLD2. ``` julia diff --git a/docs/src/tutorials/basic_cpu.qmd b/docs/src/tutorials/basic_cpu.qmd index 86def5c..3915a4e 100644 --- a/docs/src/tutorials/basic_cpu.qmd +++ b/docs/src/tutorials/basic_cpu.qmd @@ -46,7 +46,8 @@ $$ function f_doubleMM(θc::CA.ComponentVector{ET}, x) where ET # extract parameters not depending on order, i.e whether they are in θP or θM @unpack r0, r1, K1, K2 = θc - r0 .+ r1 .* x.S1 ./ (K1 .+ x.S1) .* x.S2 ./ (K2 .+ x.S2) + y = r0 .+ r1 .* x.S1 ./ (K1 .+ x.S1) .* x.S2 ./ (K2 .+ x.S2) + (y, y[1:0]) end ``` @@ -55,6 +56,10 @@ or fixed during the model inversion. However, it cannot assume an ordering in the parameters, but needs to access the components by its symbolic names in the provided `ComponentArray`. +In addition to the predictions, the PBM returns additional quantities that may +be used in penalizing unrealistic conditions independent of observations. +In this simple example, just an empty vector is returned. + ## Likelihood function HVI requires the evaluation of the likelihood of the predictions. @@ -185,6 +190,29 @@ train_dataloader = MLUtils.DataLoader( CA.getdata.(values(train_data)), batchsize=n_batch, partial=false) ``` +## Setting up the Approximation strategy and the initial parameters + +Here, we are using the `MeanScalingHVIApproximation` approximation of the posterior +density, where the ML model predicts the means of the model at unconstrained scale +and a multiplier, i.e offset at log scale, of the main diagonal of the +covariance matrix for each site. 
+ +We need to specify the structure of scaling blocks (here we use one block for +all parameters), the magnitude of the variance (that will be multiplied by the +site factor) at log-scale. + +Given this information, the initial values to be optimized and some information +in the approximator can be initialized. + +```{julia} +block_ends = [length(θM)] # one scaling factor of all parameters +σ = FT(0.1) .* θM[block_ends] # standard deviation of 10% of values of template +logσ2 = FT(2) .* log.(σ) # transform to log_var scale +approx = MeanScalingHVIApproximation(block_ends, logσ2) + +(;ϕqc, approx) = init_hybrid_ϕq(approx, θP, θM, transP; n_site, transM) +``` + ## The Machine-Learning model The machine-learning (ML) part predicts parameters of the posterior of site-specific @@ -192,8 +220,12 @@ PBM parameters, given the covariates. Here, we specify a 3-layer feed-forward neural network using the `SimpleChains` framework which works efficiently on CPU. +The ML model predicts the components of θM and an additional uncertainty +factor per site. + ```{julia} -n_out = length(θM) # number of individuals to predict +#n_out = length(θM) + 1 # number of individuals to predict, and uncertainty factor +n_out = get_numberof_MLinputs(approx, θM) n_input = n_covar = size(xM,1) g_chain = SimpleChain( @@ -227,10 +259,16 @@ specific prior distribution forms. However, for simplicity, a [`NormalScalingModelApplicator`](@ref) is fitted to the transformed 5% and 95% quantiles of the original prior. +All raw ML outputs are in the range (0,1). We want to scale the predictions +of the mean parameters to the likely range on unconstrained scale, but do +not scale the output for the uncertainty factor. This is taken care of +by the `range_scaled` keyword argument.
+ ```{julia} priorsM = Tuple(priors_dict[k] for k in keys(θM)) lowers, uppers = get_quantile_transformed(priorsM, transM) -g_chain_scaled = NormalScalingModelApplicator(g_chain_app, lowers, uppers, FT) +range_scaled = 1:length(lowers) # do only scale means, but not the uncertainty factor +g_chain_scaled = NormalScalingModelApplicator(g_chain_app, lowers, uppers, FT; range_scaled) ``` The `g_chain_scaled` `ModelApplicator` now predicts in unconstrained scale, @@ -243,18 +281,12 @@ and transforms ML predictions near 0 or 1 towards the outer lower probability ra All the specifications above are stored in a [`HybridProblem`](@ref) structure. -Before, a [`PBMSiteApplicator`](@ref) is constructed that translates an invocation -given a vector of global parameters, and a matrix of site parameters to -invocation of the process based model (PBM), defined at the beginning. - ```{julia} -approx = MeanHVIApproximation() f_batch = PBMSiteApplicator(f_doubleMM; θP, θM, θFix, xPvec=xP[:,1]) -ϕq0 = init_hybrid_ϕq(approx, θP, θM, transP; n_site, transM) -prob = HybridProblem(θM, ϕq0, g_chain_scaled, ϕg0, +prob = HybridProblem(θM, ϕqc, g_chain_scaled, ϕg0, f_batch, priors_dict, py, - transM, transP, train_dataloader, test_data,n_covar, n_site, n_batch; approx) + transM, transP, train_dataloader, test_data, n_site, n_batch; approx) ``` ```{julia} @@ -349,7 +381,8 @@ function f_doubleMM_sites(θc_tr::CA.ComponentMatrix, xPc::CA.ComponentMatrix) K1 = is_valid .* CA.getdata(θc_tr[:, Val(:K1)])' K2 = is_valid .* CA.getdata(θc_tr[:, Val(:K2)])' # each variable is a matrix (n_obs x n_site) - r0 .+ r1 .* S1 ./ (K1 .+ S1) .* S2 ./ (K2 .+ S2) + y = r0 .+ r1 .* S1 ./ (K1 .+ S1) .* S2 ./ (K2 .+ S2) + (y, y[1:0,:]) end ``` @@ -384,8 +417,8 @@ in the following [Inspect results of fitted problem](@ref) tutorial. In order to use the results from this tutorial in other tutorials, the updated `probo` `HybridProblem` and the interpreters are saved to a JLD2 file. 
-Before the problem is updated, so that it uses the redefinition [`DoubleMM.f_doubleMM_sites`](@ref) -of the PBM in module `DoubleMM` rather than +Before the problem is updated, it uses the redefinition [`DoubleMM.f_doubleMM_sites`](@ref) +of the PBM in module `DoubleMM` rather than what we defined in module `Main` to allow for easier reloading with JLD2. ```{julia} diff --git a/docs/src/tutorials/basic_cpu_mean.md b/docs/src/tutorials/basic_cpu_mean.md new file mode 100644 index 0000000..8c6892a --- /dev/null +++ b/docs/src/tutorials/basic_cpu_mean.md @@ -0,0 +1,377 @@ +# Basic workflow without GPU using MeanHVIApproximation + + +``` @meta +CurrentModule = HybridVariationalInference +``` + +Similar to the [Basic workflow without GPU](@ref), but using an approximation of +the posterior, that does not estimate a site-specific scaling factor of +the uncertainty, but estimates uncertainty scaling with the parameter value +at unconstrained scale. + +First load necessary packages. + +``` julia +using HybridVariationalInference +using HybridVariationalInference: HybridVariationalInference as HVI +using ComponentArrays: ComponentArrays as CA +using Bijectors +using StableRNGs +using SimpleChains +using StatsFuns +using MLUtils +using DistributionFits +using UnPack +``` + +Next, specify many moving parts of the Hybrid variational inference (HVI) + +## The process-based model + +The example process based model (PBM) predicts a double-monod constrained rate +for different substrate concentrations, `S1`, and `S2`. + +$$ +y = r_0+ r_1 \frac{S_1}{K_1 + S_1} \frac{S_2}{K_2 + S_2} +$$ + +``` julia +function f_doubleMM(θc::CA.ComponentVector{ET}, x) where ET + # extract parameters not depending on order, i.e whether they are in θP or θM + @unpack r0, r1, K1, K2 = θc + y = r0 .+ r1 .* x.S1 ./ (K1 .+ x.S1) .* x.S2 ./ (K2 .+ x.S2) + (y, y[1:0]) +end +``` + +Its formulation is independent of which parameters are global, site-specific, +or fixed during the model inversion. 
+
+However, it cannot assume an ordering in the parameters, but needs to
+access the components by their symbolic names in the provided `ComponentArray`.
+
+In addition to the predictions, the PBM returns additional quantities that may
+be used in penalizing unrealistic conditions independent of observations.
+In this simple example, just an empty vector is returned.
+
+## Likelihood function
+
+HVI requires the evaluation of the likelihood of the predictions.
+It corresponds to the cost of predictions given some observations.
+
+The user specifies a function of the negative log-Likelihood
+`neg_logden(obs, pred, uncertainty_parameters)`,
+where all of the parameters are arrays with columns for sites.
+
+Here, we use the [`neg_logden_indep_normal`](@ref) function
+that assumes observations to be distributed independently
+normal around a true value.
+The provided `y_unc` uncertainty parameters, here, correspond to
+`logσ2`, denoting the log of the variance parameter of the normal distribution.
+
+``` julia
+py = neg_logden_indep_normal
+```
+
+## Global-Site, transformations, and priors
+
+### Global and site-specific parameters
+
+In this example, we will assign a fixed value to the r0 parameter, treat
+the K2 parameter as unknown but the same across sites, and predict
+r1 and K1 for each site separately, based on covariates known at the sites.
+
+Here we provide initial values for them by using `ComponentVector`.
+
+``` julia
+FT = Float32
+θM0 = θM = CA.ComponentVector{FT}(r1=0.5, K1=0.2) # separately for each individual
+θP0 = θP = CA.ComponentVector{FT}(K2=2.0) # population: same across individuals,
+θFix = CA.ComponentVector{FT}(r0=0.3) # r0, i.e. not estimated
+```
+
+### Parameter Transformations
+
+HVI allows for transformations of parameters from an unconstrained space,
+where the probability density is not strictly zero anywhere, to the original
+constrained space.
+ +Here, our model parameters are strictly positive, and we use the exponential function +to transform unconstrained estimates to the original constrained domain. + +``` julia +transP = Stacked(HVI.Exp()) +transM = Stacked(HVI.Exp(), HVI.Exp()) +``` + +Parameter transformations are specified using the `Bijectors` package. +Because, `Bijectors.elementwise(exp)`, has problems with automatic differentiation (AD) +on GPU, we use the public but non-exported [`Exp`](@ref) wrapper inside `Bijectors.Stacked`. + +### Prior information on parameters at constrained scale + +HVI is an approximate bayesian analysis and combines prior information on +the parameters with the model and observed data. + +Here, we provide a wide prior by fitting a Lognormal distributions to +- the mode corresponding to the initial value provided above +- the 0.95-quantile 3 times the mode +using the `DistributionFits.jl` package. + +``` julia +θall = vcat(θP, θM) +priors_dict = Dict{Symbol, Distribution}( + keys(θall) .=> fit.(LogNormal, θall, QuantilePoint.(θall .* 3, 0.95), Val(:mode))) +``` + +## Observations, model drivers and covariates + +The model parameters are inverted using information on the +- observed data, `y_o` +- its uncertainty, `y_unc` +- known covariates across sites, `xM` +- model drivers, `xP` +Here, we use synthetic data generated by the package. + +``` julia +rng = StableRNG(111) +n_site_test = 60 +(; xM, xP, y_o, y_unc) = gen_hybridproblem_synthetic( + rng, DoubleMM.DoubleMMCase(); n_site_test, scenario=Val((:omit_r0,))) +n_site = size(y_o,2) - n_site_test +i_test = n_site .+ (1:n_site_test) +i_train = 1:n_site +test_data = (; xM = xM[:, i_test], xP = xP[:, i_test], + y_o = y_o[:, i_test], y_unc = y_unc[:, i_test], i_site = i_test) +train_data = (; xM = xM[:, i_train], xP = xP[:, i_train], + y_o = y_o[:, i_train], y_unc = y_unc[:, i_train], i_site = i_train) +``` + +Lets look at them. 
+
+``` julia
+map(size,train_data)
+```
+
+    (xM = (5, 800), xP = (16, 800), y_o = (8, 800), y_unc = (8, 800), i_site = (800,))
+
+All of them have 800 columns, corresponding to 800 sites.
+There are 5 site-covariates, 16 values of model drivers, and 8 observations per site.
+
+``` julia
+xP[:,1]
+```
+
+    ComponentVector{Float32}(S1 = Float32[0.5, 0.5, 0.5, 0.5, 0.4, 0.3, 0.2, 0.1], S2 = Float32[1.0, 3.0, 4.0, 5.0, 5.0, 5.0, 5.0, 5.0])
+
+In each column of the model drivers there is a ComponentVector with
+components S1 and S2 corresponding to the concentrations, for which outputs
+were observed.
+This allows notation `x.S1` in the PBM above.
+
+The `y_unc` obtains its meaning from the Likelihood-function to be specified with
+the problem below.
+
+### Providing data in batches
+
+HVI uses `MLUtils.DataLoader` to provide batches of the data during each
+iteration of the solver. In addition to the data, it provides an
+index to the sites inside a tuple.
+
+``` julia
+n_batch = 20
+train_dataloader = MLUtils.DataLoader(
+    CA.getdata.(values(train_data)), batchsize=n_batch, partial=false)
+```
+
+## The Machine-Learning model
+
+The machine-learning (ML) part predicts parameters of the posterior of site-specific
+PBM parameters, given the covariates.
+Here, we specify a 3-layer feed-forward neural network using the `SimpleChains`
+framework which works efficiently on CPU.
+
+``` julia
+n_out = length(θM) # number of individuals to predict
+n_input = n_covar = size(xM,1)
+
+g_chain = SimpleChain(
+    static(n_input), # input dimension (optional)
+    TurboDense{true}(tanh, n_input * 4),
+    TurboDense{true}(tanh, n_input * 4),
+    # dense layer without bias that maps to n outputs to (0..1)
+    TurboDense{false}(logistic, n_out)
+)
+# get a template of the parameter vector, ϕg0
+g_chain_app, ϕg0 = construct_ChainsApplicator(rng, g_chain)
+```
+
+The `g_chain_app` `ChainsApplicator` predicts the parameters of the posterior
+approximation, given a vector of ML weights, `ϕg`.
+During construction, an initial template of this vector is created.
+This abstraction layer allows using different ML frameworks and replacing the
+`SimpleChains` model by `Flux` or `Lux`.
+
+### Using priors to scale ML-parameter estimates
+
+In order to balance gradients, the `g_chain_app` ModelApplicator defined above
+predicts on a scale (0..1).
+Now the priors are used to translate this to the parameter range by using the
+cumulative density distribution.
+
+Priors were specified at constrained scale, but the ML model predicts
+parameters on unconstrained scale.
+This transformation of the distribution can be mathematically worked out for
+specific prior distribution forms.
+However, for simplicity, a [`NormalScalingModelApplicator`](@ref)
+is fitted to the transformed 5% and 95% quantiles of the original prior.
+
+``` julia
+priorsM = Tuple(priors_dict[k] for k in keys(θM))
+lowers, uppers = get_quantile_transformed(priorsM, transM)
+g_chain_scaled = NormalScalingModelApplicator(g_chain_app, lowers, uppers, FT)
+```
+
+The `g_chain_scaled` `ModelApplicator` now predicts in unconstrained scale,
+transforms logistic predictions around 0.5 to the range of
+high prior probability of the parameters,
+and transforms ML predictions near 0 or 1 towards the outer lower probability ranges.
+
+## Assembling the information
+
+All the specifications above are stored in a [`HybridProblem`](@ref) structure.
+
+Before, a [`PBMSiteApplicator`](@ref) is constructed that translates an invocation
+given a vector of global parameters, and a matrix of site parameters to
+invocation of the process based model (PBM), defined at the beginning.
+
+``` julia
+approx = MeanHVIApproximation()
+f_batch = PBMSiteApplicator(f_doubleMM; θP, θM, θFix, xPvec=xP[:,1])
+(;ϕqc, approx) = init_hybrid_ϕq(approx, θP, θM, transP; n_site, transM)
+
+prob = HybridProblem(θM, ϕqc, g_chain_scaled, ϕg0,
+    f_batch, priors_dict, py,
+    transM, transP, train_dataloader, test_data, n_site, n_batch; approx)
+```
+
+## Perform the inversion
+
+Eventually, having assembled all the moving parts of the HVI, we can perform
+the inversion.
+
+``` julia
+# silence warning of no GPU backend found (because we did not import CUDA here)
+ENV["MLDATADEVICES_SILENCE_WARN_NO_GPU"] = 1
+```
+
+``` julia
+using OptimizationOptimisers
+import Zygote
+
+solver = HybridPosteriorSolver(; alg=Adam(0.02), n_MC=3)
+
+(; probo, interpreters) = solve(prob, solver; rng,
+    callback = callback_loss(100), # output during fitting
+    epochs = 2,
+);
+```
+
+The solver object is constructed given the specific stochastic optimization algorithm
+and the number of Monte-Carlo samples that are drawn in each iteration
+from the predicted parameter posterior.
+
+Then the solver is applied to the problem using [`solve`](@ref)
+for a given number of iterations or epochs.
+For this tutorial, we additionally specify that the function to transfer structures to
+the GPU is the identity function, so that all stays on the CPU, and this tutorial
+hence does not require a GPU or GPU libraries.
+
+Among the return values are
+- `probo`: A copy of the HybridProblem, with updated optimized parameters
+- `interpreters`: A `NamedTuple` with several `ComponentArrayInterpreter`s that
+will help analyzing the results.
+
+## Using a population-level process-based model
+
+So far, the process-based model ran for each single site.
+For this simple model, some performance gains result from matrix-computations
+when running the model for all sites within one batch simultaneously.
+
+In the following, the PBM specification accepts matrices as arguments
+for parameters and drivers
+and returns a matrix of predictions.
+Generally, the sites are the last dimension. So for the drivers and predictions, one column corresponds to one site.
+However, for the parameters one row corresponds to one site.
+
+``` julia
+using StaticArrays
+function f_doubleMM_sites(θc_tr::CA.ComponentMatrix, xPc::CA.ComponentMatrix)
+    # extract several covariates from xP
+    S1 = view(xPc, Val(:S1), :)
+    S2 = view(xPc, Val(:S2), :)
+    #
+    # extract the parameters as row-repeated vectors
+    # θc_tr[:,:r0] is parameter r0 for each site in batch
+    # dot-multiplication of full matrix times row-vector repeats for each observation row
+    # also introduces zero for missing observations, leading to zero gradient there
+    is_valid = isfinite.(S1) .&& isfinite.(S2)
+    r0 = is_valid .* CA.getdata(θc_tr[:, Val(:r0)])'
+    r1 = is_valid .* CA.getdata(θc_tr[:, Val(:r1)])'
+    K1 = is_valid .* CA.getdata(θc_tr[:, Val(:K1)])'
+    K2 = is_valid .* CA.getdata(θc_tr[:, Val(:K2)])'
+    # each variable is a matrix (n_obs x n_site)
+    y = r0 .+ r1 .* S1 ./ (K1 .+ S1) .* S2 ./ (K2 .+ S2)
+    (y, y[1:0,:])
+end
+```
+
+Again, the function should not rely on the order of parameters but use symbolic indexing
+to extract the parameter vectors.
+
+A corresponding [`PBMPopulationApplicator`](@ref) transforms calls with
+partitioned global and site parameters to calls of this matrix version of the PBM.
+The HVI Problem needs to be updated with this new applicator.
+
+``` julia
+f_batch = PBMPopulationApplicator(f_doubleMM_sites, n_batch; θP, θM, θFix, xPvec=xP[:,1])
+probo_sites = HybridProblem(probo; f_batch)
+```
+
+For numerical efficiency, the number of sites within one batch is part of the
+`PBMPopulationApplicator`. The problem stores an applicator for `n_batch` sites,
+however, an applicator for `n_site_pred` sites can be obtained by
+`create_nsite_applicator(f_batch, n_site_pred)`.
+ +``` julia +(; probo) = solve(probo_sites, solver; rng, + callback = callback_loss(100), # output during fitting + epochs = 20, + #is_inferred = Val(true), # activate type-checks +); +``` + +## Saving the results + +Extracting useful information from the optimized HybridProblem is covered +in the following [Inspect results of fitted problem](@ref) tutorial. +In order to use the results from this tutorial in other tutorials, +the updated `probo` `HybridProblem` and the interpreters are saved to a JLD2 file. + +Before the problem is updated, it uses the redefinition [`DoubleMM.f_doubleMM_sites`](@ref) +of the PBM in module `DoubleMM` rather than what we defined in +module `Main` to allow for easier reloading with JLD2. + +``` julia +f_batch = PBMPopulationApplicator(DoubleMM.f_doubleMM_sites, n_batch; θP, θM, θFix, xPvec=xP[:,1]) +probo2 = HybridProblem(probo; f_batch) +``` + +``` julia +using JLD2 +fname = "intermediate/basic_cpu_mean_results.jld2" +mkpath("intermediate") +if probo2 isa AbstractHybridProblem # do not save on failure above + jldsave(fname, false, IOStream; probo=probo2, interpreters) +end +``` diff --git a/docs/src/tutorials/basic_cpu_mean.qmd b/docs/src/tutorials/basic_cpu_mean.qmd new file mode 100644 index 0000000..7dc45cd --- /dev/null +++ b/docs/src/tutorials/basic_cpu_mean.qmd @@ -0,0 +1,420 @@ +--- +title: "Basic workflow without GPU using MeanHVIApproximation" +engine: julia +julia: + exeflags: ["+1.11"] +execute: + echo: true + output: false + daemon: 3600 +format: + commonmark: + variant: -raw_html+tex_math_dollars + wrap: preserve +bibliography: twutz_txt.bib +--- + +``` @meta +CurrentModule = HybridVariationalInference +``` + +Similar to the [Basic workflow without GPU](@ref), but using an approximation of +the posterior, that does not estimate a site-specific scaling factor of +the uncertainty, but estimates uncertainty scaling with the parameter value +at unconstrained scale. + +First load necessary packages. 
+```{julia} +using HybridVariationalInference +using HybridVariationalInference: HybridVariationalInference as HVI +using ComponentArrays: ComponentArrays as CA +using Bijectors +using StableRNGs +using SimpleChains +using StatsFuns +using MLUtils +using DistributionFits +using UnPack +``` + +Next, specify many moving parts of the Hybrid variational inference (HVI) + +## The process-based model +The example process based model (PBM) predicts a double-monod constrained rate +for different substrate concentrations, `S1`, and `S2`. + +$$ +y = r_0+ r_1 \frac{S_1}{K_1 + S_1} \frac{S_2}{K_2 + S_2} +$$ + +```{julia} +function f_doubleMM(θc::CA.ComponentVector{ET}, x) where ET + # extract parameters not depending on order, i.e whether they are in θP or θM + @unpack r0, r1, K1, K2 = θc + y = r0 .+ r1 .* x.S1 ./ (K1 .+ x.S1) .* x.S2 ./ (K2 .+ x.S2) + (y, y[1:0]) +end +``` + +Its formulation is independent of which parameters are global, site-specific, +or fixed during the model inversion. +However, it cannot assume an ordering in the parameters, but needs to +access the components by its symbolic names in the provided `ComponentArray`. + +In addition to the predictions, the PBM returns additional quantities that may +be used in penalizing unrealistic conditions independent of observations. +In this simple example, just an empty vector is returned. + +## Likelihood function + +HVI requires the evaluation of the likelihood of the predictions. +It corresponds to the cost of predictions given some observations. + +The user specifies a function of the negative log-Likelihood +`neg_logden(obs, pred, uncertainty_parameters)`, +where all of the parameters are arrays with columns for sites. + +Here, we use the [`neg_logden_indep_normal`](@ref) function +that assumed observations to be distributed independently +normal around a true value. +The provided `y_unc` uncertainty parameters, here, corresponds to +`logσ2`, denoting the log of the variance parameter of the normal distribution. 
+ +```{julia} +py = neg_logden_indep_normal +``` + +## Global-Site, transformations, and priors +### Global and site-specific parameters +In this example, we will assign a fixed value to r0 parameter, treat +the K2 parameter as unknown but the same across sites, and predict +r1 and K1 for each site separately, based on covariates known at the sites. + +Here we provide initial values for them by using `ComponentVector`. + +```{julia} +FT = Float32 +θM0 = θM = CA.ComponentVector{FT}(r1=0.5, K1=0.2) # separately for each individual +θP0 = θP = CA.ComponentVector{FT}(K2=2.0) # population: same across individuals, +θFix = CA.ComponentVector{FT}(r0=0.3) # r0, i.e. not estimated +``` +### Parameter Transformations +HVI allows for transformations of parameters in an unconstrained space, +where the probability density is not strictly zero anywhere to the original +constrained space. + +Here, our model parameters are strictly positive, and we use the exponential function +to transform unconstrained estimates to the original constrained domain. + +```{julia} +transP = Stacked(HVI.Exp()) +transM = Stacked(HVI.Exp(), HVI.Exp()) +``` + +Parameter transformations are specified using the `Bijectors` package. +Because, `Bijectors.elementwise(exp)`, has problems with automatic differentiation (AD) +on GPU, we use the public but non-exported [`Exp`](@ref) wrapper inside `Bijectors.Stacked`. + +### Prior information on parameters at constrained scale + +HVI is an approximate bayesian analysis and combines prior information on +the parameters with the model and observed data. + +Here, we provide a wide prior by fitting a Lognormal distributions to +- the mode corresponding to the initial value provided above +- the 0.95-quantile 3 times the mode +using the `DistributionFits.jl` package. 
+
+```{julia}
+θall = vcat(θP, θM)
+priors_dict = Dict{Symbol, Distribution}(
+    keys(θall) .=> fit.(LogNormal, θall, QuantilePoint.(θall .* 3, 0.95), Val(:mode)))
+```
+
+## Observations, model drivers and covariates
+
+The model parameters are inverted using information on the
+- observed data, `y_o`
+- its uncertainty, `y_unc`
+- known covariates across sites, `xM`
+- model drivers, `xP`
+Here, we use synthetic data generated by the package.
+
+```{julia}
+rng = StableRNG(111)
+n_site_test = 60
+(; xM, xP, y_o, y_unc) = gen_hybridproblem_synthetic(
+    rng, DoubleMM.DoubleMMCase(); n_site_test, scenario=Val((:omit_r0,)))
+n_site = size(y_o,2) - n_site_test
+i_test = n_site .+ (1:n_site_test)
+i_train = 1:n_site
+test_data = (; xM = xM[:, i_test], xP = xP[:, i_test],
+    y_o = y_o[:, i_test], y_unc = y_unc[:, i_test], i_site = i_test)
+train_data = (; xM = xM[:, i_train], xP = xP[:, i_train],
+    y_o = y_o[:, i_train], y_unc = y_unc[:, i_train], i_site = i_train)
+```
+
+```{julia}
+#| echo: false
+#| eval: false
+() -> begin
+    (; xM, θP_true, θMs_true, xP, y_true, y_o, y_unc) =
+        gen_hybridproblem_synthetic(rng, DoubleMM.DoubleMMCase(); scenario=Val((:omit_r0,)))
+end
+```
+
+Let's look at them.
+```{julia}
+#| output: true
+map(size,train_data)
+```
+All of them have 800 columns, corresponding to 800 sites.
+There are 5 site-covariates, 16 values of model drivers, and 8 observations per site.
+
+```{julia}
+#| output: true
+xP[:,1]
+```
+In each column of the model drivers there is a ComponentVector with
+components S1 and S2 corresponding to the concentrations, for which outputs
+were observed.
+This allows notation `x.S1` in the PBM above.
+
+The `y_unc` obtains its meaning from the Likelihood-function to be specified with
+the problem below.
+
+### Providing data in batches
+
+HVI uses `MLUtils.DataLoader` to provide batches of the data during each
+iteration of the solver. In addition to the data, it provides an
+index to the sites inside a tuple.
+ +```{julia} +n_batch = 20 +train_dataloader = MLUtils.DataLoader( + CA.getdata.(values(train_data)), batchsize=n_batch, partial=false) +``` + +## The Machine-Learning model + +The machine-learning (ML) part predicts parameters of the posterior of site-specific +PBM parameters, given the covariates. +Here, we specify a 3-layer feed-forward neural network using the `SimpleChains` +framework which works efficiently on CPU. + +```{julia} +n_out = length(θM) # number of individuals to predict +n_input = n_covar = size(xM,1) + +g_chain = SimpleChain( + static(n_input), # input dimension (optional) + TurboDense{true}(tanh, n_input * 4), + TurboDense{true}(tanh, n_input * 4), + # dense layer without bias that maps to n outputs to (0..1) + TurboDense{false}(logistic, n_out) +) +# get a template of the parameter vector, ϕg0 +g_chain_app, ϕg0 = construct_ChainsApplicator(rng, g_chain) +``` + +The `g_chain_app` `ChainsApplicator` predicts the parameters of the posterior, +approximation given a vector of ML weights,`ϕg`. +During construction, an initial template of this vector is created. +This abstraction layer allows to use different ML frameworks and replace the +`SimpleChains` model by `Flux` or `Lux`. + +### Using priors to scale ML-parameter estimates + +In order to balance gradients, the `g_chain_app` ModelApplicator defined above +predicts on a scale (0..1). +Now the priors are used to translate this to the parameter range by using the +cumulative density distribution. + +Priors were specified at constrained scale, but the ML model predicts +parameters on unconstrained scale. +This transformation of the distribution can be mathematically worked out for +specific prior distribution forms. +However, for simplicity, a [`NormalScalingModelApplicator`](@ref) +is fitted to the transformed 5% and 95% quantiles of the original prior. 
+ +```{julia} +priorsM = Tuple(priors_dict[k] for k in keys(θM)) +lowers, uppers = get_quantile_transformed(priorsM, transM) +g_chain_scaled = NormalScalingModelApplicator(g_chain_app, lowers, uppers, FT) +``` + +The `g_chain_scaled` `ModelApplicator` now predicts in unconstrained scale, +transforms logistic predctions around 0.5 to the range of +high prior probability of the parameters, +and transforms ML predictions near 0 or 1 towards the outer lower probability ranges. + + +## Assembling the information + +All the specifications above are stored in a [`HybridProblem`](@ref) structure. + +Before, a [`PBMSiteApplicator`](@ref) is constructed that translates an invocation +given a vector of global parameters, and a matrix of site parameters to +invocation of the process based model (PBM), defined at the beginning. + +```{julia} +approx = MeanHVIApproximation() +f_batch = PBMSiteApplicator(f_doubleMM; θP, θM, θFix, xPvec=xP[:,1]) +(;ϕqc, approx) = init_hybrid_ϕq(approx, θP, θM, transP; n_site, transM) + +prob = HybridProblem(θM, ϕqc, g_chain_scaled, ϕg0, + f_batch, priors_dict, py, + transM, transP, train_dataloader, test_data, n_site, n_batch; approx) +``` + +```{julia} +#| eval: false +#| echo: false + +# test invoking +#θMs = stack(Iterators.repeated(θM, n_batch); dims=1) +θMs = θM' .+ (randn(n_batch, size(θM,1)) .* 0.05) +x_batch = xP[:,1:n_batch] +y1 = f_batch(CA.getdata(θP), CA.getdata(θMs), CA.getdata(x_batch))[2] + + +() -> begin + y1 - y_o[:,1:n_batch] # check size and roughly equal + #using Test + #@inferred f_batch(CA.getdata(θP), CA.getdata(θMs), CA.getdata(x_batch))[2] + @inferred Vector{Float64} f_batch(CA.getdata(θP), CA.getdata(θMs), CA.getdata(x_batch))[2] + #using Cthulhu + #@descend_code_warntype f_batch(CA.getdata(θP), CA.getdata(θMs), CA.getdata(x_batch)) + prob0 = HVI.DoubleMM.DoubleMMCase() + f_batch0 = get_hybridproblem_PBmodel(prob0) + y1f = f_batch0(θP, θMs, x_batch)[2] + y1 .- y1f # equal +end +``` + +## Perform the inversion + 
+
+Eventually, having assembled all the moving parts of the HVI, we can perform
+the inversion.
+
+```{julia}
+# silence warning of no GPU backend found (because we did not import CUDA here)
+ENV["MLDATADEVICES_SILENCE_WARN_NO_GPU"] = 1
+```
+
+```{julia}
+using OptimizationOptimisers
+import Zygote
+
+solver = HybridPosteriorSolver(; alg=Adam(0.02), n_MC=3)
+
+(; probo, interpreters) = solve(prob, solver; rng,
+    callback = callback_loss(100), # output during fitting
+    epochs = 2,
+);
+```
+
+The solver object is constructed given the specific stochastic optimization algorithm
+and the number of Monte-Carlo samples that are drawn in each iteration
+from the predicted parameter posterior.
+
+Then the solver is applied to the problem using [`solve`](@ref)
+for a given number of iterations or epochs.
+For this tutorial, we additionally specify that the function to transfer structures to
+the GPU is the identity function, so that all stays on the CPU, and this tutorial
+hence does not require a GPU or GPU libraries.
+
+Among the return values are
+- `probo`: A copy of the HybridProblem, with updated optimized parameters
+- `interpreters`: A `NamedTuple` with several `ComponentArrayInterpreter`s that
+  will help analyzing the results.
+
+## Using a population-level process-based model
+
+So far, the process-based model ran for each single site.
+For this simple model, some performance gains result from matrix-computations
+when running the model for all sites within one batch simultaneously.
+
+In the following, the PBM specification accepts matrices as arguments
+for parameters and drivers
+and returns a matrix of predictions.
+Generally, the sites are the last dimension. So for the drivers and predictions, one column corresponds to one site.
+However, for the parameters one row corresponds to one site.
+
+
+```{julia}
+using StaticArrays
+function f_doubleMM_sites(θc_tr::CA.ComponentMatrix, xPc::CA.ComponentMatrix)
+    # extract several covariates from xP
+    S1 = view(xPc, Val(:S1), :)
+    S2 = view(xPc, Val(:S2), :)
+    #
+    # extract the parameters as row-repeated vectors
+    # θc_tr[:,:r0] is parameter r0 for each site in batch
+    # dot-multiplication of full matrix times row-vector repeats for each observation row
+    # also introduces zero for missing observations, leading to zero gradient there
+    is_valid = isfinite.(S1) .&& isfinite.(S2)
+    r0 = is_valid .* CA.getdata(θc_tr[:, Val(:r0)])'
+    r1 = is_valid .* CA.getdata(θc_tr[:, Val(:r1)])'
+    K1 = is_valid .* CA.getdata(θc_tr[:, Val(:K1)])'
+    K2 = is_valid .* CA.getdata(θc_tr[:, Val(:K2)])'
+    # each variable is a matrix (n_obs x n_site)
+    y = r0 .+ r1 .* S1 ./ (K1 .+ S1) .* S2 ./ (K2 .+ S2)
+    (y, y[1:0,:])
+end
+```
+
+Again, the function should not rely on the order of parameters but use symbolic indexing
+to extract the parameter vectors.
+
+A corresponding [`PBMPopulationApplicator`](@ref) transforms calls with
+partitioned global and site parameters to calls of this matrix version of the PBM.
+The HVI Problem needs to be updated with this new applicator.
+
+```{julia}
+f_batch = PBMPopulationApplicator(f_doubleMM_sites, n_batch; θP, θM, θFix, xPvec=xP[:,1])
+probo_sites = HybridProblem(probo; f_batch)
+```
+
+For numerical efficiency, the number of sites within one batch is part of the
+`PBMPopulationApplicator`. The problem stores an applicator for `n_batch` sites,
+however, an applicator for `n_site_pred` sites can be obtained by
+`create_nsite_applicator(f_batch, n_site_pred)`.
+ +```{julia} +(; probo) = solve(probo_sites, solver; rng, + callback = callback_loss(100), # output during fitting + epochs = 20, + #is_inferred = Val(true), # activate type-checks +); +``` + +## Saving the results +Extracting useful information from the optimized HybridProblem is covered +in the following [Inspect results of fitted problem](@ref) tutorial. +In order to use the results from this tutorial in other tutorials, +the updated `probo` `HybridProblem` and the interpreters are saved to a JLD2 file. + +Before the problem is updated, it uses the redefinition [`DoubleMM.f_doubleMM_sites`](@ref) +of the PBM in module `DoubleMM` rather than what we defined in +module `Main` to allow for easier reloading with JLD2. + +```{julia} +f_batch = PBMPopulationApplicator(DoubleMM.f_doubleMM_sites, n_batch; θP, θM, θFix, xPvec=xP[:,1]) +probo2 = HybridProblem(probo; f_batch) +``` + +```{julia} +using JLD2 +fname = "intermediate/basic_cpu_mean_results.jld2" +mkpath("intermediate") +if probo2 isa AbstractHybridProblem # do not save on failure above + jldsave(fname, false, IOStream; probo=probo2, interpreters) +end +``` + +```{julia} +#| eval: false +#| echo: false +probo = load(fname, "probo"; iotype = IOStream); +``` diff --git a/docs/src/tutorials/blocks_corr.md b/docs/src/tutorials/blocks_corr.md index 75459aa..203076a 100644 --- a/docs/src/tutorials/blocks_corr.md +++ b/docs/src/tutorials/blocks_corr.md @@ -86,7 +86,7 @@ Check that the new specification uses fewer parameters. length(get_hybridproblem_ϕq(prob)), length(get_hybridproblem_ϕq(prob_ind)) ``` - (7, 6) + (4, 3) ``` julia using OptimizationOptimisers @@ -106,10 +106,10 @@ First, draw a sample. 
``` julia n_sample_pred = 400 -(y_cor, θsP_cor, θsMs_tr_cor) = (; y, θsP, θsMs_tr) = predict_hvi( - Random.default_rng(), probo_cor; n_sample_pred) -(y_ind, θsP_ind, θsMs_tr_ind) = (; y, θsP, θsMs_tr) = predict_hvi( - Random.default_rng(), probo_ind; n_sample_pred) +(; y, θsP, θsMs_tr) = predict_hvi(Random.default_rng(), probo_cor; n_sample_pred) +(y_cor, θsP_cor, θsMs_tr_cor) = (y, θsP, θsMs_tr) +(; y, θsP, θsMs_tr) = predict_hvi(Random.default_rng(), probo_ind; n_sample_pred) +(y_ind, θsP_ind, θsMs_tr_ind) = (y, θsP, θsMs_tr) ``` ``` julia diff --git a/docs/src/tutorials/blocks_corr.qmd b/docs/src/tutorials/blocks_corr.qmd index e5f2c71..e18a200 100644 --- a/docs/src/tutorials/blocks_corr.qmd +++ b/docs/src/tutorials/blocks_corr.qmd @@ -109,10 +109,10 @@ solver = HybridPosteriorSolver(; alg=Adam(0.02), n_MC=3) First, draw a sample. ```{julia} n_sample_pred = 400 -(y_cor, θsP_cor, θsMs_tr_cor) = (; y, θsP, θsMs_tr) = predict_hvi( - Random.default_rng(), probo_cor; n_sample_pred) -(y_ind, θsP_ind, θsMs_tr_ind) = (; y, θsP, θsMs_tr) = predict_hvi( - Random.default_rng(), probo_ind; n_sample_pred) +(; y, θsP, θsMs_tr) = predict_hvi(Random.default_rng(), probo_cor; n_sample_pred) +(y_cor, θsP_cor, θsMs_tr_cor) = (y, θsP, θsMs_tr) +(; y, θsP, θsMs_tr) = predict_hvi(Random.default_rng(), probo_ind; n_sample_pred) +(y_ind, θsP_ind, θsMs_tr_ind) = (y, θsP, θsMs_tr) ``` ```{julia} diff --git a/docs/src/tutorials/blocks_corr_files/figure-commonmark/cell-10-output-1.png b/docs/src/tutorials/blocks_corr_files/figure-commonmark/cell-10-output-1.png index d620c2b..8a647e7 100644 Binary files a/docs/src/tutorials/blocks_corr_files/figure-commonmark/cell-10-output-1.png and b/docs/src/tutorials/blocks_corr_files/figure-commonmark/cell-10-output-1.png differ diff --git a/docs/src/tutorials/blocks_corr_files/figure-commonmark/cell-11-output-1.png b/docs/src/tutorials/blocks_corr_files/figure-commonmark/cell-11-output-1.png index c99b0d6..260c17a 100644 Binary files 
a/docs/src/tutorials/blocks_corr_files/figure-commonmark/cell-11-output-1.png and b/docs/src/tutorials/blocks_corr_files/figure-commonmark/cell-11-output-1.png differ diff --git a/docs/src/tutorials/blocks_corr_files/figure-commonmark/cell-12-output-1.png b/docs/src/tutorials/blocks_corr_files/figure-commonmark/cell-12-output-1.png index 23e29ce..6dcd287 100644 Binary files a/docs/src/tutorials/blocks_corr_files/figure-commonmark/cell-12-output-1.png and b/docs/src/tutorials/blocks_corr_files/figure-commonmark/cell-12-output-1.png differ diff --git a/docs/src/tutorials/corr_site_global.md b/docs/src/tutorials/corr_site_global.md index 2bc5aed..223f1d7 100644 --- a/docs/src/tutorials/corr_site_global.md +++ b/docs/src/tutorials/corr_site_global.md @@ -114,10 +114,10 @@ First, draw a sample. ``` julia n_sample_pred = 400 -(y_cond, θsP_cond, θsMs_cond) = (; y, θsP, θsMs_tr) = predict_hvi( - Random.default_rng(), probo_cond; n_sample_pred) -(y_uncond, θsP_uncond, θsMs_uncond) = (; y, θsP, θsMs_tr) = predict_hvi( - Random.default_rng(), probo_uncond; n_sample_pred) +(; y, θsP, θsMs_tr) = predict_hvi(Random.default_rng(), probo_cond; n_sample_pred) +(y_cond, θsP_cond, θsMs_cond) = (y, θsP, θsMs_tr) +(; y, θsP, θsMs_tr) = predict_hvi(Random.default_rng(), probo_uncond; n_sample_pred) +(y_uncond, θsP_uncond, θsMs_uncond) = (y, θsP, θsMs_tr) ``` ``` julia diff --git a/docs/src/tutorials/corr_site_global.qmd b/docs/src/tutorials/corr_site_global.qmd index 774bbd0..19246cf 100644 --- a/docs/src/tutorials/corr_site_global.qmd +++ b/docs/src/tutorials/corr_site_global.qmd @@ -123,10 +123,10 @@ solver = HybridPosteriorSolver(; alg=Adam(0.02), n_MC=3) First, draw a sample. 
```{julia} n_sample_pred = 400 -(y_cond, θsP_cond, θsMs_cond) = (; y, θsP, θsMs_tr) = predict_hvi( - Random.default_rng(), probo_cond; n_sample_pred) -(y_uncond, θsP_uncond, θsMs_uncond) = (; y, θsP, θsMs_tr) = predict_hvi( - Random.default_rng(), probo_uncond; n_sample_pred) +(; y, θsP, θsMs_tr) = predict_hvi(Random.default_rng(), probo_cond; n_sample_pred) +(y_cond, θsP_cond, θsMs_cond) = (y, θsP, θsMs_tr) +(; y, θsP, θsMs_tr) = predict_hvi(Random.default_rng(), probo_uncond; n_sample_pred) +(y_uncond, θsP_uncond, θsMs_uncond) = (y, θsP, θsMs_tr) ``` ```{julia} diff --git a/docs/src/tutorials/corr_site_global_files/figure-commonmark/cell-10-output-1.png b/docs/src/tutorials/corr_site_global_files/figure-commonmark/cell-10-output-1.png index c63a6a8..d507178 100644 Binary files a/docs/src/tutorials/corr_site_global_files/figure-commonmark/cell-10-output-1.png and b/docs/src/tutorials/corr_site_global_files/figure-commonmark/cell-10-output-1.png differ diff --git a/docs/src/tutorials/corr_site_global_files/figure-commonmark/cell-11-output-1.png b/docs/src/tutorials/corr_site_global_files/figure-commonmark/cell-11-output-1.png index d0dcacb..e12851f 100644 Binary files a/docs/src/tutorials/corr_site_global_files/figure-commonmark/cell-11-output-1.png and b/docs/src/tutorials/corr_site_global_files/figure-commonmark/cell-11-output-1.png differ diff --git a/docs/src/tutorials/corr_site_global_files/figure-commonmark/cell-12-output-1.png b/docs/src/tutorials/corr_site_global_files/figure-commonmark/cell-12-output-1.png index af449cf..41f9a07 100644 Binary files a/docs/src/tutorials/corr_site_global_files/figure-commonmark/cell-12-output-1.png and b/docs/src/tutorials/corr_site_global_files/figure-commonmark/cell-12-output-1.png differ diff --git a/docs/src/tutorials/corr_site_global_files/figure-commonmark/cell-9-output-1.png b/docs/src/tutorials/corr_site_global_files/figure-commonmark/cell-9-output-1.png index 5faf3f8..6fd62cc 100644 Binary files 
a/docs/src/tutorials/corr_site_global_files/figure-commonmark/cell-9-output-1.png and b/docs/src/tutorials/corr_site_global_files/figure-commonmark/cell-9-output-1.png differ diff --git a/docs/src/tutorials/inspect_results.md b/docs/src/tutorials/inspect_results.md index 468b4e7..2413deb 100644 --- a/docs/src/tutorials/inspect_results.md +++ b/docs/src/tutorials/inspect_results.md @@ -89,6 +89,9 @@ covariances among parameters. Lets look at how the estimated uncertainty of a site parameter changes with its expected value. +For each site compute across the samples the mean and the +standard deviation. + ``` julia par = :K1 θmean = [mean(θsMs_tr[s,par,:]) for s in axes(θsMs_tr, 1)] @@ -106,19 +109,16 @@ values of the parameter. ## Correlations among site parameters at uncronstrained scale -The features a correlation matrix of site parameters at unconstrained scale. +The posterior approximation uses a correlation matrix +of site parameters at unconstrained scale that is assumed to +be equal across sites. It can be extracted using function [`get_hybridproblem_correlation_Ms`](@ref). -In a first implementation, this function operates on an `AbstractHybridProblem` -assuming that its returned `ϕq` contains a component `ρsM` -that is used to parameterize the +The actual inversion estimates parameters in component `ρsM` that construct the Cholesky factor of the correlation matrix. ``` julia -CM = HVI.get_hybridproblem_correlation_Ms(probo) -fig = Figure(); ax = Axis(fig[1,1], xlabel="mean($par)",ylabel="sd($par)") -scatter!(ax, θmean, θsd) -fig +CM = get_hybridproblem_correlation_Ms(probo) ``` ## Predictive Posterior @@ -132,7 +132,7 @@ sampling the posterior and predictive posterior and returns the additional `NamedTuple` entry `y`. 
``` julia -(; y, θsP, θsMs_tr) = predict_hvi(rng, probo; n_sample_pred) +(; y, θsP, θsMs_tr) = predict_hvi(rng, probo; n_sample_pred); ``` ``` julia @@ -157,10 +157,10 @@ scatter!(ax, ymean, ysd) fig ``` -![](inspect_results_files/figure-commonmark/cell-13-output-1.png) +![](inspect_results_files/figure-commonmark/cell-14-output-1.png) We see that observed values for associated substrate concentrations range about from -0.51 to 0.59 with an estimated standard deviation around 0.005 that decreases +0.5 to 0.6 with an estimated standard deviation around 0.005 that decreases with the observed value. If only a point prediction is required, function [`predict_point_hvi`](@ref) diff --git a/docs/src/tutorials/inspect_results.qmd b/docs/src/tutorials/inspect_results.qmd index 51910f9..6bfd346 100644 --- a/docs/src/tutorials/inspect_results.qmd +++ b/docs/src/tutorials/inspect_results.qmd @@ -119,6 +119,9 @@ probo.θP, probo.θM Lets look at how the estimated uncertainty of a site parameter changes with its expected value. +For each site compute across the samples the mean and the +standard deviation. + ```{julia} #| output: true par = :K1 @@ -134,20 +137,18 @@ its estimated uncertainty is about 0.04, slightly decreasing with the values of the parameter. ## Correlations among site parameters at uncronstrained scale -The features a correlation matrix of site parameters at unconstrained scale. +The posterior approximation uses a correlation matrix +of site parameters at unconstrained scale that is assumed to +be equal across sites. It can be extracted using function [`get_hybridproblem_correlation_Ms`](@ref). -In a first implementation, this function operates on an `AbstractHybridProblem` -assuming that its returned `ϕq` contains a component `ρsM` -that is used to parameterize the +The actual inversion estimates parameters in component `ρsM` that construct the Cholesky factor of the correlation matrix. 
-``` julia -CM = HVI.get_hybridproblem_correlation_Ms(probo) -fig = Figure(); ax = Axis(fig[1,1], xlabel="mean($par)",ylabel="sd($par)") -scatter!(ax, θmean, θsd) -fig +```{julia} +CM = get_hybridproblem_correlation_Ms(probo) ``` + ## Predictive Posterior In addition to the uncertainty in parameters, we are also interested in @@ -159,7 +160,7 @@ sampling the posterior and predictive posterior and returns the additional `NamedTuple` entry `y`. ```{julia} -(; y, θsP, θsMs_tr) = predict_hvi(rng, probo; n_sample_pred) +(; y, θsP, θsMs_tr) = predict_hvi(rng, probo; n_sample_pred); ``` ```{julia} @@ -184,10 +185,9 @@ scatter!(ax, ymean, ysd) fig ``` We see that observed values for associated substrate concentrations range about from -0.51 to 0.59 with an estimated standard deviation around 0.005 that decreases +0.5 to 0.6 with an estimated standard deviation around 0.005 that decreases with the observed value. - If only a point prediction is required, function [`predict_point_hvi`](@ref) can be used, that returns a single set of expected parameters and corresponding predictions. 
diff --git a/docs/src/tutorials/inspect_results_files/figure-commonmark/cell-10-output-1.png b/docs/src/tutorials/inspect_results_files/figure-commonmark/cell-10-output-1.png index 781c794..92ef5f0 100644 Binary files a/docs/src/tutorials/inspect_results_files/figure-commonmark/cell-10-output-1.png and b/docs/src/tutorials/inspect_results_files/figure-commonmark/cell-10-output-1.png differ diff --git a/docs/src/tutorials/inspect_results_files/figure-commonmark/cell-14-output-1.png b/docs/src/tutorials/inspect_results_files/figure-commonmark/cell-14-output-1.png index f994f9e..66844db 100644 Binary files a/docs/src/tutorials/inspect_results_files/figure-commonmark/cell-14-output-1.png and b/docs/src/tutorials/inspect_results_files/figure-commonmark/cell-14-output-1.png differ diff --git a/docs/src/tutorials/inspect_results_files/figure-commonmark/cell-8-output-1.png b/docs/src/tutorials/inspect_results_files/figure-commonmark/cell-8-output-1.png index 58f5511..163116d 100644 Binary files a/docs/src/tutorials/inspect_results_files/figure-commonmark/cell-8-output-1.png and b/docs/src/tutorials/inspect_results_files/figure-commonmark/cell-8-output-1.png differ diff --git a/docs/src/tutorials/intermediate/basic_cpu_mean_results.jld2 b/docs/src/tutorials/intermediate/basic_cpu_mean_results.jld2 new file mode 100644 index 0000000..0db034a Binary files /dev/null and b/docs/src/tutorials/intermediate/basic_cpu_mean_results.jld2 differ diff --git a/docs/src/tutorials/intermediate/basic_cpu_results.jld2 b/docs/src/tutorials/intermediate/basic_cpu_results.jld2 index 3a83818..4a62651 100644 Binary files a/docs/src/tutorials/intermediate/basic_cpu_results.jld2 and b/docs/src/tutorials/intermediate/basic_cpu_results.jld2 differ diff --git a/docs/src/tutorials/logden_user.md b/docs/src/tutorials/logden_user.md index 786d34c..5f7db13 100644 --- a/docs/src/tutorials/logden_user.md +++ b/docs/src/tutorials/logden_user.md @@ -116,10 +116,10 @@ the inversion assuming loglornally 
distributed observation errors. ``` julia n_sample_pred = 400 -(y_normal, θsP_normal, θsMs_normal) = (; y, θsP, θsMs_tr) = predict_hvi( - Random.default_rng(), probo_normal; n_sample_pred) -(y_lognormal, θsP_lognormal, θsMs_lognormal) = (; y, θsP, θsMs_tr) = predict_hvi( - Random.default_rng(), probo_lognormal; n_sample_pred) +(; y, θsP, θsMs_tr) = predict_hvi(Random.default_rng(), probo_normal; n_sample_pred) +(y_normal, θsP_normal, θsMs_normal) = (y, θsP, θsMs_tr) +(; y, θsP, θsMs_tr) = predict_hvi(Random.default_rng(), probo_lognormal; n_sample_pred) +(y_lognormal, θsP_lognormal, θsMs_lognormal) = (y, θsP, θsMs_tr) ``` Get the original observations from the DataLoader of the problem, and diff --git a/docs/src/tutorials/logden_user.qmd b/docs/src/tutorials/logden_user.qmd index b0762ba..23b86e2 100644 --- a/docs/src/tutorials/logden_user.qmd +++ b/docs/src/tutorials/logden_user.qmd @@ -125,10 +125,10 @@ the inversion assuming loglornally distributed observation errors. ```{julia} n_sample_pred = 400 -(y_normal, θsP_normal, θsMs_normal) = (; y, θsP, θsMs_tr) = predict_hvi( - Random.default_rng(), probo_normal; n_sample_pred) -(y_lognormal, θsP_lognormal, θsMs_lognormal) = (; y, θsP, θsMs_tr) = predict_hvi( - Random.default_rng(), probo_lognormal; n_sample_pred) +(; y, θsP, θsMs_tr) = predict_hvi(Random.default_rng(), probo_normal; n_sample_pred) +(y_normal, θsP_normal, θsMs_normal) = (y, θsP, θsMs_tr) +(; y, θsP, θsMs_tr) = predict_hvi(Random.default_rng(), probo_lognormal; n_sample_pred) +(y_lognormal, θsP_lognormal, θsMs_lognormal) = (y, θsP, θsMs_tr) ``` Get the original observations from the DataLoader of the problem, and diff --git a/docs/src/tutorials/logden_user_files/figure-commonmark/cell-10-output-1.png b/docs/src/tutorials/logden_user_files/figure-commonmark/cell-10-output-1.png index a2dca56..2a4c704 100644 Binary files a/docs/src/tutorials/logden_user_files/figure-commonmark/cell-10-output-1.png and 
b/docs/src/tutorials/logden_user_files/figure-commonmark/cell-10-output-1.png differ diff --git a/docs/src/tutorials/logden_user_files/figure-commonmark/cell-11-output-1.png b/docs/src/tutorials/logden_user_files/figure-commonmark/cell-11-output-1.png index a8260f2..8222378 100644 Binary files a/docs/src/tutorials/logden_user_files/figure-commonmark/cell-11-output-1.png and b/docs/src/tutorials/logden_user_files/figure-commonmark/cell-11-output-1.png differ diff --git a/docs/src/tutorials/logden_user_files/figure-commonmark/cell-12-output-1.png b/docs/src/tutorials/logden_user_files/figure-commonmark/cell-12-output-1.png index 98ad217..8c0988b 100644 Binary files a/docs/src/tutorials/logden_user_files/figure-commonmark/cell-12-output-1.png and b/docs/src/tutorials/logden_user_files/figure-commonmark/cell-12-output-1.png differ diff --git a/docs/src/tutorials/logden_user_files/figure-commonmark/cell-8-output-1.png b/docs/src/tutorials/logden_user_files/figure-commonmark/cell-8-output-1.png index 1254ae0..bbf97cb 100644 Binary files a/docs/src/tutorials/logden_user_files/figure-commonmark/cell-8-output-1.png and b/docs/src/tutorials/logden_user_files/figure-commonmark/cell-8-output-1.png differ diff --git a/docs/src/tutorials/logden_user_files/figure-commonmark/cell-9-output-1.png b/docs/src/tutorials/logden_user_files/figure-commonmark/cell-9-output-1.png index b996186..179bbef 100644 Binary files a/docs/src/tutorials/logden_user_files/figure-commonmark/cell-9-output-1.png and b/docs/src/tutorials/logden_user_files/figure-commonmark/cell-9-output-1.png differ diff --git a/docs/src/tutorials/lux_gpu.md b/docs/src/tutorials/lux_gpu.md index b48433f..a1e858f 100644 --- a/docs/src/tutorials/lux_gpu.md +++ b/docs/src/tutorials/lux_gpu.md @@ -49,7 +49,9 @@ Note that all the setup is almost the same, as in the basic workflow. The only difference is that a `Lux.Chains` object is provided to `construct_ChainsApplicator`. 
``` julia -n_out = length(prob.θM) # number of individuals to predict + +#n_out = length(prob.θM) # number of individuals to predict +n_out = get_numberof_MLinputs(prob.approx, prob.θM) n_covar = 5 #size(xM,1) n_input = n_covar @@ -65,7 +67,8 @@ g_chain_app, ϕg0 = construct_ChainsApplicator(rng, g_lux) priorsM = Tuple(prob.priors[k] for k in keys(prob.θM)) lowers, uppers = get_quantile_transformed(priorsM, prob.transM) FT = eltype(prob.θM) -g_chain_scaled = NormalScalingModelApplicator(g_chain_app, lowers, uppers, FT) +range_scaled = 1:length(lowers) # do only scale means, but not the uncertainty factor +g_chain_scaled = NormalScalingModelApplicator(g_chain_app, lowers, uppers, FT; range_scaled) ``` Update the `HybridProblem` to use this ML model. @@ -117,12 +120,13 @@ The sampling and prediction methods, also take this `gdevs` keyword argument. ``` julia n_sample_pred = 400 -(y_dev, θsP_dev, θsMs_dev) = (; y, θsP, θsMs_tr) = predict_hvi( - rng, probo_lux; n_sample_pred, - gdevs = (; gdev_M=gpu_device(), gdev_P=gpu_device())); +(; y, θsP, θsMs_tr) = predict_hvi(rng, probo_lux; n_sample_pred, + gdevs = (; gdev_M=gpu_device(), gdev_P=gpu_device())); +(y_dev, θsP_dev, θsMs_dev) = (y, θsP, θsMs_tr) ``` If `gdev_P` is not an `AbstractGPUDevice` then all the results are on CPU. +This is the case, if running this tutorial on a machine without GPU/CUDA setup. If `gdev_P` is an `AbstractGPUDevice` then the results are GPUArrays and need to be transferred to CPU. diff --git a/docs/src/tutorials/lux_gpu.qmd b/docs/src/tutorials/lux_gpu.qmd index 4ad3545..d3acd18 100644 --- a/docs/src/tutorials/lux_gpu.qmd +++ b/docs/src/tutorials/lux_gpu.qmd @@ -60,7 +60,9 @@ Note that all the setup is almost the same, as in the basic workflow. The only difference is that a `Lux.Chains` object is provided to `construct_ChainsApplicator`. 
```{julia} -n_out = length(prob.θM) # number of individuals to predict + +#n_out = length(prob.θM) # number of individuals to predict +n_out = get_numberof_MLinputs(prob.approx, prob.θM) n_covar = 5 #size(xM,1) n_input = n_covar @@ -76,7 +78,8 @@ g_chain_app, ϕg0 = construct_ChainsApplicator(rng, g_lux) priorsM = Tuple(prob.priors[k] for k in keys(prob.θM)) lowers, uppers = get_quantile_transformed(priorsM, prob.transM) FT = eltype(prob.θM) -g_chain_scaled = NormalScalingModelApplicator(g_chain_app, lowers, uppers, FT) +range_scaled = 1:length(lowers) # do only scale means, but not the uncertainty factor +g_chain_scaled = NormalScalingModelApplicator(g_chain_app, lowers, uppers, FT; range_scaled) ``` Update the `HybridProblem` to use this ML model. @@ -135,12 +138,13 @@ The sampling and prediction methods, also take this `gdevs` keyword argument. ```{julia} n_sample_pred = 400 -(y_dev, θsP_dev, θsMs_dev) = (; y, θsP, θsMs_tr) = predict_hvi( - rng, probo_lux; n_sample_pred, +(; y, θsP, θsMs_tr) = predict_hvi(rng, probo_lux; n_sample_pred, gdevs = (; gdev_M=gpu_device(), gdev_P=gpu_device())); +(y_dev, θsP_dev, θsMs_dev) = (y, θsP, θsMs_tr) ``` -If `gdev_P` is not an `AbstractGPUDevice` then all the results are on CPU. +If `gdev_P` is not an `AbstractGPUDevice` then all the results are on CPU. +This is the case when running this tutorial on a machine without GPU/CUDA setup. If `gdev_P` is an `AbstractGPUDevice` then the results are GPUArrays and need to be transferred to CPU. diff --git a/docs/src/tutorials/penalty.md b/docs/src/tutorials/penalty.md new file mode 100644 index 0000000..bc4b17e --- /dev/null +++ b/docs/src/tutorials/penalty.md @@ -0,0 +1,163 @@ +# How to specify custom Penalties + + +``` @meta +CurrentModule = HybridVariationalInference +``` + +This guide shows how the user can specify a customized penalties to help +the solver to converge to global minimum. + +## Motivation + +The basic cost in HVI is the negative log of the joint probability, i.e. 
+the likelihood of the observations given the parameters \* prior probability
+of the parameters.
+
+Sometimes there is additional knowledge not encoded in the prior, such as
+one parameter must be larger than another, or entropy-weights of the
+ML-parameters, and the solver accepts a function to add additional loss terms.
+The loglikelihood function assigns a cost to the mismatch between predictions and
+observations. This often needs to be customized to the specific inversion.
+
+This guide walks through the specification of such additional penalties.
+
+First load necessary packages.
+
+``` julia
+using HybridVariationalInference
+using SimpleChains
+using ComponentArrays: ComponentArrays as CA
+using JLD2
+import StableRNGs
+```
+
+This tutorial reuses and modifies the fitted object saved at the end of the
+[Basic workflow without GPU](@ref) tutorial, that used a log-Likelihood
+function assuming observation error to be distributed independently normal.
+
+``` julia
+fname = "intermediate/basic_cpu_results.jld2"
+print(abspath(fname))
+prob = probo_normal = load(fname, "probo");
+```
+
+## Write function to compute the penalty loss
+
+The function signature corresponds to the one described in [`compute_penalty`](@ref).
+
+In this example we want to avoid local minima when parameter, `r1`, is larger than
+95% of the maximum observation.
+
+``` julia
+# compute the maximum of observed rates at each site
+y_obs = get_hybridproblem_train_dataloader(prob).data[3]
+const y_obs_max = map(col -> maximum(x -> isfinite(x) ?
x : zero(x), col), eachcol(y_obs)) + +function compute_penalty_r1(y_pred::AbstractMatrix, addq_pred::AbstractMatrix, + θMs_tr::AbstractMatrix, θP::AbstractVector, i_sites, + ϕg, ϕq::AbstractVector) + # get the maximum of current batch from closure of this function + y_obs_max_sites = y_obs_max[i_sites] + # add a penalty if r1 is larger than 0.95 times the maximum + penalty = max.(zero(eltype(θMs_tr)), θMs_tr[:,:r1] .- 0.95 .* y_obs_max_sites) + (; penalty) +end +``` + +The PenaltyComputer receives argument, `i_sites`, which can be used to index precomputed observation maxima. + +## Update the problem and redo the inversion + +HybridProblem has keyword argument `penalty_computer` to specify the Callable +that computes the penalty. It defaults to `ZeroPenaltyComputer`, which +returns zero penalty cost. + +We can pass the function directly or alternatively construct a [`CustomPenaltyComputer`](@ref) and update the problem. + +``` julia +#prob_pen = HybridProblem(prob; penalty_computer = compute_penalty_r1) +penalty_computer = CustomPenaltyComputer(compute_penalty_r1) +prob_pen = HybridProblem(prob; penalty_computer) + +using OptimizationOptimisers +import Zygote +# silence warning of no GPU backend found (because we did not import CUDA here) +ENV["MLDATADEVICES_SILENCE_WARN_NO_GPU"] = 1 + +# first run a few iterators with updating only optimizing the mean +solver_point = HybridPointSolver(; alg=Adam(0.02)) +(; probo) = solve(prob_pen, solver_point; + callback = callback_loss(100), # output during fitting + epochs = 5, +); probo_pen_point = probo; + +# starting from this, also estimate the posterior uncertainty parameters +solver = HybridPosteriorSolver(; alg=Adam(0.02), n_MC=3) +(; probo) = solve(probo_pen_point, solver; + callback = callback_loss(100), # output during fitting + epochs = 5, +); +``` + +## Inspect the computed maxima + +Function predic_hvi also evaluates the penalties. 
Internally, the penalty function is +called for each sample, but only the average is computed and returned. + +``` julia +rng = StableRNGs.StableRNG(112) +n_sample_pred = 200 +(; y, θsP, θsMs_tr, ζsP, ζsMs_tr, penalties) = predict_hvi(rng, probo; n_sample_pred); +size(penalties) +``` + +The penalties object is a ComponentMatrix, and we can look at a specific site +and a named component returned by + +``` julia +i_site = 3 +penalties[i_site, :penalty] +``` + +## Writing a customized PenaltyComputer + +In the above example, the maximum of the observations in the batch +are accesses by a global variable. + +This can be improved. The precomputed maxima can be stored +in a struct implementing type `AbstractPenaltyComputer` +and function `compute_penalty`. + +``` julia +struct R1PenaltyComputer{T} <: AbstractPenaltyComputer where T + r_max::Vector{T} +end +function R1PenaltyComputer(ys::AbstractMatrix) + r_max = 0.95 .* vec(maximum(ys; dims = 1)) + R1PenaltyComputer(r_max) +end +function HybridVariationalInference.compute_penalty( + pc::R1PenaltyComputer, + y_pred::AbstractMatrix, addq_pred::AbstractMatrix, θMs_tr::AbstractMatrix, θP::AbstractVector, + i_sites::AbstractVector, + ϕg, ϕq::AbstractVector + ) + # @assert pc.r_max[i_sites] == 0.95 .* map(col -> maximum(x -> isfinite(x) ? 
x : zero(x), col), eachcol(y_obs)) + # add a penalty if r1 is larger r_max + penalty = max.(zero(eltype(θMs_tr)), θMs_tr[:,:r1] .- pc.r_max[i_sites]) + (;penalty) +end + +penalty_computer = R1PenaltyComputer(y_obs) +``` + +Rerunning the inversion using with the update PenaltyComputer: + +``` julia +prob_pen = HybridProblem(probo; penalty_computer) +(; probo) = solve(prob_pen, solver; + callback = callback_loss(100), # output during fitting + epochs = 5, +); +``` diff --git a/docs/src/tutorials/penalty.qmd b/docs/src/tutorials/penalty.qmd new file mode 100644 index 0000000..81ff9df --- /dev/null +++ b/docs/src/tutorials/penalty.qmd @@ -0,0 +1,171 @@ +--- +title: "How to specify custom Penalties" +engine: julia +execute: + echo: true + output: false + daemon: 3600 +format: + commonmark: + variant: -raw_html+tex_math_dollars + wrap: preserve +bibliography: twutz_txt.bib +--- + +``` @meta +CurrentModule = HybridVariationalInference +``` + +This guide shows how the user can specify a customized penalties to help +the solver to converge to global minimum. + +## Motivation +The basic cost in HVI is the negative log of the joint probability, i.e. +the likelihood of the observations given the parameters * prior probability +of the parameters. + +Sometimes there is additional knowledge not encoded in the prior, such as +one parameter must be larger than another, or entropy-weights of the +ML-parameters, and the solver accept a function to add additional loss terms. +The loglikelihood function assigns a cost to the mismatch between predictions and +observations. This often needs to be customized to the specific inversion. + +This guide walks through the specification of such additional penalties. + +First load necessary packages. 
+```{julia}
+using HybridVariationalInference
+using SimpleChains
+using ComponentArrays: ComponentArrays as CA
+using JLD2
+import StableRNGs
+```
+
+This tutorial reuses and modifies the fitted object saved at the end of the
+[Basic workflow without GPU](@ref) tutorial, that used a log-Likelihood
+function assuming observation error to be distributed independently normal.
+
+```{julia}
+fname = "intermediate/basic_cpu_results.jld2"
+print(abspath(fname))
+prob = probo_normal = load(fname, "probo");
+```
+
+## Write function to compute the penalty loss
+
+The function signature corresponds to the one described in [`compute_penalty`](@ref).
+
+In this example we want to avoid local minima when parameter, `r1`, is larger than
+95% of the maximum observation.
+
+```{julia}
+# compute the maximum of observed rates at each site
+y_obs = get_hybridproblem_train_dataloader(prob).data[3]
+const y_obs_max = map(col -> maximum(x -> isfinite(x) ? x : zero(x), col), eachcol(y_obs))
+
+function compute_penalty_r1(y_pred::AbstractMatrix, addq_pred::AbstractMatrix,
+        θMs_tr::AbstractMatrix, θP::AbstractVector, i_sites,
+        ϕg, ϕq::AbstractVector)
+    # get the maximum of current batch from closure of this function
+    y_obs_max_sites = y_obs_max[i_sites]
+    # add a penalty if r1 is larger than 0.95 times the maximum
+    penalty = max.(zero(eltype(θMs_tr)), θMs_tr[:,:r1] .- 0.95 .* y_obs_max_sites)
+    (; penalty)
+end
+```
+
+The PenaltyComputer receives argument, `i_sites`, which can be used to index precomputed observation maxima.
+
+## Update the problem and redo the inversion
+
+HybridProblem has keyword argument `penalty_computer` to specify the Callable
+that computes the penalty. It defaults to `ZeroPenaltyComputer`, which
+returns zero penalty cost.
+
+We can pass the function directly or alternatively construct a [`CustomPenaltyComputer`](@ref) and update the problem.
+
+```{julia}
+#prob_pen = HybridProblem(prob; penalty_computer = compute_penalty_r1)
+penalty_computer = CustomPenaltyComputer(compute_penalty_r1)
+prob_pen = HybridProblem(prob; penalty_computer)
+
+using OptimizationOptimisers
+import Zygote
+# silence warning of no GPU backend found (because we did not import CUDA here)
+ENV["MLDATADEVICES_SILENCE_WARN_NO_GPU"] = 1
+
+# first run a few iterations with updating only optimizing the mean
+solver_point = HybridPointSolver(; alg=Adam(0.02))
+(; probo) = solve(prob_pen, solver_point;
+    callback = callback_loss(100), # output during fitting
+    epochs = 5,
+); probo_pen_point = probo;
+
+# starting from this, also estimate the posterior uncertainty parameters
+solver = HybridPosteriorSolver(; alg=Adam(0.02), n_MC=3)
+(; probo) = solve(probo_pen_point, solver;
+    callback = callback_loss(100), # output during fitting
+    epochs = 5,
+);
+```
+
+## Inspect the computed maxima
+Function predict_hvi also evaluates the penalties. Internally, the penalty function is
+called for each sample, but only the average is computed and returned.
+```{julia}
+rng = StableRNGs.StableRNG(112)
+n_sample_pred = 200
+(; y, θsP, θsMs_tr, ζsP, ζsMs_tr, penalties) = predict_hvi(rng, probo; n_sample_pred);
+size(penalties)
+```
+
+The penalties object is a ComponentMatrix, and we can look at a specific site
+and a named component returned by
+
+```{julia}
+i_site = 3
+penalties[i_site, :penalty]
+```
+
+## Writing a customized PenaltyComputer
+
+In the above example, the maximum of the observations in the batch
+are accessed via a global variable.
+
+This can be improved. The precomputed maxima can be stored
+in a struct implementing type `AbstractPenaltyComputer`
+and function `compute_penalty`.
+ +```{julia} +struct R1PenaltyComputer{T} <: AbstractPenaltyComputer where T + r_max::Vector{T} +end +function R1PenaltyComputer(ys::AbstractMatrix) + r_max = 0.95 .* vec(maximum(ys; dims = 1)) + R1PenaltyComputer(r_max) +end +function HybridVariationalInference.compute_penalty( + pc::R1PenaltyComputer, + y_pred::AbstractMatrix, addq_pred::AbstractMatrix, θMs_tr::AbstractMatrix, θP::AbstractVector, + i_sites::AbstractVector, + ϕg, ϕq::AbstractVector + ) + # @assert pc.r_max[i_sites] == 0.95 .* map(col -> maximum(x -> isfinite(x) ? x : zero(x), col), eachcol(y_obs)) + # add a penalty if r1 is larger r_max + penalty = max.(zero(eltype(θMs_tr)), θMs_tr[:,:r1] .- pc.r_max[i_sites]) + (;penalty) +end + +penalty_computer = R1PenaltyComputer(y_obs) +``` + +Rerunning the inversion using with the update PenaltyComputer: +```{julia} +prob_pen = HybridProblem(probo; penalty_computer) +(; probo) = solve(prob_pen, solver; + callback = callback_loss(100), # output during fitting + epochs = 5, +); +``` + + diff --git a/ext/HybridVariationalInferenceDataFramesExt.jl b/ext/HybridVariationalInferenceDataFramesExt.jl new file mode 100644 index 0000000..53ab8d2 --- /dev/null +++ b/ext/HybridVariationalInferenceDataFramesExt.jl @@ -0,0 +1,53 @@ +module HybridVariationalInferenceDataFramesExt + +using DataFrames +using ComponentArrays: ComponentArrays as CA +using HybridVariationalInference: HybridVariationalInference as HVI +import LinearAlgebra + +function HVI.as_data_frame(cm::CA.ComponentMatrix) + if (CA.getaxes(cm)[1] isa CA.Axis) && length(keys(CA.getaxes(cm)[1])) == size(cm,1) + DataFrame((k => cm[k,:] for k in keys(cm[:,1]))...) + elseif (CA.getaxes(cm)[2] isa CA.Axis) && length(keys(CA.getaxes(cm)[2])) == size(cm,2) + DataFrame((k => cm[:,k] for k in keys(cm[1,:]))...) 
+    else
+        error("first or second axis must be a scalar axis, but got $(CA.getaxes(cm))")
+    end
+end
+
+function HVI.as_data_frame(cmt::LinearAlgebra.Adjoint{T, <:CA.ComponentMatrix}) where T
+    cm = cmt'
+    HVI.as_data_frame(cm)
+end
+
+function HVI.as_data_frame(cma::CA.ComponentArray{T,3}) where T
+    if (CA.getaxes(cma)[1] isa CA.Axis) && length(keys(CA.getaxes(cma)[1])) == size(cma,1)
+        df = DataFrame((k => vec(cma[k,:,:]) for k in keys(cma[:,1,1]))...)
+        df.dim3 = vcat(fill.(axes(cma,3), size(cma,2))...)
+    elseif (CA.getaxes(cma)[2] isa CA.Axis) && length(keys(CA.getaxes(cma)[2])) == size(cma,2)
+        df = DataFrame((k => vec(cma[:,k,:]) for k in keys(cma[1,:,1]))...)
+        df.dim3 = vcat(fill.(axes(cma,3), size(cma,1))...)
+    else
+        error("first or second axis must be an Axis, but got $(CA.getaxes(cma))")
+    end
+    df
+end
+
+function HVI.as_data_frame(cma4::CA.ComponentArray{T,4}) where T
+    if length(keys(CA.getaxes(cma4)[1])) == size(cma4,1)
+        df = DataFrame((k => vec(cma4[k,:,:,:]) for k in keys(cma4[:,1,1,1]))...)
+        dim3 = vcat(fill.(axes(cma4,3), size(cma4,2))...)
+    elseif length(keys(CA.getaxes(cma4)[2])) == size(cma4,2)
+        df = DataFrame((k => vec(cma4[:,k,:,:]) for k in keys(cma4[1,:,1,1]))...)
+        dim3 = vcat(fill.(axes(cma4,3), size(cma4,1))...)
+    else
+        error("first or second axis must be an Axis, but got $(CA.getaxes(cma4))")
+    end
+    df.dim3 = vcat(fill(dim3, size(cma4,4))...)
+    df.dim4 = vcat(fill.(axes(cma4,4), prod(size(cma4,d) for d in (1,3)))...)
+ df +end + + + +end # module diff --git a/ext/HybridVariationalInferenceFluxExt.jl b/ext/HybridVariationalInferenceFluxExt.jl index 315bc51..cf0cb6b 100644 --- a/ext/HybridVariationalInferenceFluxExt.jl +++ b/ext/HybridVariationalInferenceFluxExt.jl @@ -55,12 +55,7 @@ end function HVI.construct_3layer_MLApplicator( rng::AbstractRNG, prob::HVI.AbstractHybridProblem, ::Val{:Flux}; scenario::Val{scen}) where scen - (;θM) = get_hybridproblem_par_templates(prob; scenario) - n_out = length(θM) - n_covar = get_hybridproblem_n_covar(prob; scenario) - n_pbm_covars = length(get_hybridproblem_pbmpar_covars(prob; scenario)) - n_input = n_covar + n_pbm_covars - #(; n_covar, n_θM) = get_hybridproblem_sizes(prob; scenario) + (; n_input, n_output) = get_numberof_inputs_outputs(prob; scenario) float_type = get_hybridproblem_float_type(prob; scenario) is_using_dropout = :use_dropout ∈ scen is_using_dropout && error("dropout scenario not supported with Flux yet.") @@ -69,7 +64,7 @@ function HVI.construct_3layer_MLApplicator( Flux.Dense(n_input => n_input * 4, tanh), Flux.Dense(n_input * 4 => n_input * 4, tanh), # dense layer without bias that maps to n outputs and `logistic` activation - Flux.Dense(n_input * 4 => n_out, logistic, bias = false) + Flux.Dense(n_input * 4 => n_output, logistic, bias = false) ) construct_ChainsApplicator(rng, g_chain, float_type) end diff --git a/ext/HybridVariationalInferenceLuxExt.jl b/ext/HybridVariationalInferenceLuxExt.jl index c8995c9..d457c82 100644 --- a/ext/HybridVariationalInferenceLuxExt.jl +++ b/ext/HybridVariationalInferenceLuxExt.jl @@ -53,12 +53,7 @@ function HVI.construct_3layer_MLApplicator( rng::AbstractRNG, prob::HVI.AbstractHybridProblem, ::Val{:Lux}; scenario::Val{scen}, p_dropout = 0.2) where scen - (;θM) = get_hybridproblem_par_templates(prob; scenario) - n_out = length(θM) - n_covar = get_hybridproblem_n_covar(prob; scenario) - n_pbm_covars = length(get_hybridproblem_pbmpar_covars(prob; scenario)) - n_input = n_covar + 
n_pbm_covars - #(; n_covar, n_θM) = get_hybridproblem_sizes(prob; scenario) + (; n_input, n_output) = get_numberof_inputs_outputs(prob; scenario) float_type = get_hybridproblem_float_type(prob; scenario) is_using_dropout = :use_dropout ∈ scen g_chain = if is_using_dropout @@ -69,7 +64,7 @@ function HVI.construct_3layer_MLApplicator( Lux.Dense(n_input * 4 => n_input * 4, tanh), Lux.Dropout(p_dropout), # dense layer without bias that maps to n outputs and `logistic` activation - Lux.Dense(n_input * 4 => n_out, logistic, use_bias = false) + Lux.Dense(n_input * 4 => n_output, logistic, use_bias = false) ) else Lux.Chain( @@ -77,7 +72,7 @@ function HVI.construct_3layer_MLApplicator( Lux.Dense(n_input => n_input * 4, tanh), Lux.Dense(n_input * 4 => n_input * 4, tanh), # dense layer without bias that maps to n outputs and `logistic` activation - Lux.Dense(n_input * 4 => n_out, logistic, use_bias = false) + Lux.Dense(n_input * 4 => n_output, logistic, use_bias = false) ) end construct_ChainsApplicator(rng, g_chain, float_type) diff --git a/ext/HybridVariationalInferenceSimpleChainsExt.jl b/ext/HybridVariationalInferenceSimpleChainsExt.jl index 8c36453..39a3ed3 100644 --- a/ext/HybridVariationalInferenceSimpleChainsExt.jl +++ b/ext/HybridVariationalInferenceSimpleChainsExt.jl @@ -22,12 +22,8 @@ end function HVI.construct_3layer_MLApplicator( rng::AbstractRNG, prob::HVI.AbstractHybridProblem, ::Val{:SimpleChains}; scenario::Val{scen}) where scen - n_covar = get_hybridproblem_n_covar(prob; scenario) - n_pbm_covars = length(get_hybridproblem_pbmpar_covars(prob; scenario)) - n_input = n_covar + n_pbm_covars - FloatType = get_hybridproblem_float_type(prob; scenario) - (;θM) = get_hybridproblem_par_templates(prob; scenario) - n_out = length(θM) + (; n_input, n_output) = get_numberof_inputs_outputs(prob; scenario) + float_type = get_hybridproblem_float_type(prob; scenario) is_using_dropout = :use_dropout ∈ scen g_chain = if is_using_dropout SimpleChain( @@ -38,7 +34,7 @@ function 
HVI.construct_3layer_MLApplicator( TurboDense{true}(tanh, n_input * 4), SimpleChains.Dropout(0.2), # dense layer without bias that maps to n outputs and `logistic` activation - TurboDense{false}(logistic, n_out) + TurboDense{false}(logistic, n_output) ) else SimpleChain( @@ -47,10 +43,10 @@ function HVI.construct_3layer_MLApplicator( TurboDense{true}(tanh, n_input * 4), TurboDense{true}(tanh, n_input * 4), # dense layer without bias that maps to n outputs and `logistic` activation - TurboDense{false}(logistic, n_out) + TurboDense{false}(logistic, n_output) ) end - construct_ChainsApplicator(rng, g_chain, FloatType) + construct_ChainsApplicator(rng, g_chain, float_type) end end # module diff --git a/intermediate/basic_cpu_results.jld2 b/intermediate/basic_cpu_results.jld2 new file mode 100644 index 0000000..9ff7979 Binary files /dev/null and b/intermediate/basic_cpu_results.jld2 differ diff --git a/projects/clustered_sites/CovDiDj_shared_component.md b/projects/clustered_sites/CovDiDj_shared_component.md new file mode 100644 index 0000000..248087a --- /dev/null +++ b/projects/clustered_sites/CovDiDj_shared_component.md @@ -0,0 +1,56 @@ +Setup +Let $z_{is}, z_{js}, z_{ls} \overset{\text{iid}}{\sim} \mathcal{N}(0,1)$ (independent scalar Gaussians, one coordinate $s$ from each whitened observation). Define: +$$a_s = z_{is} - z_{js}, \qquad b_s = z_{js} - z_{ls}.$$ +We need $\mathbb{E}[a_s^2]$, $\mathbb{E}[b_s^2]$, and $\mathbb{E}[a_s^2 b_s^2]$. +Step 1: $\mathbb{E}[a_s^2]$ and $\mathbb{E}[b_s^2]$ +Since $z_{is}, z_{js} \overset{\text{iid}}{\sim} \mathcal{N}(0,1)$: +$$\mathbb{E}[a_s^2] = \operatorname{Var}(z_{is} - z_{js}) = \operatorname{Var}(z_{is}) + \operatorname{Var}(z_{js}) = 1 + 1 = 2,$$ +and identically $\mathbb{E}[b_s^2] = 2$. 
+Step 2: $\mathbb{E}[a_s^2 b_s^2]$ via explicit expansion
+Substitute the definitions:
+$$\mathbb{E}[a_s^2 b_s^2] = \mathbb{E}\!\left[(z_{is} - z_{js})^2(z_{js} - z_{ls})^2\right].$$
+Expand each square:
+$$= \mathbb{E}\!\left[(z_{is}^2 - 2z_{is}z_{js} + z_{js}^2)(z_{js}^2 - 2z_{js}z_{ls} + z_{ls}^2)\right].$$
+Expanding the full product gives 9 terms:
+$$= \mathbb{E}\Big[ z_{is}^2 z_{js}^2
+ - 2z_{is}^2 z_{js}z_{ls}
+ + z_{is}^2 z_{ls}^2
+ - 2z_{is}z_{js}^3
+\\
+ + 4z_{is}z_{js}^2 z_{ls}
+ - 2z_{is}z_{js}z_{ls}^2
+ + z_{js}^4
+ - 2z_{js}^3 z_{ls}
+ + z_{js}^2 z_{ls}^2 \Big].$$
+
+
+Step 3: Evaluate each expectation
+Using independence of $z_{is}, z_{js}, z_{ls}$ and the standard Gaussian moments:
+
+$$\mathbb{E}[z^k] = \begin{cases} 0 & k \text{ odd} \\
+1 & k=2 \\
+3 & k=4
+\end{cases}$$
+
+Term Expectation Value
+- 1 $\mathbb{E}[z_{is}^2]\mathbb{E}[z_{js}^2]$ $1 \cdot 1 = 1$
+- 2 $-2\mathbb{E}[z_{is}^2]\mathbb{E}[z_{js}]\mathbb{E}[z_{ls}]$ $-2 \cdot 1 \cdot 0 \cdot 0 = 0$
+- 3 $\mathbb{E}[z_{is}^2]\mathbb{E}[z_{ls}^2]$ $1 \cdot 1 = 1$
+- 4 $-2\mathbb{E}[z_{is}]\mathbb{E}[z_{js}^3]$ $-2 \cdot 0 \cdot 0 = 0$
+- 5 $4\mathbb{E}[z_{is}]\mathbb{E}[z_{js}^2]\mathbb{E}[z_{ls}]$ $4 \cdot 0 \cdot 1 \cdot 0 = 0$
+- 6 $-2\mathbb{E}[z_{is}]\mathbb{E}[z_{js}]\mathbb{E}[z_{ls}^2]$ $-2 \cdot 0 \cdot 0 \cdot 1 = 0$
+- 7 $\mathbb{E}[z_{js}^4]$ $3$
+- 8 $-2\mathbb{E}[z_{js}^3]\mathbb{E}[z_{ls}]$ $-2 \cdot 0 \cdot 0 = 0$
+- 9 $\mathbb{E}[z_{js}^2]\mathbb{E}[z_{ls}^2]$ $1 \cdot 1 = 1$
+
+Summing all non-zero contributions:
+$$\mathbb{E}[a_s^2 b_s^2] = 1 + 0 + 1 + 0 + 0 + 0 + 3 + 0 + 1 = \mathbf{6}.$$
+Step 4: The Covariance
+$$\operatorname{Cov}(a_s^2, b_s^2) = \mathbb{E}[a_s^2 b_s^2] - \mathbb{E}[a_s^2]\mathbb{E}[b_s^2] = 6 - 2 \cdot 2 = \boxed{2}.$$
diff --git a/projects/clustered_sites/Manifest.toml b/projects/clustered_sites/Manifest.toml
new file mode 100644
index 0000000..638a6c5
--- /dev/null
+++ b/projects/clustered_sites/Manifest.toml
@@ -0,0 +1,3587 @@
+#
This file is machine-generated - editing it directly is not advised + +julia_version = "1.11.9" +manifest_format = "2.0" +project_hash = "ea129814c6941af8ca70add5ad2460827f181351" + +[[deps.ADTypes]] +git-tree-sha1 = "f7304359109c768cf32dc5fa2d371565bb63b68a" +uuid = "47edcb42-4c32-4615-8424-f2b9edc5f35b" +version = "1.21.0" +weakdeps = ["ChainRulesCore", "ConstructionBase", "EnzymeCore"] + + [deps.ADTypes.extensions] + ADTypesChainRulesCoreExt = "ChainRulesCore" + ADTypesConstructionBaseExt = "ConstructionBase" + ADTypesEnzymeCoreExt = "EnzymeCore" + +[[deps.AbstractFFTs]] +deps = ["LinearAlgebra"] +git-tree-sha1 = "d92ad398961a3ed262d8bf04a1a2b8340f915fef" +uuid = "621f4979-c628-5d54-868e-fcf4e3e8185c" +version = "1.5.0" +weakdeps = ["ChainRulesCore", "Test"] + + [deps.AbstractFFTs.extensions] + AbstractFFTsChainRulesCoreExt = "ChainRulesCore" + AbstractFFTsTestExt = "Test" + +[[deps.AbstractMCMC]] +deps = ["BangBang", "ConsoleProgressMonitor", "Dates", "Distributed", "LogDensityProblems", "Logging", "LoggingExtras", "ProgressLogging", "Random", "StatsBase", "TerminalLoggers", "UUIDs"] +git-tree-sha1 = "8ac6182431567907e0d5170bcac6dd48fa541f78" +uuid = "80f14c24-f653-4e6a-9b94-39d6b0f70001" +version = "5.15.1" + + [deps.AbstractMCMC.extensions] + AbstractMCMCOnlineStatsExt = "OnlineStats" + AbstractMCMCTensorBoardLoggerExt = "TensorBoardLogger" + + [deps.AbstractMCMC.weakdeps] + OnlineStats = "a15396b6-48d5-5d58-9928-6d29437db91e" + TensorBoardLogger = "899adc3e-224a-11e9-021f-63837185c80f" + +[[deps.AbstractPPL]] +deps = ["AbstractMCMC", "Accessors", "BangBang", "DensityInterface", "JSON", "LinearAlgebra", "MacroTools", "OrderedCollections", "Random", "StatsBase"] +git-tree-sha1 = "e7be2de9646c1f61332de9f1e32c7dedf1e00831" +uuid = "7a57a42e-76ec-4ea3-a279-07e840d6d9cf" +version = "0.14.2" +weakdeps = ["Distributions"] + + [deps.AbstractPPL.extensions] + AbstractPPLDistributionsExt = ["Distributions", "LinearAlgebra"] + +[[deps.AbstractTrees]] +git-tree-sha1 = 
"2d9c9a55f9c93e8887ad391fbae72f8ef55e1177" +uuid = "1520ce14-60c1-5f80-bbc7-55ef81b5835c" +version = "0.4.5" + +[[deps.Accessors]] +deps = ["CompositionsBase", "ConstructionBase", "Dates", "InverseFunctions", "MacroTools"] +git-tree-sha1 = "2eeb2c9bef11013efc6f8f97f32ee59b146b09fb" +uuid = "7d9f7c33-5ae7-4f3b-8dc6-eff91059b697" +version = "0.1.44" + + [deps.Accessors.extensions] + AxisKeysExt = "AxisKeys" + IntervalSetsExt = "IntervalSets" + LinearAlgebraExt = "LinearAlgebra" + StaticArraysExt = "StaticArrays" + StructArraysExt = "StructArrays" + TestExt = "Test" + UnitfulExt = "Unitful" + + [deps.Accessors.weakdeps] + AxisKeys = "94b1ba4f-4ee9-5380-92f1-94cde586c3c5" + IntervalSets = "8197267c-284f-5f27-9208-e0e47529a953" + LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" + StaticArrays = "90137ffa-7385-5640-81b9-e52037218182" + StructArrays = "09ab397b-f2b6-538f-b94a-2f83cf4a842a" + Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" + Unitful = "1986cc42-f94f-5a68-af5c-568840ba703d" + +[[deps.Adapt]] +deps = ["LinearAlgebra", "Requires"] +git-tree-sha1 = "0761717147821d696c9470a7a86364b2fbd22fd8" +uuid = "79e6a3ab-5dfb-504d-930d-738a2a938a0e" +version = "4.5.2" +weakdeps = ["SparseArrays", "StaticArrays"] + + [deps.Adapt.extensions] + AdaptSparseArraysExt = "SparseArrays" + AdaptStaticArraysExt = "StaticArrays" + +[[deps.AdaptivePredicates]] +git-tree-sha1 = "7e651ea8d262d2d74ce75fdf47c4d63c07dba7a6" +uuid = "35492f91-a3bd-45ad-95db-fcad7dcfedb7" +version = "1.2.0" + +[[deps.AlgebraOfGraphics]] +deps = ["Accessors", "Colors", "DataAPI", "Dates", "Dictionaries", "FileIO", "GLM", "GeoInterface", "GeometryBasics", "GridLayoutBase", "Isoband", "KernelDensity", "Loess", "Makie", "NaturalSort", "PlotUtils", "PolygonOps", "PooledArrays", "PrecompileTools", "RelocatableFolders", "StatsBase", "StructArrays", "Tables"] +git-tree-sha1 = "1e2ae4d0ebdfdbc297864db404df47b71d49d025" +uuid = "cbdf2221-f076-402e-a563-3d30da359d67" +version = "0.12.7" + + 
[deps.AlgebraOfGraphics.extensions] + AlgebraOfGraphicsDynamicQuantitiesExt = "DynamicQuantities" + AlgebraOfGraphicsUnitfulExt = "Unitful" + + [deps.AlgebraOfGraphics.weakdeps] + DynamicQuantities = "06fc5a27-2a28-4c7c-a15d-362465fb6821" + Unitful = "1986cc42-f94f-5a68-af5c-568840ba703d" + +[[deps.AliasTables]] +deps = ["PtrArrays", "Random"] +git-tree-sha1 = "9876e1e164b144ca45e9e3198d0b689cadfed9ff" +uuid = "66dad0bd-aa9a-41b7-9441-69ab47430ed8" +version = "1.1.3" + +[[deps.Animations]] +deps = ["Colors"] +git-tree-sha1 = "e092fa223bf66a3c41f9c022bd074d916dc303e7" +uuid = "27a7e980-b3e6-11e9-2bcd-0b925532e340" +version = "0.4.2" + +[[deps.ArgCheck]] +git-tree-sha1 = "f9e9a66c9b7be1ad7372bbd9b062d9230c30c5ce" +uuid = "dce04be8-c92d-5529-be00-80e4d2c0e197" +version = "2.5.0" + +[[deps.ArgTools]] +uuid = "0dad84c5-d112-42e6-8d28-ef12dabb789f" +version = "1.1.2" + +[[deps.Arpack]] +deps = ["Arpack_jll", "Libdl", "LinearAlgebra", "Logging"] +git-tree-sha1 = "9b9b347613394885fd1c8c7729bfc60528faa436" +uuid = "7d9fca2a-8960-54d3-9f78-7d1dccf2cb97" +version = "0.5.4" + +[[deps.Arpack_jll]] +deps = ["Artifacts", "CompilerSupportLibraries_jll", "JLLWrappers", "Libdl", "libblastrampoline_jll"] +git-tree-sha1 = "7f54761502ff149a9d492e4acefe9805898e29b3" +uuid = "68821587-b530-5797-8361-c406ea357684" +version = "3.5.2+0" + +[[deps.ArrayInterface]] +deps = ["Adapt", "LinearAlgebra"] +git-tree-sha1 = "78b3a7a536b4b0a747a0f296ea77091ca0a9f9a3" +uuid = "4fba245c-0d91-5ea0-9b3e-6abc04ee57a9" +version = "7.23.0" + + [deps.ArrayInterface.extensions] + ArrayInterfaceAMDGPUExt = "AMDGPU" + ArrayInterfaceBandedMatricesExt = "BandedMatrices" + ArrayInterfaceBlockBandedMatricesExt = "BlockBandedMatrices" + ArrayInterfaceCUDAExt = "CUDA" + ArrayInterfaceCUDSSExt = ["CUDSS", "CUDA"] + ArrayInterfaceChainRulesCoreExt = "ChainRulesCore" + ArrayInterfaceChainRulesExt = "ChainRules" + ArrayInterfaceGPUArraysCoreExt = "GPUArraysCore" + ArrayInterfaceMetalExt = "Metal" + 
ArrayInterfaceReverseDiffExt = "ReverseDiff" + ArrayInterfaceSparseArraysExt = "SparseArrays" + ArrayInterfaceStaticArraysCoreExt = "StaticArraysCore" + ArrayInterfaceTrackerExt = "Tracker" + + [deps.ArrayInterface.weakdeps] + AMDGPU = "21141c5a-9bdb-4563-92ae-f87d6854732e" + BandedMatrices = "aae01518-5342-5314-be14-df237901396f" + BlockBandedMatrices = "ffab5731-97b5-5995-9138-79e8c1846df0" + CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba" + CUDSS = "45b445bb-4962-46a0-9369-b4df9d0f772e" + ChainRules = "082447d4-558c-5d27-93f4-14fc19e9eca2" + ChainRulesCore = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4" + GPUArraysCore = "46192b85-c4d5-4398-a991-12ede77f4527" + Metal = "dde4c033-4e86-420c-a63e-0dd931031962" + ReverseDiff = "37e2e3b7-166d-5795-8a7a-e32c996b4267" + SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf" + StaticArraysCore = "1e83bf80-4336-4d27-bf5d-d5a4f845583c" + Tracker = "9f7883ad-71c0-57eb-9f7f-b5c9e6d3789c" + +[[deps.Artifacts]] +uuid = "56f22d72-fd6d-98f1-02f0-08ddc0907c33" +version = "1.11.0" + +[[deps.Atomix]] +deps = ["UnsafeAtomics"] +git-tree-sha1 = "b8651b2eb5796a386b0398a20b519a6a6150f75c" +uuid = "a9b6321e-bd34-4604-b9c9-b65b8de01458" +version = "1.1.3" + + [deps.Atomix.extensions] + AtomixCUDAExt = "CUDA" + AtomixMetalExt = "Metal" + AtomixOpenCLExt = "OpenCL" + AtomixoneAPIExt = "oneAPI" + + [deps.Atomix.weakdeps] + CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba" + Metal = "dde4c033-4e86-420c-a63e-0dd931031962" + OpenCL = "08131aa3-fb12-5dee-8b74-c09406e224a2" + oneAPI = "8f75cd03-7ff8-4ecb-9b8f-daf728133b1b" + +[[deps.Automa]] +deps = ["PrecompileTools", "SIMD", "TranscodingStreams"] +git-tree-sha1 = "a8f503e8e1a5f583fbef15a8440c8c7e32185df2" +uuid = "67c07d97-cdcb-5c2c-af73-a7f9c32a568b" +version = "1.1.0" + +[[deps.AxisAlgorithms]] +deps = ["LinearAlgebra", "Random", "SparseArrays", "WoodburyMatrices"] +git-tree-sha1 = "01b8ccb13d68535d73d2b0c23e39bd23155fb712" +uuid = "13072b0f-2c55-5437-9ae7-d433b7a33950" +version = "1.1.0" + 
+[[deps.AxisArrays]] +deps = ["Dates", "IntervalSets", "IterTools", "RangeArrays"] +git-tree-sha1 = "4126b08903b777c88edf1754288144a0492c05ad" +uuid = "39de3d68-74b9-583c-8d2d-e117c070f3a9" +version = "0.4.8" + +[[deps.BFloat16s]] +deps = ["LinearAlgebra", "Printf", "Random"] +git-tree-sha1 = "e386db8b4753b42caac75ac81d0a4fe161a68a97" +uuid = "ab4f0b2a-ad5b-11e8-123f-65d77653426b" +version = "0.6.1" + +[[deps.BangBang]] +deps = ["Accessors", "ConstructionBase", "InitialValues", "LinearAlgebra"] +git-tree-sha1 = "cceb62468025be98d42a5dc581b163c20896b040" +uuid = "198e06fe-97b7-11e9-32a5-e1d131e6ad66" +version = "0.4.9" + + [deps.BangBang.extensions] + BangBangChainRulesCoreExt = "ChainRulesCore" + BangBangDataFramesExt = "DataFrames" + BangBangStaticArraysExt = "StaticArrays" + BangBangStructArraysExt = "StructArrays" + BangBangTablesExt = "Tables" + BangBangTypedTablesExt = "TypedTables" + + [deps.BangBang.weakdeps] + ChainRulesCore = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4" + DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0" + StaticArrays = "90137ffa-7385-5640-81b9-e52037218182" + StructArrays = "09ab397b-f2b6-538f-b94a-2f83cf4a842a" + Tables = "bd369af6-aec1-5ad0-b16a-f7cc5008161c" + TypedTables = "9d95f2ec-7b3d-5a63-8d20-e2491e220bb9" + +[[deps.Base64]] +uuid = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f" +version = "1.11.0" + +[[deps.BaseDirs]] +git-tree-sha1 = "bca794632b8a9bbe159d56bf9e31c422671b35e0" +uuid = "18cc8868-cbac-4acf-b575-c8ff214dc66f" +version = "1.3.2" + +[[deps.Baselet]] +git-tree-sha1 = "aebf55e6d7795e02ca500a689d326ac979aaf89e" +uuid = "9718e550-a3fa-408a-8086-8db961cd8217" +version = "0.1.1" + +[[deps.Bijectors]] +deps = ["AbstractPPL", "ArgCheck", "ChainRulesCore", "ChangesOfVariables", "DifferentiationInterface", "Distributions", "DocStringExtensions", "EnzymeCore", "FillArrays", "Functors", "InverseFunctions", "IrrationalConstants", "LinearAlgebra", "LogExpFunctions", "MappedArrays", "Random", "Reexport", "Roots", "SparseArrays", "Statistics", 
"Test"] +git-tree-sha1 = "d6ee8f89dd20f933fbdad578a798e32babb617ee" +uuid = "76274a88-744f-5084-9051-94815aaf08c4" +version = "0.15.20" + + [deps.Bijectors.extensions] + BijectorsDistributionsADExt = "DistributionsAD" + BijectorsForwardDiffExt = "ForwardDiff" + BijectorsLazyArraysExt = "LazyArrays" + BijectorsMooncakeExt = "Mooncake" + BijectorsReverseDiffChainRulesExt = ["ChainRules", "ReverseDiff"] + BijectorsReverseDiffExt = "ReverseDiff" + + [deps.Bijectors.weakdeps] + ChainRules = "082447d4-558c-5d27-93f4-14fc19e9eca2" + DistributionsAD = "ced4e74d-a319-5a8a-b0ac-84af2272839c" + ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210" + LazyArrays = "5078a376-72f3-5289-bfd5-ec5146d43c02" + Mooncake = "da2b9cff-9c12-43a0-ae48-6db2b0edb7d6" + ReverseDiff = "37e2e3b7-166d-5795-8a7a-e32c996b4267" + +[[deps.BitFlags]] +git-tree-sha1 = "0691e34b3bb8be9307330f88d1a3c3f25466c24d" +uuid = "d1d4a3ce-64b1-5f1a-9ba4-7e7e69966f35" +version = "0.1.9" + +[[deps.BitTwiddlingConvenienceFunctions]] +deps = ["Static"] +git-tree-sha1 = "f21cfd4950cb9f0587d5067e69405ad2acd27b87" +uuid = "62783981-4cbd-42fc-bca8-16325de8dc4b" +version = "0.1.6" + +[[deps.BlockDiagonals]] +deps = ["FillArrays", "LinearAlgebra"] +git-tree-sha1 = "6e0ac86a90783f36f6daa496acf39ca138be8922" +uuid = "0a1fb500-61f7-11e9-3c65-f5ef3456f9f0" +version = "0.2.0" +weakdeps = ["ChainRulesCore"] + + [deps.BlockDiagonals.extensions] + ChainRulesCoreExt = "ChainRulesCore" + +[[deps.Bzip2_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl"] +git-tree-sha1 = "1b96ea4a01afe0ea4090c5c8039690672dd13f2e" +uuid = "6e34b625-4abd-537c-b88f-471c36dfa7a0" +version = "1.0.9+0" + +[[deps.CEnum]] +git-tree-sha1 = "389ad5c84de1ae7cf0e28e381131c98ea87d54fc" +uuid = "fa961155-64e5-5f13-b03f-caf6b980ea82" +version = "0.5.0" + +[[deps.CPUSummary]] +deps = ["CpuId", "IfElse", "PrecompileTools", "Preferences", "Static"] +git-tree-sha1 = "f3a21d7fc84ba618a779d1ed2fcca2e682865bab" +uuid = "2a0fbf3d-bb9c-48f3-b0a9-814d99fd7ab9" +version = 
"0.2.7" + +[[deps.CRC32c]] +uuid = "8bf52ea8-c179-5cab-976a-9e18b702a9bc" +version = "1.11.0" + +[[deps.CRlibm]] +deps = ["CRlibm_jll"] +git-tree-sha1 = "66188d9d103b92b6cd705214242e27f5737a1e5e" +uuid = "96374032-68de-5a5b-8d9e-752f78720389" +version = "1.0.2" + +[[deps.CRlibm_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] +git-tree-sha1 = "e329286945d0cfc04456972ea732551869af1cfc" +uuid = "4e9b3aee-d8a1-5a3d-ad8b-7d824db253f0" +version = "1.0.1+0" + +[[deps.CUDA]] +deps = ["AbstractFFTs", "Adapt", "BFloat16s", "CEnum", "CUDA_Compiler_jll", "CUDA_Driver_jll", "CUDA_Runtime_Discovery", "CUDA_Runtime_jll", "Crayons", "ExprTools", "GPUArrays", "GPUCompiler", "GPUToolbox", "KernelAbstractions", "LLVM", "LLVMLoopInfo", "LazyArtifacts", "Libdl", "LinearAlgebra", "Logging", "NVTX", "Preferences", "PrettyTables", "Printf", "Random", "Random123", "RandomNumbers", "Reexport", "SparseArrays", "StaticArrays", "Statistics", "demumble_jll"] +git-tree-sha1 = "ea6a2ab8307059b6c9ea186ff7dfcd032a13b731" +uuid = "052768ef-5323-5732-b1bb-66c8b64840ba" +version = "5.11.0" + + [deps.CUDA.extensions] + ChainRulesCoreExt = "ChainRulesCore" + EnzymeCoreExt = "EnzymeCore" + SparseMatricesCSRExt = "SparseMatricesCSR" + SpecialFunctionsExt = "SpecialFunctions" + + [deps.CUDA.weakdeps] + ChainRulesCore = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4" + EnzymeCore = "f151be2c-9106-41f4-ab19-57ee4f262869" + SparseMatricesCSR = "a0a7dd2c-ebf4-11e9-1f05-cf50bc540ca1" + SpecialFunctions = "276daf66-3868-5448-9aa4-cd146d93841b" + +[[deps.CUDA_Compiler_jll]] +deps = ["Artifacts", "CUDA_Driver_jll", "CUDA_Runtime_jll", "JLLWrappers", "LazyArtifacts", "Libdl", "TOML"] +git-tree-sha1 = "8c19e97de5b7574672e4a7a3abd55714ad66d59a" +uuid = "d1e2174e-dfdc-576e-b43e-73b79eb1aca8" +version = "0.4.2+0" + +[[deps.CUDA_Driver_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "TOML"] +git-tree-sha1 = "061f39cc84e99928830aa1005d79f7e99097ba28" +uuid = "4ee394cb-3365-5eb0-8335-949819d2adfc" +version = 
"13.2.0+0" + +[[deps.CUDA_Runtime_Discovery]] +deps = ["Libdl"] +git-tree-sha1 = "f9a521f52d236fe49f1028d69e549e7f2644bb72" +uuid = "1af6417a-86b4-443c-805f-a4643ffb695f" +version = "1.0.0" + +[[deps.CUDA_Runtime_jll]] +deps = ["Artifacts", "CUDA_Driver_jll", "JLLWrappers", "LazyArtifacts", "Libdl", "TOML"] +git-tree-sha1 = "af17d37b5b8b4d7525f8902eba1ef6141a9a7d3b" +uuid = "76a88914-d11a-5bdc-97e0-2f5a05c973a2" +version = "0.21.0+0" + +[[deps.CUDNN_jll]] +deps = ["Artifacts", "CUDA_Runtime_jll", "JLLWrappers", "LazyArtifacts", "Libdl", "TOML"] +git-tree-sha1 = "70dea6a7133d2100a143b515a00d6d887e208500" +uuid = "62b44479-cb7b-5706-934f-f13b2eb2e645" +version = "9.20.0+0" + +[[deps.Cairo]] +deps = ["Cairo_jll", "Colors", "Glib_jll", "Graphics", "Libdl", "Pango_jll"] +git-tree-sha1 = "71aa551c5c33f1a4415867fe06b7844faadb0ae9" +uuid = "159f3aea-2a34-519c-b102-8c37f9878175" +version = "1.1.1" + +[[deps.CairoMakie]] +deps = ["CRC32c", "Cairo", "Cairo_jll", "Colors", "FileIO", "FreeType", "GeometryBasics", "LinearAlgebra", "Makie", "PrecompileTools"] +git-tree-sha1 = "fa072933899aae6dc61dde934febed8254e66c6a" +uuid = "13f3f980-e62b-5c42-98c6-ff1f3baf88f0" +version = "0.15.9" + +[[deps.Cairo_jll]] +deps = ["Artifacts", "Bzip2_jll", "CompilerSupportLibraries_jll", "Fontconfig_jll", "FreeType2_jll", "Glib_jll", "JLLWrappers", "Libdl", "Pixman_jll", "Xorg_libXext_jll", "Xorg_libXrender_jll", "Zlib_jll", "libpng_jll"] +git-tree-sha1 = "d0efe2c6fdcdaa1c161d206aa8b933788397ec71" +uuid = "83423d85-b0ee-5818-9007-b63ccbeb887a" +version = "1.18.6+0" + +[[deps.ChainRules]] +deps = ["Adapt", "ChainRulesCore", "Compat", "Distributed", "GPUArraysCore", "IrrationalConstants", "LinearAlgebra", "Random", "RealDot", "SparseArrays", "SparseInverseSubset", "Statistics", "StructArrays", "SuiteSparse"] +git-tree-sha1 = "3c190c570fb3108c09f838607386d10c71701789" +uuid = "082447d4-558c-5d27-93f4-14fc19e9eca2" +version = "1.73.0" + +[[deps.ChainRulesCore]] +deps = ["Compat", "LinearAlgebra"] 
+git-tree-sha1 = "12177ad6b3cad7fd50c8b3825ce24a99ad61c18f" +uuid = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4" +version = "1.26.1" +weakdeps = ["SparseArrays"] + + [deps.ChainRulesCore.extensions] + ChainRulesCoreSparseArraysExt = "SparseArrays" + +[[deps.ChangesOfVariables]] +deps = ["LinearAlgebra"] +git-tree-sha1 = "3aa4bf1532aa2e14e0374c4fd72bed9a9d0d0f6c" +uuid = "9e997f8a-9a97-42d5-a9f1-ce6bfc15e2c0" +version = "0.1.10" +weakdeps = ["InverseFunctions", "Test"] + + [deps.ChangesOfVariables.extensions] + ChangesOfVariablesInverseFunctionsExt = "InverseFunctions" + ChangesOfVariablesTestExt = "Test" + +[[deps.ChunkCodecCore]] +git-tree-sha1 = "1a3ad7e16a321667698a19e77362b35a1e94c544" +uuid = "0b6fb165-00bc-4d37-ab8b-79f91016dbe1" +version = "1.0.1" + +[[deps.ChunkCodecLibZlib]] +deps = ["ChunkCodecCore", "Zlib_jll"] +git-tree-sha1 = "cee8104904c53d39eb94fd06cbe60cb5acde7177" +uuid = "4c0bbee4-addc-4d73-81a0-b6caacae83c8" +version = "1.0.0" + +[[deps.ChunkCodecLibZstd]] +deps = ["ChunkCodecCore", "Zstd_jll"] +git-tree-sha1 = "34d9873079e4cb3d0c62926a225136824677073f" +uuid = "55437552-ac27-4d47-9aa3-63184e8fd398" +version = "1.0.0" + +[[deps.CloseOpenIntervals]] +deps = ["Static", "StaticArrayInterface"] +git-tree-sha1 = "05ba0d07cd4fd8b7a39541e31a7b0254704ea581" +uuid = "fb6a15b2-703c-40df-9091-08a04967cfa9" +version = "0.1.13" + +[[deps.Clustering]] +deps = ["Distances", "LinearAlgebra", "NearestNeighbors", "Printf", "Random", "SparseArrays", "Statistics", "StatsBase"] +git-tree-sha1 = "3e22db924e2945282e70c33b75d4dde8bfa44c94" +uuid = "aaaa29a8-35af-508c-8bc3-b662a17a0fe5" +version = "0.15.8" + +[[deps.CodecZlib]] +deps = ["TranscodingStreams", "Zlib_jll"] +git-tree-sha1 = "962834c22b66e32aa10f7611c08c8ca4e20749a9" +uuid = "944b1d66-785c-5afd-91f1-9de20f533193" +version = "0.7.8" + +[[deps.CodecZstd]] +deps = ["TranscodingStreams", "Zstd_jll"] +git-tree-sha1 = "da54a6cd93c54950c15adf1d336cfd7d71f51a56" +uuid = "6b39b394-51ab-5f42-8807-6242bab2b4c2" +version = 
"0.8.7" + +[[deps.ColorBrewer]] +deps = ["Colors", "JSON"] +git-tree-sha1 = "07da79661b919001e6863b81fc572497daa58349" +uuid = "a2cac450-b92f-5266-8821-25eda20663c8" +version = "0.4.2" + +[[deps.ColorSchemes]] +deps = ["ColorTypes", "ColorVectorSpace", "Colors", "FixedPointNumbers", "PrecompileTools", "Random"] +git-tree-sha1 = "b0fd3f56fa442f81e0a47815c92245acfaaa4e34" +uuid = "35d6a980-a343-548e-a6ea-1d62b119f2f4" +version = "3.31.0" + +[[deps.ColorTypes]] +deps = ["FixedPointNumbers", "Random"] +git-tree-sha1 = "67e11ee83a43eb71ddc950302c53bf33f0690dfe" +uuid = "3da002f7-5984-5a60-b8a6-cbb66c0b333f" +version = "0.12.1" +weakdeps = ["StyledStrings"] + + [deps.ColorTypes.extensions] + StyledStringsExt = "StyledStrings" + +[[deps.ColorVectorSpace]] +deps = ["ColorTypes", "FixedPointNumbers", "LinearAlgebra", "Requires", "Statistics", "TensorCore"] +git-tree-sha1 = "8b3b6f87ce8f65a2b4f857528fd8d70086cd72b1" +uuid = "c3611d14-8923-5661-9e6a-0046d554d3a4" +version = "0.11.0" +weakdeps = ["SpecialFunctions"] + + [deps.ColorVectorSpace.extensions] + SpecialFunctionsExt = "SpecialFunctions" + +[[deps.Colors]] +deps = ["ColorTypes", "FixedPointNumbers", "Reexport"] +git-tree-sha1 = "37ea44092930b1811e666c3bc38065d7d87fcc74" +uuid = "5ae59095-9a9b-59fe-a467-6f913c188581" +version = "0.13.1" + +[[deps.Combinatorics]] +git-tree-sha1 = "c761b00e7755700f9cdf5b02039939d1359330e1" +uuid = "861a8166-3701-5b0c-9a16-15d98fcdc6aa" +version = "1.1.0" + +[[deps.CommonSolve]] +git-tree-sha1 = "78ea4ddbcf9c241827e7035c3a03e2e456711470" +uuid = "38540f10-b2f7-11e9-35d8-d573e4eb0ff2" +version = "0.2.6" + +[[deps.CommonSubexpressions]] +deps = ["MacroTools"] +git-tree-sha1 = "cda2cfaebb4be89c9084adaca7dd7333369715c5" +uuid = "bbf7d656-a473-5ed7-a52c-81e309532950" +version = "0.3.1" + +[[deps.CommonWorldInvalidations]] +git-tree-sha1 = "ae52d1c52048455e85a387fbee9be553ec2b68d0" +uuid = "f70d9fcc-98c5-4d4a-abd7-e4cdeebd8ca8" +version = "1.0.0" + +[[deps.Compat]] +deps = ["TOML", "UUIDs"] 
+git-tree-sha1 = "9d8a54ce4b17aa5bdce0ea5c34bc5e7c340d16ad" +uuid = "34da2185-b29b-5c13-b0c7-acf172513d20" +version = "4.18.1" +weakdeps = ["Dates", "LinearAlgebra"] + + [deps.Compat.extensions] + CompatLinearAlgebraExt = "LinearAlgebra" + +[[deps.CompilerSupportLibraries_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "e66e0078-7015-5450-92f7-15fbd957f2ae" +version = "1.1.1+0" + +[[deps.ComponentArrays]] +deps = ["Adapt", "ArrayInterface", "ChainRulesCore", "ConstructionBase", "Functors", "LinearAlgebra", "StaticArrayInterface", "StaticArraysCore"] +git-tree-sha1 = "27abfdccf37d0a0c34c0c1594fb0fc1792ee0725" +uuid = "b0b7db55-cfe3-40fc-9ded-d10e2dbeff66" +version = "0.15.34" + + [deps.ComponentArrays.extensions] + ComponentArraysGPUArraysExt = "GPUArrays" + ComponentArraysKernelAbstractionsExt = "KernelAbstractions" + ComponentArraysMooncakeExt = "Mooncake" + ComponentArraysOptimisersExt = "Optimisers" + ComponentArraysReactantExt = "Reactant" + ComponentArraysRecursiveArrayToolsExt = "RecursiveArrayTools" + ComponentArraysReverseDiffExt = "ReverseDiff" + ComponentArraysSciMLBaseExt = "SciMLBase" + ComponentArraysTrackerExt = "Tracker" + ComponentArraysZygoteExt = "Zygote" + + [deps.ComponentArrays.weakdeps] + GPUArrays = "0c68f7d7-f131-5f86-a1c3-88cf8149b2d7" + KernelAbstractions = "63c18a36-062a-441e-b654-da1e3ab1ce7c" + Mooncake = "da2b9cff-9c12-43a0-ae48-6db2b0edb7d6" + Optimisers = "3bd65402-5787-11e9-1adc-39752487f4e2" + Reactant = "3c362404-f566-11ee-1572-e11a4b42c853" + RecursiveArrayTools = "731186ca-8d62-57ce-b412-fbd966d074cd" + ReverseDiff = "37e2e3b7-166d-5795-8a7a-e32c996b4267" + SciMLBase = "0bca4576-84f4-4d90-8ffe-ffa030f20462" + Tracker = "9f7883ad-71c0-57eb-9f7f-b5c9e6d3789c" + Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f" + +[[deps.CompositionsBase]] +git-tree-sha1 = "802bb88cd69dfd1509f6670416bd4434015693ad" +uuid = "a33af91c-f02d-484b-be07-31d278c5ca2b" +version = "0.1.2" +weakdeps = ["InverseFunctions"] + + [deps.CompositionsBase.extensions] + 
CompositionsBaseInverseFunctionsExt = "InverseFunctions" + +[[deps.ComputePipeline]] +deps = ["Observables", "Preferences"] +git-tree-sha1 = "3b4be73db165146d8a88e47924f464e55ab053cd" +uuid = "95dc2771-c249-4cd0-9c9f-1f3b4330693c" +version = "0.1.7" + +[[deps.ConcreteStructs]] +git-tree-sha1 = "f749037478283d372048690eb3b5f92a79432b34" +uuid = "2569d6c7-a4a2-43d3-a901-331e8e4be471" +version = "0.2.3" + +[[deps.ConcurrentUtilities]] +deps = ["Serialization", "Sockets"] +git-tree-sha1 = "21d088c496ea22914fe80906eb5bce65755e5ec8" +uuid = "f0e56b4a-5159-44fe-b623-3e5288b988bb" +version = "2.5.1" + +[[deps.ConsoleProgressMonitor]] +deps = ["Logging", "ProgressMeter"] +git-tree-sha1 = "3ab7b2136722890b9af903859afcf457fa3059e8" +uuid = "88cd18e8-d9cc-4ea6-8889-5259c0d15c8b" +version = "0.1.2" + +[[deps.ConstructionBase]] +git-tree-sha1 = "b4b092499347b18a015186eae3042f72267106cb" +uuid = "187b0558-2788-49d3-abe0-74a17ed4e7c9" +version = "1.6.0" +weakdeps = ["IntervalSets", "LinearAlgebra", "StaticArrays"] + + [deps.ConstructionBase.extensions] + ConstructionBaseIntervalSetsExt = "IntervalSets" + ConstructionBaseLinearAlgebraExt = "LinearAlgebra" + ConstructionBaseStaticArraysExt = "StaticArrays" + +[[deps.ContextVariablesX]] +deps = ["Compat", "Logging", "UUIDs"] +git-tree-sha1 = "25cc3803f1030ab855e383129dcd3dc294e322cc" +uuid = "6add18c4-b38d-439d-96f6-d6bc489c04c5" +version = "0.1.3" + +[[deps.Contour]] +git-tree-sha1 = "439e35b0b36e2e5881738abc8857bd92ad6ff9a8" +uuid = "d38c429a-6771-53c6-b99e-75d170b6e991" +version = "0.6.3" + +[[deps.CoreMath]] +deps = ["CoreMath_jll"] +git-tree-sha1 = "8c0480f92b1b1796239156a1b9b1bfb1b39499b4" +uuid = "b7a15901-be09-4a0e-87d2-2e66b0e09b5a" +version = "0.1.0" + +[[deps.CoreMath_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl"] +git-tree-sha1 = "a692a4c1dc59a4b8bc0b6403876eb3250fde2bc3" +uuid = "a38c48d9-6df1-5ac9-9223-b6ada3b5572b" +version = "0.1.0+0" + +[[deps.CpuId]] +deps = ["Markdown"] +git-tree-sha1 = 
"fcbb72b032692610bfbdb15018ac16a36cf2e406" +uuid = "adafc99b-e345-5852-983c-f28acb93d879" +version = "0.3.1" + +[[deps.Crayons]] +git-tree-sha1 = "249fe38abf76d48563e2f4556bebd215aa317e15" +uuid = "a8cc5b0e-0ffa-5ad4-8c14-923d3ee1735f" +version = "4.1.1" + +[[deps.DataAPI]] +git-tree-sha1 = "abe83f3a2f1b857aac70ef8b269080af17764bbe" +uuid = "9a962f9c-6df0-11e9-0e5d-c546b8b5ee8a" +version = "1.16.0" + +[[deps.DataFrames]] +deps = ["Compat", "DataAPI", "DataStructures", "Future", "InlineStrings", "InvertedIndices", "IteratorInterfaceExtensions", "LinearAlgebra", "Markdown", "Missings", "PooledArrays", "PrecompileTools", "PrettyTables", "Printf", "Random", "Reexport", "SentinelArrays", "SortingAlgorithms", "Statistics", "TableTraits", "Tables", "Unicode"] +git-tree-sha1 = "5fab31e2e01e70ad66e3e24c968c264d1cf166d6" +uuid = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0" +version = "1.8.2" + +[[deps.DataStructures]] +deps = ["OrderedCollections"] +git-tree-sha1 = "e86f4a2805f7f19bec5129bc9150c38208e5dc23" +uuid = "864edb3b-99cc-5e75-8d2d-829cb0a9cfe8" +version = "0.19.4" + +[[deps.DataValueInterfaces]] +git-tree-sha1 = "bfc1187b79289637fa0ef6d4436ebdfe6905cbd6" +uuid = "e2d170a0-9d28-54be-80f0-106bbe20a464" +version = "1.0.0" + +[[deps.Dates]] +deps = ["Printf"] +uuid = "ade2ca70-3891-5945-98fb-dc099432e06a" +version = "1.11.0" + +[[deps.Dbus_jll]] +deps = ["Artifacts", "Expat_jll", "JLLWrappers", "Libdl"] +git-tree-sha1 = "473e9afc9cf30814eb67ffa5f2db7df82c3ad9fd" +uuid = "ee1fde0b-3d02-5ea6-8484-8dfef6360eab" +version = "1.16.2+0" + +[[deps.DefineSingletons]] +git-tree-sha1 = "0fba8b706d0178b4dc7fd44a96a92382c9065c2c" +uuid = "244e2a9f-e319-4986-a169-4d1fe445cd52" +version = "0.1.2" + +[[deps.DelaunayTriangulation]] +deps = ["AdaptivePredicates", "EnumX", "ExactPredicates", "Random"] +git-tree-sha1 = "c55f5a9fd67bdbc8e089b5a3111fe4292986a8e8" +uuid = "927a84f5-c5f4-47a5-9785-b46e178433df" +version = "1.6.6" + +[[deps.DelimitedFiles]] +deps = ["Mmap"] +git-tree-sha1 = 
"9e2f36d3c96a820c678f2f1f1782582fcf685bae" +uuid = "8bb1440f-4735-579b-a4ab-409b98df4dab" +version = "1.9.1" + +[[deps.DensityInterface]] +deps = ["InverseFunctions", "Test"] +git-tree-sha1 = "80c3e8639e3353e5d2912fb3a1916b8455e2494b" +uuid = "b429d917-457f-4dbc-8f4c-0cc954292b1d" +version = "0.4.0" + +[[deps.Dictionaries]] +deps = ["Indexing", "Random", "Serialization"] +git-tree-sha1 = "a55766a9c8f66cf19ffcdbdb1444e249bb4ace33" +uuid = "85a47980-9c8c-11e8-2b9f-f7ca1fa99fb4" +version = "0.4.6" + +[[deps.DiffResults]] +deps = ["StaticArraysCore"] +git-tree-sha1 = "782dd5f4561f5d267313f23853baaaa4c52ea621" +uuid = "163ba53b-c6d8-5494-b064-1a9d43ac40c5" +version = "1.1.0" + +[[deps.DiffRules]] +deps = ["IrrationalConstants", "LogExpFunctions", "NaNMath", "Random", "SpecialFunctions"] +git-tree-sha1 = "23163d55f885173722d1e4cf0f6110cdbaf7e272" +uuid = "b552c78f-8df3-52c6-915a-8e097449b14b" +version = "1.15.1" + +[[deps.DifferentiationInterface]] +deps = ["ADTypes", "LinearAlgebra"] +git-tree-sha1 = "7ae99144ea44715402c6c882bfef2adbeadbc4ce" +uuid = "a0c0ee7d-e4b9-4e03-894e-1c5f64a51d63" +version = "0.7.16" + + [deps.DifferentiationInterface.extensions] + DifferentiationInterfaceChainRulesCoreExt = "ChainRulesCore" + DifferentiationInterfaceDiffractorExt = "Diffractor" + DifferentiationInterfaceEnzymeExt = ["EnzymeCore", "Enzyme"] + DifferentiationInterfaceFastDifferentiationExt = "FastDifferentiation" + DifferentiationInterfaceFiniteDiffExt = "FiniteDiff" + DifferentiationInterfaceFiniteDifferencesExt = "FiniteDifferences" + DifferentiationInterfaceForwardDiffExt = ["ForwardDiff", "DiffResults"] + DifferentiationInterfaceGPUArraysCoreExt = "GPUArraysCore" + DifferentiationInterfaceGTPSAExt = "GTPSA" + DifferentiationInterfaceMooncakeExt = "Mooncake" + DifferentiationInterfacePolyesterForwardDiffExt = ["PolyesterForwardDiff", "ForwardDiff", "DiffResults"] + DifferentiationInterfaceReverseDiffExt = ["ReverseDiff", "DiffResults"] + DifferentiationInterfaceSparseArraysExt 
= "SparseArrays" + DifferentiationInterfaceSparseConnectivityTracerExt = "SparseConnectivityTracer" + DifferentiationInterfaceSparseMatrixColoringsExt = "SparseMatrixColorings" + DifferentiationInterfaceStaticArraysExt = "StaticArrays" + DifferentiationInterfaceSymbolicsExt = "Symbolics" + DifferentiationInterfaceTrackerExt = "Tracker" + DifferentiationInterfaceZygoteExt = ["Zygote", "ForwardDiff"] + + [deps.DifferentiationInterface.weakdeps] + ChainRulesCore = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4" + DiffResults = "163ba53b-c6d8-5494-b064-1a9d43ac40c5" + Diffractor = "9f5e2b26-1114-432f-b630-d3fe2085c51c" + Enzyme = "7da242da-08ed-463a-9acd-ee780be4f1d9" + EnzymeCore = "f151be2c-9106-41f4-ab19-57ee4f262869" + FastDifferentiation = "eb9bf01b-bf85-4b60-bf87-ee5de06c00be" + FiniteDiff = "6a86dc24-6348-571c-b903-95158fe2bd41" + FiniteDifferences = "26cc04aa-876d-5657-8c51-4c34ba976000" + ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210" + GPUArraysCore = "46192b85-c4d5-4398-a991-12ede77f4527" + GTPSA = "b27dd330-f138-47c5-815b-40db9dd9b6e8" + Mooncake = "da2b9cff-9c12-43a0-ae48-6db2b0edb7d6" + PolyesterForwardDiff = "98d1487c-24ca-40b6-b7ab-df2af84e126b" + ReverseDiff = "37e2e3b7-166d-5795-8a7a-e32c996b4267" + SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf" + SparseConnectivityTracer = "9f842d2f-2579-4b1d-911e-f412cf18a3f5" + SparseMatrixColorings = "0a514795-09f3-496d-8182-132a7b665d35" + StaticArrays = "90137ffa-7385-5640-81b9-e52037218182" + Symbolics = "0c5d862f-8b57-4792-8d23-62f2024744c7" + Tracker = "9f7883ad-71c0-57eb-9f7f-b5c9e6d3789c" + Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f" + +[[deps.DispatchDoctor]] +deps = ["MacroTools", "Preferences"] +git-tree-sha1 = "42cd00edaac86f941815fe557c1d01e11913e07c" +uuid = "8d63f2c5-f18a-4cf2-ba9d-b3f60fc568c8" +version = "0.4.28" +weakdeps = ["ChainRulesCore", "EnzymeCore"] + + [deps.DispatchDoctor.extensions] + DispatchDoctorChainRulesCoreExt = "ChainRulesCore" + DispatchDoctorEnzymeCoreExt = "EnzymeCore" 
+ +[[deps.Distances]] +deps = ["LinearAlgebra", "Statistics", "StatsAPI"] +git-tree-sha1 = "c7e3a542b999843086e2f29dac96a618c105be1d" +uuid = "b4f34e82-e78d-54a5-968a-f98e89d6e8f7" +version = "0.10.12" +weakdeps = ["ChainRulesCore", "SparseArrays"] + + [deps.Distances.extensions] + DistancesChainRulesCoreExt = "ChainRulesCore" + DistancesSparseArraysExt = "SparseArrays" + +[[deps.Distributed]] +deps = ["Random", "Serialization", "Sockets"] +uuid = "8ba89e20-285c-5b6f-9357-94700520ee1b" +version = "1.11.0" + +[[deps.DistributionFits]] +deps = ["Distributions", "FillArrays", "LinearAlgebra", "Reexport", "Requires", "StaticArrays", "Statistics", "StatsAPI", "StatsFuns"] +git-tree-sha1 = "a50a9fbdcccc1cb08ff2e1f40c37a268a74b8879" +uuid = "45214091-1ed4-4409-9bcf-fdb48a05e921" +version = "0.3.9" + + [deps.DistributionFits.extensions] + DistributionFitsOptimExt = "Optim" + + [deps.DistributionFits.weakdeps] + Optim = "429524aa-4258-5aef-a3af-852621145aeb" + +[[deps.Distributions]] +deps = ["AliasTables", "FillArrays", "LinearAlgebra", "PDMats", "Printf", "QuadGK", "Random", "SpecialFunctions", "Statistics", "StatsAPI", "StatsBase", "StatsFuns"] +git-tree-sha1 = "12184a8cf11c7cbd90a4db8b2cb2f7b6f057cc46" +uuid = "31c24e10-a181-5473-b8eb-7969acd0382f" +version = "0.25.124" +weakdeps = ["ChainRulesCore", "DensityInterface", "Test"] + + [deps.Distributions.extensions] + DistributionsChainRulesCoreExt = "ChainRulesCore" + DistributionsDensityInterfaceExt = "DensityInterface" + DistributionsTestExt = "Test" + +[[deps.DocStringExtensions]] +git-tree-sha1 = "7442a5dfe1ebb773c29cc2962a8980f47221d76c" +uuid = "ffbed154-4ef7-542d-bbb7-c09d3a79fcae" +version = "0.9.5" + +[[deps.Downloads]] +deps = ["ArgTools", "FileWatching", "LibCURL", "NetworkOptions"] +uuid = "f43a241f-c20a-4ad4-852c-f6b1247861c6" +version = "1.6.0" + +[[deps.EarCut_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] +git-tree-sha1 = "e3290f2d49e661fbd94046d7e3726ffcb2d41053" +uuid = 
"5ae413db-bbd1-5e63-b57d-d24a61df00f5" +version = "2.2.4+0" + +[[deps.EnumX]] +git-tree-sha1 = "c49898e8438c828577f04b92fc9368c388ac783c" +uuid = "4e289a0a-7415-4d19-859d-a7e5c4648b56" +version = "1.0.7" + +[[deps.EnzymeCore]] +git-tree-sha1 = "24bbb6fc8fb87eb71c1f8d00184a60fc22c63903" +uuid = "f151be2c-9106-41f4-ab19-57ee4f262869" +version = "0.8.19" +weakdeps = ["Adapt", "ChainRulesCore"] + + [deps.EnzymeCore.extensions] + AdaptExt = "Adapt" + EnzymeCoreChainRulesCoreExt = "ChainRulesCore" + +[[deps.EpollShim_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl"] +git-tree-sha1 = "8a4be429317c42cfae6a7fc03c31bad1970c310d" +uuid = "2702e6a9-849d-5ed8-8c21-79e8b8f9ee43" +version = "0.0.20230411+1" + +[[deps.ExactPredicates]] +deps = ["IntervalArithmetic", "Random", "StaticArrays"] +git-tree-sha1 = "83231673ea4d3d6008ac74dc5079e77ab2209d8f" +uuid = "429591f6-91af-11e9-00e2-59fbe8cec110" +version = "2.2.9" + +[[deps.ExceptionUnwrapping]] +deps = ["Test"] +git-tree-sha1 = "d36f682e590a83d63d1c7dbd287573764682d12a" +uuid = "460bff9d-24e4-43bc-9d9f-a8973cb893f4" +version = "0.1.11" + +[[deps.Expat_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl"] +git-tree-sha1 = "27af30de8b5445644e8ffe3bcb0d72049c089cf1" +uuid = "2e619515-83b5-522b-bb60-26c02a35a201" +version = "2.7.3+0" + +[[deps.ExprTools]] +git-tree-sha1 = "27415f162e6028e81c72b82ef756bf321213b6ec" +uuid = "e2ba6199-217a-4e67-a87a-7c52f15ade04" +version = "0.1.10" + +[[deps.ExpressionExplorer]] +git-tree-sha1 = "5f1c005ed214356bbe41d442cc1ccd416e510b7e" +uuid = "21656369-7473-754a-2065-74616d696c43" +version = "1.1.4" + +[[deps.ExproniconLite]] +git-tree-sha1 = "c13f0b150373771b0fdc1713c97860f8df12e6c2" +uuid = "55351af7-c7e9-48d6-89ff-24e801d99491" +version = "0.10.14" + +[[deps.Extents]] +git-tree-sha1 = "b309b36a9e02fe7be71270dd8c0fd873625332b4" +uuid = "411431e0-e8b7-467b-b5e0-f676ba4f2910" +version = "0.1.6" + +[[deps.FFMPEG]] +deps = ["FFMPEG_jll"] +git-tree-sha1 = "95ecf07c2eea562b5adbd0696af6db62c0f52560" 
+uuid = "c87230d0-a227-11e9-1b43-d7ebe4e7570a" +version = "0.4.5" + +[[deps.FFMPEG_jll]] +deps = ["Artifacts", "Bzip2_jll", "FreeType2_jll", "FriBidi_jll", "JLLWrappers", "LAME_jll", "Libdl", "Ogg_jll", "OpenSSL_jll", "Opus_jll", "PCRE2_jll", "Zlib_jll", "libaom_jll", "libass_jll", "libfdk_aac_jll", "libva_jll", "libvorbis_jll", "x264_jll", "x265_jll"] +git-tree-sha1 = "66381d7059b5f3f6162f28831854008040a4e905" +uuid = "b22a6f82-2f65-5046-a5b2-351ab43fb4e5" +version = "8.0.1+1" + +[[deps.FFTA]] +deps = ["AbstractFFTs", "DocStringExtensions", "LinearAlgebra", "MuladdMacro", "Primes", "Random", "Reexport"] +git-tree-sha1 = "65e55303b72f4a567a51b174dd2c47496efeb95a" +uuid = "b86e33f2-c0db-4aa1-a6e0-ab43e668529e" +version = "0.3.1" + +[[deps.FLoops]] +deps = ["BangBang", "Compat", "FLoopsBase", "InitialValues", "JuliaVariables", "MLStyle", "Serialization", "Setfield", "Transducers"] +git-tree-sha1 = "0a2e5873e9a5f54abb06418d57a8df689336a660" +uuid = "cc61a311-1640-44b5-9fba-1b764f453329" +version = "0.2.2" + +[[deps.FLoopsBase]] +deps = ["ContextVariablesX"] +git-tree-sha1 = "656f7a6859be8673bf1f35da5670246b923964f7" +uuid = "b9860ae5-e623-471e-878b-f6a53c775ea6" +version = "0.1.1" + +[[deps.FastClosures]] +git-tree-sha1 = "acebe244d53ee1b461970f8910c235b259e772ef" +uuid = "9aa1b823-49e4-5ca5-8b0f-3971ec8bab6a" +version = "0.3.2" + +[[deps.FileIO]] +deps = ["Pkg", "Requires", "UUIDs"] +git-tree-sha1 = "6522cfb3b8fe97bec632252263057996cbd3de20" +uuid = "5789e2e9-d7fb-5bc7-8068-2c6fae9b9549" +version = "1.18.0" +weakdeps = ["HTTP"] + + [deps.FileIO.extensions] + HTTPExt = "HTTP" + +[[deps.FilePaths]] +deps = ["FilePathsBase", "MacroTools", "Reexport"] +git-tree-sha1 = "a1b2fbfe98503f15b665ed45b3d149e5d8895e4c" +uuid = "8fc22ac5-c921-52a6-82fd-178b2807b824" +version = "0.9.0" + + [deps.FilePaths.extensions] + FilePathsGlobExt = "Glob" + FilePathsURIParserExt = "URIParser" + FilePathsURIsExt = "URIs" + + [deps.FilePaths.weakdeps] + Glob = 
"c27321d9-0574-5035-807b-f59d2c89b15c" + URIParser = "30578b45-9adc-5946-b283-645ec420af67" + URIs = "5c2747f8-b7ea-4ff2-ba2e-563bfd36b1d4" + +[[deps.FilePathsBase]] +deps = ["Compat", "Dates"] +git-tree-sha1 = "3bab2c5aa25e7840a4b065805c0cdfc01f3068d2" +uuid = "48062228-2e41-5def-b9a4-89aafe57970f" +version = "0.9.24" +weakdeps = ["Mmap", "Test"] + + [deps.FilePathsBase.extensions] + FilePathsBaseMmapExt = "Mmap" + FilePathsBaseTestExt = "Test" + +[[deps.FileWatching]] +uuid = "7b1f6079-737a-58dc-b8bc-7a2ca5c1b5ee" +version = "1.11.0" + +[[deps.FillArrays]] +deps = ["LinearAlgebra"] +git-tree-sha1 = "2f979084d1e13948a3352cf64a25df6bd3b4dca3" +uuid = "1a297f60-69ca-5386-bcde-b61e274b549b" +version = "1.16.0" +weakdeps = ["PDMats", "SparseArrays", "StaticArrays", "Statistics"] + + [deps.FillArrays.extensions] + FillArraysPDMatsExt = "PDMats" + FillArraysSparseArraysExt = "SparseArrays" + FillArraysStaticArraysExt = "StaticArrays" + FillArraysStatisticsExt = "Statistics" + +[[deps.FixedPointNumbers]] +deps = ["Statistics"] +git-tree-sha1 = "05882d6995ae5c12bb5f36dd2ed3f61c98cbb172" +uuid = "53c48c17-4a7d-5ca2-90c5-79b7896eea93" +version = "0.8.5" + +[[deps.Fontconfig_jll]] +deps = ["Artifacts", "Bzip2_jll", "Expat_jll", "FreeType2_jll", "JLLWrappers", "Libdl", "Libuuid_jll", "Zlib_jll"] +git-tree-sha1 = "f85dac9a96a01087df6e3a749840015a0ca3817d" +uuid = "a3f928ae-7b40-5064-980b-68af3947d34b" +version = "2.17.1+0" + +[[deps.Format]] +git-tree-sha1 = "9c68794ef81b08086aeb32eeaf33531668d5f5fc" +uuid = "1fa38f19-a742-5d3f-a2b9-30dd87b9d5f8" +version = "1.3.7" + +[[deps.ForwardDiff]] +deps = ["CommonSubexpressions", "DiffResults", "DiffRules", "LinearAlgebra", "LogExpFunctions", "NaNMath", "Preferences", "Printf", "Random", "SpecialFunctions"] +git-tree-sha1 = "cddeab6487248a39dae1a960fff0ac17b2a28888" +uuid = "f6369f11-7733-5829-9624-2563aa707210" +version = "1.3.3" +weakdeps = ["StaticArrays"] + + [deps.ForwardDiff.extensions] + ForwardDiffStaticArraysExt = 
"StaticArrays" + +[[deps.FreeType]] +deps = ["CEnum", "FreeType2_jll"] +git-tree-sha1 = "907369da0f8e80728ab49c1c7e09327bf0d6d999" +uuid = "b38be410-82b0-50bf-ab77-7b57e271db43" +version = "4.1.1" + +[[deps.FreeType2_jll]] +deps = ["Artifacts", "Bzip2_jll", "JLLWrappers", "Libdl", "Zlib_jll"] +git-tree-sha1 = "70329abc09b886fd2c5d94ad2d9527639c421e3e" +uuid = "d7e528f0-a631-5988-bf34-fe36492bcfd7" +version = "2.14.3+1" + +[[deps.FreeTypeAbstraction]] +deps = ["BaseDirs", "ColorVectorSpace", "Colors", "FreeType", "GeometryBasics", "Mmap"] +git-tree-sha1 = "4ebb930ef4a43817991ba35db6317a05e59abd11" +uuid = "663a7486-cb36-511b-a19d-713bb74d65c9" +version = "0.10.8" + +[[deps.FriBidi_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl"] +git-tree-sha1 = "7a214fdac5ed5f59a22c2d9a885a16da1c74bbc7" +uuid = "559328eb-81f9-559d-9380-de523a88c83c" +version = "1.0.17+0" + +[[deps.FunctionWrappers]] +git-tree-sha1 = "d62485945ce5ae9c0c48f124a84998d755bae00e" +uuid = "069b7b12-0de2-55c6-9aab-29f3d0a68a2e" +version = "1.1.3" + +[[deps.FunctionWrappersWrappers]] +deps = ["FunctionWrappers", "PrecompileTools", "TruncatedStacktraces"] +git-tree-sha1 = "3e13d0b39d117a03d3fb5c88a039e94787a37fcb" +uuid = "77dc65aa-8811-40c2-897b-53d922fa7daf" +version = "1.4.0" + + [deps.FunctionWrappersWrappers.extensions] + FunctionWrappersWrappersEnzymeExt = ["Enzyme", "EnzymeCore"] + FunctionWrappersWrappersMooncakeExt = "Mooncake" + + [deps.FunctionWrappersWrappers.weakdeps] + Enzyme = "7da242da-08ed-463a-9acd-ee780be4f1d9" + EnzymeCore = "f151be2c-9106-41f4-ab19-57ee4f262869" + Mooncake = "da2b9cff-9c12-43a0-ae48-6db2b0edb7d6" + +[[deps.Functors]] +deps = ["Compat", "ConstructionBase", "LinearAlgebra", "Random"] +git-tree-sha1 = "60a0339f28a233601cb74468032b5c302d5067de" +uuid = "d9f16b24-f501-4c13-a1f2-28368ffc5196" +version = "0.5.2" + +[[deps.Future]] +deps = ["Random"] +uuid = "9fa8497b-333b-5362-9e8d-4d0656e87820" +version = "1.11.0" + +[[deps.GLFW_jll]] +deps = ["Artifacts", "JLLWrappers", 
"Libdl", "Libglvnd_jll", "Xorg_libXcursor_jll", "Xorg_libXi_jll", "Xorg_libXinerama_jll", "Xorg_libXrandr_jll", "libdecor_jll", "xkbcommon_jll"] +git-tree-sha1 = "b7bfd56fa66616138dfe5237da4dc13bbd83c67f" +uuid = "0656b61e-2033-5cc2-a64a-77c0f6c09b89" +version = "3.4.1+0" + +[[deps.GLM]] +deps = ["Distributions", "LinearAlgebra", "Printf", "Reexport", "SparseArrays", "SpecialFunctions", "Statistics", "StatsAPI", "StatsBase", "StatsFuns", "StatsModels"] +git-tree-sha1 = "3bcb30438ee1655e3b9c42d97544de7addc9c589" +uuid = "38e38edf-8417-5370-95a0-9cbb8c7f171a" +version = "1.9.3" + +[[deps.GPUArrays]] +deps = ["Adapt", "GPUArraysCore", "KernelAbstractions", "LLVM", "LinearAlgebra", "Printf", "Random", "Reexport", "ScopedValues", "Serialization", "SparseArrays", "Statistics"] +git-tree-sha1 = "6487601563e4a1d1dab796e88b4548bf5544209e" +uuid = "0c68f7d7-f131-5f86-a1c3-88cf8149b2d7" +version = "11.4.1" +weakdeps = ["JLD2"] + + [deps.GPUArrays.extensions] + JLD2Ext = "JLD2" + +[[deps.GPUArraysCore]] +deps = ["Adapt"] +git-tree-sha1 = "83cf05ab16a73219e5f6bd1bdfa9848fa24ac627" +uuid = "46192b85-c4d5-4398-a991-12ede77f4527" +version = "0.2.0" + +[[deps.GPUCompiler]] +deps = ["ExprTools", "InteractiveUtils", "LLVM", "Libdl", "Logging", "PrecompileTools", "Preferences", "Scratch", "Serialization", "TOML", "Tracy", "UUIDs"] +git-tree-sha1 = "fedfe5e7db7035271c3f58359007f971da1dde87" +uuid = "61eb1bfa-7361-4325-ad38-22787b887f55" +version = "1.9.1" + +[[deps.GPUToolbox]] +deps = ["LLVM"] +git-tree-sha1 = "a589b6c1a0eff953571f5d8b0474f5020831114d" +uuid = "096a3bc2-3ced-46d0-87f4-dd12716f4bfc" +version = "1.1.1" + +[[deps.GR]] +deps = ["Artifacts", "Base64", "DelimitedFiles", "Downloads", "GR_jll", "HTTP", "JSON", "Libdl", "LinearAlgebra", "Preferences", "Printf", "Qt6Wayland_jll", "Random", "Serialization", "Sockets", "TOML", "Tar", "Test", "p7zip_jll"] +git-tree-sha1 = "44716a1a667cb867ee0e9ec8edc31c3e4aa5afdc" +uuid = "28b8d3ca-fb5f-59d9-8090-bfdbd6d07a71" +version = "0.73.24" 
+ + [deps.GR.extensions] + IJuliaExt = "IJulia" + + [deps.GR.weakdeps] + IJulia = "7073ff75-c697-5162-941a-fcdaad2a7d2a" + +[[deps.GR_jll]] +deps = ["Artifacts", "Bzip2_jll", "Cairo_jll", "FFMPEG_jll", "Fontconfig_jll", "FreeType2_jll", "GLFW_jll", "JLLWrappers", "JpegTurbo_jll", "Libdl", "Libtiff_jll", "Pixman_jll", "Qt6Base_jll", "Zlib_jll", "libpng_jll"] +git-tree-sha1 = "be8a1b8065959e24fdc1b51402f39f3b6f0f6653" +uuid = "d2c73de3-f751-5644-a686-071e5b155ba9" +version = "0.73.24+0" + +[[deps.GeoFormatTypes]] +git-tree-sha1 = "7528a7956248c723d01a0a9b0447bf254bf4da52" +uuid = "68eda718-8dee-11e9-39e7-89f7f65f511f" +version = "0.4.5" + +[[deps.GeoInterface]] +deps = ["DataAPI", "Extents", "GeoFormatTypes"] +git-tree-sha1 = "2b0312a0c06b4408773c6dc1829b472ea706f058" +uuid = "cf35fbd7-0cd7-5166-be24-54bfbe79505f" +version = "1.6.1" +weakdeps = ["GeometryBasics", "Makie", "RecipesBase"] + + [deps.GeoInterface.extensions] + GeoInterfaceMakieExt = ["Makie", "GeometryBasics"] + GeoInterfaceRecipesBaseExt = "RecipesBase" + +[[deps.GeometryBasics]] +deps = ["EarCut_jll", "Extents", "IterTools", "LinearAlgebra", "PrecompileTools", "Random", "StaticArrays"] +git-tree-sha1 = "1f5a80f4ed9f5a4aada88fc2db456e637676414b" +uuid = "5c1252a2-5f33-56bf-86c9-59e7332b4326" +version = "0.5.10" +weakdeps = ["GeoInterface"] + + [deps.GeometryBasics.extensions] + GeometryBasicsGeoInterfaceExt = "GeoInterface" + +[[deps.GettextRuntime_jll]] +deps = ["Artifacts", "CompilerSupportLibraries_jll", "JLLWrappers", "Libdl", "Libiconv_jll"] +git-tree-sha1 = "45288942190db7c5f760f59c04495064eedf9340" +uuid = "b0724c58-0f36-5564-988d-3bb0596ebc4a" +version = "0.22.4+0" + +[[deps.Ghostscript_jll]] +deps = ["Artifacts", "JLLWrappers", "JpegTurbo_jll", "Libdl", "Zlib_jll"] +git-tree-sha1 = "38044a04637976140074d0b0621c1edf0eb531fd" +uuid = "61579ee1-b43e-5ca0-a5da-69d92c66a64b" +version = "9.55.1+0" + +[[deps.Giflib_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl"] +git-tree-sha1 = 
"6570366d757b50fabae9f4315ad74d2e40c0560a" +uuid = "59f7168a-df46-5410-90c8-f2779963d0ec" +version = "5.2.3+0" + +[[deps.Glib_jll]] +deps = ["Artifacts", "GettextRuntime_jll", "JLLWrappers", "Libdl", "Libffi_jll", "Libiconv_jll", "Libmount_jll", "PCRE2_jll", "Zlib_jll"] +git-tree-sha1 = "24f6def62397474a297bfcec22384101609142ed" +uuid = "7746bdde-850d-59dc-9ae8-88ece973131d" +version = "2.86.3+0" + +[[deps.Graphics]] +deps = ["Colors", "LinearAlgebra", "NaNMath"] +git-tree-sha1 = "a641238db938fff9b2f60d08ed9030387daf428c" +uuid = "a2bd30eb-e257-5431-a919-1863eab51364" +version = "1.1.3" + +[[deps.Graphite2_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl"] +git-tree-sha1 = "8a6dbda1fd736d60cc477d99f2e7a042acfa46e8" +uuid = "3b182d85-2403-5c21-9c21-1e1f0cc25472" +version = "1.3.15+0" + +[[deps.GridLayoutBase]] +deps = ["GeometryBasics", "InteractiveUtils", "Observables"] +git-tree-sha1 = "93d5c27c8de51687a2c70ec0716e6e76f298416f" +uuid = "3955a311-db13-416c-9275-1d80ed98e5e9" +version = "0.11.2" + +[[deps.Grisu]] +git-tree-sha1 = "53bb909d1151e57e2484c3d1b53e19552b887fb2" +uuid = "42e2da0e-8278-4e71-bc24-59509adca0fe" +version = "1.0.2" + +[[deps.HTTP]] +deps = ["Base64", "CodecZlib", "ConcurrentUtilities", "Dates", "ExceptionUnwrapping", "Logging", "LoggingExtras", "MbedTLS", "NetworkOptions", "OpenSSL", "PrecompileTools", "Random", "SimpleBufferStream", "Sockets", "URIs", "UUIDs"] +git-tree-sha1 = "51059d23c8bb67911a2e6fd5130229113735fc7e" +uuid = "cd3eb016-35fb-5094-929b-558a96fad6f3" +version = "1.11.0" + +[[deps.HarfBuzz_jll]] +deps = ["Artifacts", "Cairo_jll", "Fontconfig_jll", "FreeType2_jll", "Glib_jll", "Graphite2_jll", "JLLWrappers", "Libdl", "Libffi_jll"] +git-tree-sha1 = "f923f9a774fcf3f5cb761bfa43aeadd689714813" +uuid = "2e76f6c2-a576-52d4-95c1-20adfe4de566" +version = "8.5.1+0" + +[[deps.HashArrayMappedTries]] +git-tree-sha1 = "2eaa69a7cab70a52b9687c8bf950a5a93ec895ae" +uuid = "076d061b-32b6-4027-95e0-9a2c6f6d7e74" +version = "0.2.0" + 
+[[deps.HostCPUFeatures]] +deps = ["BitTwiddlingConvenienceFunctions", "IfElse", "Libdl", "Preferences", "Static"] +git-tree-sha1 = "af9ab7d1f70739a47f03be78771ebda38c3c71bf" +uuid = "3e5b6fbb-0976-4d2c-9146-d79de83f2fb0" +version = "0.1.18" + +[[deps.HybridVariationalInference]] +deps = ["Bijectors", "BlockDiagonals", "ChainRulesCore", "Clustering", "Combinatorics", "CommonSolve", "ComponentArrays", "DifferentiationInterface", "Distances", "DistributionFits", "Distributions", "FillArrays", "Functors", "GPUArraysCore", "IterTools", "KernelAbstractions", "LinearAlgebra", "LogExpFunctions", "MLDataDevices", "MLUtils", "Missings", "NaNMath", "Optimisers", "Optimization", "PDMats", "Random", "StableRNGs", "StaticArrays", "StatsBase", "StatsFuns", "Test", "UnPack", "Zygote"] +path = "../.." +uuid = "a108c475-a4e2-4021-9a84-cfa7df242f64" +version = "0.2.0" + + [deps.HybridVariationalInference.extensions] + HybridVariationalInferenceCUDAExt = "CUDA" + HybridVariationalInferenceFluxExt = "Flux" + HybridVariationalInferenceLuxExt = "Lux" + HybridVariationalInferenceSimpleChainsExt = "SimpleChains" + + [deps.HybridVariationalInference.weakdeps] + CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba" + Flux = "587475ba-b771-5e3f-ad9e-33799f191a9c" + Lux = "b2108857-7c20-44ae-9111-449ecde12c47" + SimpleChains = "de6bee2f-e2f4-4ec7-b6ed-219cc6f6e9e5" + +[[deps.HypergeometricFunctions]] +deps = ["LinearAlgebra", "OpenLibm_jll", "SpecialFunctions"] +git-tree-sha1 = "68c173f4f449de5b438ee67ed0c9c748dc31a2ec" +uuid = "34004b35-14d8-5ef3-9330-4cdb6864b03a" +version = "0.3.28" + +[[deps.IRTools]] +deps = ["InteractiveUtils", "MacroTools"] +git-tree-sha1 = "57e9ce6cf68d0abf5cb6b3b4abf9bedf05c939c0" +uuid = "7869d1d1-7146-5819-86e3-90919afe41df" +version = "0.4.15" + +[[deps.IfElse]] +git-tree-sha1 = "debdd00ffef04665ccbb3e150747a77560e8fad1" +uuid = "615f187c-cbe4-4ef1-ba3b-2fcf58d6d173" +version = "0.1.1" + +[[deps.ImageAxes]] +deps = ["AxisArrays", "ImageBase", "ImageCore", "Reexport", 
"SimpleTraits"] +git-tree-sha1 = "e12629406c6c4442539436581041d372d69c55ba" +uuid = "2803e5a7-5153-5ecf-9a86-9b4c37f5f5ac" +version = "0.6.12" + +[[deps.ImageBase]] +deps = ["ImageCore", "Reexport"] +git-tree-sha1 = "eb49b82c172811fd2c86759fa0553a2221feb909" +uuid = "c817782e-172a-44cc-b673-b171935fbb9e" +version = "0.1.7" + +[[deps.ImageCore]] +deps = ["ColorVectorSpace", "Colors", "FixedPointNumbers", "MappedArrays", "MosaicViews", "OffsetArrays", "PaddedViews", "PrecompileTools", "Reexport"] +git-tree-sha1 = "8c193230235bbcee22c8066b0374f63b5683c2d3" +uuid = "a09fc81d-aa75-5fe9-8630-4744c3626534" +version = "0.10.5" + +[[deps.ImageIO]] +deps = ["FileIO", "IndirectArrays", "JpegTurbo", "LazyModules", "Netpbm", "OpenEXR", "PNGFiles", "QOI", "Sixel", "TiffImages", "UUIDs", "WebP"] +git-tree-sha1 = "696144904b76e1ca433b886b4e7edd067d76cbf7" +uuid = "82e4d734-157c-48bb-816b-45c225c6df19" +version = "0.6.9" + +[[deps.ImageMetadata]] +deps = ["AxisArrays", "ImageAxes", "ImageBase", "ImageCore"] +git-tree-sha1 = "2a81c3897be6fbcde0802a0ebe6796d0562f63ec" +uuid = "bc367c6b-8a6b-528e-b4bd-a4b897500b49" +version = "0.9.10" + +[[deps.Imath_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl"] +git-tree-sha1 = "dcc8d0cd653e55213df9b75ebc6fe4a8d3254c65" +uuid = "905a6f67-0a94-5f89-b386-d35d92009cd1" +version = "3.2.2+0" + +[[deps.Indexing]] +git-tree-sha1 = "ce1566720fd6b19ff3411404d4b977acd4814f9f" +uuid = "313cdc1a-70c2-5d6a-ae34-0150d3930a38" +version = "1.1.1" + +[[deps.IndirectArrays]] +git-tree-sha1 = "012e604e1c7458645cb8b436f8fba789a51b257f" +uuid = "9b13fd28-a010-5f03-acff-a1bbcff69959" +version = "1.0.0" + +[[deps.Inflate]] +git-tree-sha1 = "d1b1b796e47d94588b3757fe84fbf65a5ec4a80d" +uuid = "d25df0c9-e2be-5dd7-82c8-3ad0b3e990b9" +version = "0.1.5" + +[[deps.InitialValues]] +git-tree-sha1 = "4da0f88e9a39111c2fa3add390ab15f3a44f3ca3" +uuid = "22cec73e-a1b8-11e9-2c92-598750a2cf9c" +version = "0.3.1" + +[[deps.InlineStrings]] +git-tree-sha1 = 
"8f3d257792a522b4601c24a577954b0a8cd7334d" +uuid = "842dd82b-1e85-43dc-bf29-5d0ee9dffc48" +version = "1.4.5" + + [deps.InlineStrings.extensions] + ArrowTypesExt = "ArrowTypes" + ParsersExt = "Parsers" + + [deps.InlineStrings.weakdeps] + ArrowTypes = "31f734f8-188a-4ce0-8406-c8a06bd891cd" + Parsers = "69de0a69-1ddd-5017-9359-2bf0b02dc9f0" + +[[deps.IntegerMathUtils]] +git-tree-sha1 = "4c1acff2dc6b6967e7e750633c50bc3b8d83e617" +uuid = "18e54dd8-cb9d-406c-a71d-865a43cbb235" +version = "0.1.3" + +[[deps.InteractiveUtils]] +deps = ["Markdown"] +uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240" +version = "1.11.0" + +[[deps.Interpolations]] +deps = ["Adapt", "AxisAlgorithms", "ChainRulesCore", "LinearAlgebra", "OffsetArrays", "Random", "Ratios", "SharedArrays", "SparseArrays", "StaticArrays", "WoodburyMatrices"] +git-tree-sha1 = "65d505fa4c0d7072990d659ef3fc086eb6da8208" +uuid = "a98d9a8b-a2ab-59e6-89dd-64a1c18fca59" +version = "0.16.2" +weakdeps = ["ForwardDiff", "Unitful"] + + [deps.Interpolations.extensions] + InterpolationsForwardDiffExt = "ForwardDiff" + InterpolationsUnitfulExt = "Unitful" + +[[deps.IntervalArithmetic]] +deps = ["CRlibm", "CoreMath", "MacroTools", "OpenBLASConsistentFPCSR_jll", "Printf", "Random", "RoundingEmulator"] +git-tree-sha1 = "f1c42fcaca2d8034fe392f3e86c2e0809f75b2a1" +uuid = "d1acc4aa-44c8-5952-acd4-ba5d80a2a253" +version = "1.0.6" + + [deps.IntervalArithmetic.extensions] + IntervalArithmeticArblibExt = "Arblib" + IntervalArithmeticDiffRulesExt = "DiffRules" + IntervalArithmeticForwardDiffExt = "ForwardDiff" + IntervalArithmeticIntervalSetsExt = "IntervalSets" + IntervalArithmeticIrrationalConstantsExt = "IrrationalConstants" + IntervalArithmeticLinearAlgebraExt = "LinearAlgebra" + IntervalArithmeticRecipesBaseExt = "RecipesBase" + IntervalArithmeticSparseArraysExt = "SparseArrays" + + [deps.IntervalArithmetic.weakdeps] + Arblib = "fb37089c-8514-4489-9461-98f9c8763369" + DiffRules = "b552c78f-8df3-52c6-915a-8e097449b14b" + ForwardDiff = 
"f6369f11-7733-5829-9624-2563aa707210" + IntervalSets = "8197267c-284f-5f27-9208-e0e47529a953" + IrrationalConstants = "92d709cd-6900-40b7-9082-c6be49f344b6" + LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" + RecipesBase = "3cdcf5f2-1ef4-517c-9805-6587b60abb01" + SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf" + +[[deps.IntervalSets]] +git-tree-sha1 = "79d6bd28c8d9bccc2229784f1bd637689b256377" +uuid = "8197267c-284f-5f27-9208-e0e47529a953" +version = "0.7.14" +weakdeps = ["Random", "RecipesBase", "Statistics"] + + [deps.IntervalSets.extensions] + IntervalSetsRandomExt = "Random" + IntervalSetsRecipesBaseExt = "RecipesBase" + IntervalSetsStatisticsExt = "Statistics" + +[[deps.InverseFunctions]] +git-tree-sha1 = "a779299d77cd080bf77b97535acecd73e1c5e5cb" +uuid = "3587e190-3f89-42d0-90ee-14403ec27112" +version = "0.1.17" +weakdeps = ["Dates", "Test"] + + [deps.InverseFunctions.extensions] + InverseFunctionsDatesExt = "Dates" + InverseFunctionsTestExt = "Test" + +[[deps.InvertedIndices]] +git-tree-sha1 = "6da3c4316095de0f5ee2ebd875df8721e7e0bdbe" +uuid = "41ab1584-1d38-5bbf-9106-f11c6c58b48f" +version = "1.3.1" + +[[deps.IrrationalConstants]] +git-tree-sha1 = "b2d91fe939cae05960e760110b328288867b5758" +uuid = "92d709cd-6900-40b7-9082-c6be49f344b6" +version = "0.2.6" + +[[deps.Isoband]] +deps = ["isoband_jll"] +git-tree-sha1 = "f9b6d97355599074dc867318950adaa6f9946137" +uuid = "f1662d9f-8043-43de-a69a-05efc1cc6ff4" +version = "0.1.1" + +[[deps.IterTools]] +git-tree-sha1 = "42d5f897009e7ff2cf88db414a389e5ed1bdd023" +uuid = "c8e1da08-722c-5040-9ed9-7db0dc04731e" +version = "1.10.0" + +[[deps.IteratorInterfaceExtensions]] +git-tree-sha1 = "a3f24677c21f5bbe9d2a714f95dcd58337fb2856" +uuid = "82899510-4779-5014-852e-03e436cf321d" +version = "1.0.0" + +[[deps.JLD2]] +deps = ["ChunkCodecLibZlib", "ChunkCodecLibZstd", "FileIO", "MacroTools", "Mmap", "OrderedCollections", "PrecompileTools", "ScopedValues"] +git-tree-sha1 = "941f87a0ae1b14d1ac2fa57245425b23a9d7a516" 
+uuid = "033835bb-8acc-5ee8-8aae-3f567f8a3819" +version = "0.6.4" +weakdeps = ["UnPack"] + + [deps.JLD2.extensions] + UnPackExt = "UnPack" + +[[deps.JLFzf]] +deps = ["REPL", "Random", "fzf_jll"] +git-tree-sha1 = "82f7acdc599b65e0f8ccd270ffa1467c21cb647b" +uuid = "1019f520-868f-41f5-a6de-eb00f4b6a39c" +version = "0.1.11" + +[[deps.JLLWrappers]] +deps = ["Artifacts", "Preferences"] +git-tree-sha1 = "0533e564aae234aff59ab625543145446d8b6ec2" +uuid = "692b3bcd-3c85-4b1f-b108-f13ce0eb3210" +version = "1.7.1" + +[[deps.JSON]] +deps = ["Dates", "Logging", "Parsers", "PrecompileTools", "StructUtils", "UUIDs", "Unicode"] +git-tree-sha1 = "67c6f1f085cb2671c93fe34244c9cccde30f7a26" +uuid = "682c06a0-de6a-54ab-a142-c8b1cf79cde6" +version = "1.5.0" + + [deps.JSON.extensions] + JSONArrowExt = ["ArrowTypes"] + + [deps.JSON.weakdeps] + ArrowTypes = "31f734f8-188a-4ce0-8406-c8a06bd891cd" + +[[deps.Jieko]] +deps = ["ExproniconLite"] +git-tree-sha1 = "2f05ed29618da60c06a87e9c033982d4f71d0b6c" +uuid = "ae98c720-c025-4a4a-838c-29b094483192" +version = "0.2.1" + +[[deps.JpegTurbo]] +deps = ["CEnum", "FileIO", "ImageCore", "JpegTurbo_jll", "TOML"] +git-tree-sha1 = "9496de8fb52c224a2e3f9ff403947674517317d9" +uuid = "b835a17e-a41a-41e7-81f0-2f016b05efe0" +version = "0.1.6" + +[[deps.JpegTurbo_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl"] +git-tree-sha1 = "c0c9b76f3520863909825cbecdef58cd63de705a" +uuid = "aacddb02-875f-59d6-b918-886e6ef4fbf8" +version = "3.1.5+0" + +[[deps.JuliaNVTXCallbacks_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] +git-tree-sha1 = "af433a10f3942e882d3c671aacb203e006a5808f" +uuid = "9c1d0b0a-7046-5b2e-a33f-ea22f176ac7e" +version = "0.2.1+0" + +[[deps.JuliaVariables]] +deps = ["MLStyle", "NameResolution"] +git-tree-sha1 = "49fb3cb53362ddadb4415e9b73926d6b40709e70" +uuid = "b14d175d-62b4-44ba-8fb7-3064adc8c3ec" +version = "0.2.4" + +[[deps.KernelAbstractions]] +deps = ["Adapt", "Atomix", "InteractiveUtils", "MacroTools", "PrecompileTools", "Requires", 
"StaticArrays", "UUIDs"] +git-tree-sha1 = "f2e76d3ced51a2a9e185abc0b97494c7273f649f" +uuid = "63c18a36-062a-441e-b654-da1e3ab1ce7c" +version = "0.9.41" +weakdeps = ["EnzymeCore", "LinearAlgebra", "SparseArrays"] + + [deps.KernelAbstractions.extensions] + EnzymeExt = "EnzymeCore" + LinearAlgebraExt = "LinearAlgebra" + SparseArraysExt = "SparseArrays" + +[[deps.KernelDensity]] +deps = ["Distributions", "DocStringExtensions", "FFTA", "Interpolations", "StatsBase"] +git-tree-sha1 = "4260cfc991b8885bf747801fb60dd4503250e478" +uuid = "5ab0869b-81aa-558d-bb23-cbf5423bbe9b" +version = "0.6.11" + +[[deps.LAME_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl"] +git-tree-sha1 = "059aabebaa7c82ccb853dd4a0ee9d17796f7e1bc" +uuid = "c1c5ebd0-6772-5130-a774-d5fcae4a789d" +version = "3.100.3+0" + +[[deps.LERC_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl"] +git-tree-sha1 = "17b94ecafcfa45e8360a4fc9ca6b583b049e4e37" +uuid = "88015f11-f218-50d7-93a8-a6af411a945d" +version = "4.1.0+0" + +[[deps.LLVM]] +deps = ["CEnum", "LLVMExtra_jll", "Libdl", "Preferences", "Printf", "Unicode"] +git-tree-sha1 = "69e4739502b7ab5176117e97e1664ed181c35036" +uuid = "929cbde3-209d-540e-8aea-75f648917ca0" +version = "9.4.6" +weakdeps = ["BFloat16s"] + + [deps.LLVM.extensions] + BFloat16sExt = "BFloat16s" + +[[deps.LLVMExtra_jll]] +deps = ["Artifacts", "JLLWrappers", "LazyArtifacts", "Libdl", "TOML"] +git-tree-sha1 = "8e76807afb59ebb833e9b131ebf1a8c006510f33" +uuid = "dad2f222-ce93-54a1-a47d-0025e8a3acab" +version = "0.0.38+0" + +[[deps.LLVMLoopInfo]] +git-tree-sha1 = "2e5c102cfc41f48ae4740c7eca7743cc7e7b75ea" +uuid = "8b046642-f1f6-4319-8d3c-209ddc03c586" +version = "1.0.0" + +[[deps.LLVMOpenMP_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl"] +git-tree-sha1 = "eb62a3deb62fc6d8822c0c4bef73e4412419c5d8" +uuid = "1d63c593-3942-5779-bab2-d838dc0a180e" +version = "18.1.8+0" + +[[deps.LaTeXStrings]] +git-tree-sha1 = "dda21b8cbd6a6c40d9d02a73230f9d70fed6918c" +uuid = 
"b964fa9f-0449-5b57-a5c2-d3ea65f4040f" +version = "1.4.0" + +[[deps.Latexify]] +deps = ["Format", "Ghostscript_jll", "InteractiveUtils", "LaTeXStrings", "MacroTools", "Markdown", "OrderedCollections", "Requires"] +git-tree-sha1 = "44f93c47f9cd6c7e431f2f2091fcba8f01cd7e8f" +uuid = "23fbe1c1-3f47-55db-b15f-69d7ec21a316" +version = "0.16.10" + + [deps.Latexify.extensions] + DataFramesExt = "DataFrames" + SparseArraysExt = "SparseArrays" + SymEngineExt = "SymEngine" + TectonicExt = "tectonic_jll" + + [deps.Latexify.weakdeps] + DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0" + SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf" + SymEngine = "123dc426-2d89-5057-bbad-38513e3affd8" + tectonic_jll = "d7dd28d6-a5e6-559c-9131-7eb760cdacc5" + +[[deps.LayoutPointers]] +deps = ["ArrayInterface", "LinearAlgebra", "ManualMemory", "SIMDTypes", "Static", "StaticArrayInterface"] +git-tree-sha1 = "a9eaadb366f5493a5654e843864c13d8b107548c" +uuid = "10f19ff3-798f-405d-979b-55457f8fc047" +version = "0.1.17" + +[[deps.LazyArtifacts]] +deps = ["Artifacts", "Pkg"] +uuid = "4af54fe1-eca0-43a8-85a7-787d91b784e3" +version = "1.11.0" + +[[deps.LazyModules]] +git-tree-sha1 = "a560dd966b386ac9ae60bdd3a3d3a326062d3c3e" +uuid = "8cdb02fc-e678-4876-92c5-9defec4f444e" +version = "0.3.1" + +[[deps.LeftChildRightSiblingTrees]] +deps = ["AbstractTrees"] +git-tree-sha1 = "95ba48564903b43b2462318aa243ee79d81135ff" +uuid = "1d6d02ad-be62-4b6b-8a6d-2f90e265016e" +version = "0.2.1" + +[[deps.LibCURL]] +deps = ["LibCURL_jll", "MozillaCACerts_jll"] +uuid = "b27032c2-a3e7-50c8-80cd-2d36dbcbfd21" +version = "0.6.4" + +[[deps.LibCURL_jll]] +deps = ["Artifacts", "LibSSH2_jll", "Libdl", "MbedTLS_jll", "Zlib_jll", "nghttp2_jll"] +uuid = "deac9b47-8bc7-5906-a0fe-35ac56dc84c0" +version = "8.6.0+0" + +[[deps.LibGit2]] +deps = ["Base64", "LibGit2_jll", "NetworkOptions", "Printf", "SHA"] +uuid = "76f85450-5226-5b5a-8eaa-529ad045b433" +version = "1.11.0" + +[[deps.LibGit2_jll]] +deps = ["Artifacts", "LibSSH2_jll", 
"Libdl", "MbedTLS_jll"] +uuid = "e37daf67-58a4-590a-8e99-b0245dd2ffc5" +version = "1.7.2+0" + +[[deps.LibSSH2_jll]] +deps = ["Artifacts", "Libdl", "MbedTLS_jll"] +uuid = "29816b5a-b9ab-546f-933c-edad1886dfa8" +version = "1.11.0+1" + +[[deps.LibTracyClient_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl"] +git-tree-sha1 = "d4e20500d210247322901841d4eafc7a0c52642d" +uuid = "ad6e5548-8b26-5c9f-8ef3-ef0ad883f3a5" +version = "0.13.1+0" + +[[deps.Libdl]] +uuid = "8f399da3-3557-5675-b5ff-fb832c97cbdb" +version = "1.11.0" + +[[deps.Libffi_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl"] +git-tree-sha1 = "c8da7e6a91781c41a863611c7e966098d783c57a" +uuid = "e9f186c6-92d2-5b65-8a66-fee21dc1b490" +version = "3.4.7+0" + +[[deps.Libglvnd_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Xorg_libX11_jll", "Xorg_libXext_jll"] +git-tree-sha1 = "d36c21b9e7c172a44a10484125024495e2625ac0" +uuid = "7e76a0d4-f3c7-5321-8279-8d96eeed0f29" +version = "1.7.1+1" + +[[deps.Libiconv_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl"] +git-tree-sha1 = "be484f5c92fad0bd8acfef35fe017900b0b73809" +uuid = "94ce4f54-9a6c-5748-9c1c-f9c7231a4531" +version = "1.18.0+0" + +[[deps.Libmount_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl"] +git-tree-sha1 = "97bbca976196f2a1eb9607131cb108c69ec3f8a6" +uuid = "4b2f31a3-9ecc-558c-b454-b3730dcb73e9" +version = "2.41.3+0" + +[[deps.Libtiff_jll]] +deps = ["Artifacts", "JLLWrappers", "JpegTurbo_jll", "LERC_jll", "Libdl", "XZ_jll", "Zlib_jll", "Zstd_jll"] +git-tree-sha1 = "f04133fe05eff1667d2054c53d59f9122383fe05" +uuid = "89763e89-9b03-5906-acba-b20f662cd828" +version = "4.7.2+0" + +[[deps.Libuuid_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl"] +git-tree-sha1 = "d0205286d9eceadc518742860bf23f703779a3d6" +uuid = "38a345b3-de98-5d2b-a5d3-14cd9215e700" +version = "2.41.3+0" + +[[deps.LinearAlgebra]] +deps = ["Libdl", "OpenBLAS_jll", "libblastrampoline_jll"] +uuid = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" +version = "1.11.0" + +[[deps.Loess]] +deps = 
["Distances", "LinearAlgebra", "Statistics", "StatsAPI", "StatsFuns"] +git-tree-sha1 = "b1ad83b367b915e2dc485dee3d62a6a6317d7ad4" +uuid = "4345ca2d-374a-55d4-8d30-97f9976e7612" +version = "0.6.5" + +[[deps.LogDensityProblems]] +deps = ["ArgCheck", "DocStringExtensions", "Random"] +git-tree-sha1 = "d9625f27ded4ad726ceca7819394a4cc77ed25b3" +uuid = "6fdf6af0-433a-55f7-b3ed-c6c6e0b8df7c" +version = "2.2.0" + +[[deps.LogExpFunctions]] +deps = ["DocStringExtensions", "IrrationalConstants", "LinearAlgebra"] +git-tree-sha1 = "13ca9e2586b89836fd20cccf56e57e2b9ae7f38f" +uuid = "2ab3a3ac-af41-5b50-aa03-7779005ae688" +version = "0.3.29" +weakdeps = ["ChainRulesCore", "ChangesOfVariables", "InverseFunctions"] + + [deps.LogExpFunctions.extensions] + LogExpFunctionsChainRulesCoreExt = "ChainRulesCore" + LogExpFunctionsChangesOfVariablesExt = "ChangesOfVariables" + LogExpFunctionsInverseFunctionsExt = "InverseFunctions" + +[[deps.Logging]] +uuid = "56ddb016-857b-54e1-b83d-db4d58db5568" +version = "1.11.0" + +[[deps.LoggingExtras]] +deps = ["Dates", "Logging"] +git-tree-sha1 = "f00544d95982ea270145636c181ceda21c4e2575" +uuid = "e6f89c97-d47a-5376-807f-9c37f3926c36" +version = "1.2.0" + +[[deps.LoopVectorization]] +deps = ["ArrayInterface", "CPUSummary", "CloseOpenIntervals", "DocStringExtensions", "HostCPUFeatures", "IfElse", "LayoutPointers", "LinearAlgebra", "OffsetArrays", "PolyesterWeave", "PrecompileTools", "SIMDTypes", "SLEEFPirates", "Static", "StaticArrayInterface", "ThreadingUtilities", "UnPack", "VectorizationBase"] +git-tree-sha1 = "a9fc7883eb9b5f04f46efb9a540833d1fad974b3" +uuid = "bdcacae8-1622-11e9-2a5c-532679323890" +version = "0.12.173" +weakdeps = ["ChainRulesCore", "ForwardDiff", "NNlib", "SpecialFunctions"] + + [deps.LoopVectorization.extensions] + ForwardDiffExt = ["ChainRulesCore", "ForwardDiff"] + ForwardDiffNNlibExt = ["ForwardDiff", "NNlib"] + SpecialFunctionsExt = "SpecialFunctions" + +[[deps.Lux]] +deps = ["ADTypes", "Adapt", "ArrayInterface", 
"ChainRulesCore", "ConcreteStructs", "DiffResults", "DispatchDoctor", "EnzymeCore", "FastClosures", "ForwardDiff", "Functors", "GPUArraysCore", "LinearAlgebra", "LuxCore", "LuxLib", "MLDataDevices", "MacroTools", "Markdown", "NNlib", "Optimisers", "PrecompileTools", "Preferences", "Random", "ReactantCore", "Reexport", "SciMLPublic", "Setfield", "Static", "StaticArraysCore", "Statistics", "UUIDs", "WeightInitializers"] +git-tree-sha1 = "334de475ff414c8eb67f88f57f7b02d40cd8f320" +uuid = "b2108857-7c20-44ae-9111-449ecde12c47" +version = "1.31.3" + + [deps.Lux.extensions] + ComponentArraysExt = "ComponentArrays" + EnzymeExt = "Enzyme" + FluxExt = "Flux" + GPUArraysExt = "GPUArrays" + LossFunctionsExt = "LossFunctions" + MLUtilsExt = "MLUtils" + MPIExt = "MPI" + MPINCCLExt = ["CUDA", "MPI", "NCCL"] + MooncakeExt = "Mooncake" + ReactantExt = ["Enzyme", "Reactant"] + ReverseDiffExt = ["FunctionWrappers", "ReverseDiff"] + SimpleChainsExt = "SimpleChains" + TrackerExt = "Tracker" + ZygoteExt = "Zygote" + + [deps.Lux.weakdeps] + CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba" + ComponentArrays = "b0b7db55-cfe3-40fc-9ded-d10e2dbeff66" + Enzyme = "7da242da-08ed-463a-9acd-ee780be4f1d9" + Flux = "587475ba-b771-5e3f-ad9e-33799f191a9c" + FunctionWrappers = "069b7b12-0de2-55c6-9aab-29f3d0a68a2e" + GPUArrays = "0c68f7d7-f131-5f86-a1c3-88cf8149b2d7" + LossFunctions = "30fc2ffe-d236-52d8-8643-a9d8f7c094a7" + MLUtils = "f1d291b0-491e-4a28-83b9-f70985020b54" + MPI = "da04e1cc-30fd-572f-bb4f-1f8673147195" + Mooncake = "da2b9cff-9c12-43a0-ae48-6db2b0edb7d6" + NCCL = "3fe64909-d7a1-4096-9b7d-7a0f12cf0f6b" + Reactant = "3c362404-f566-11ee-1572-e11a4b42c853" + ReverseDiff = "37e2e3b7-166d-5795-8a7a-e32c996b4267" + SimpleChains = "de6bee2f-e2f4-4ec7-b6ed-219cc6f6e9e5" + Tracker = "9f7883ad-71c0-57eb-9f7f-b5c9e6d3789c" + Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f" + +[[deps.LuxCore]] +deps = ["DispatchDoctor", "Random", "SciMLPublic"] +git-tree-sha1 = 
"9455b1e829d8dacad236143869be70b7fdb826b8" +uuid = "bb33d45b-7691-41d6-9220-0943567d0623" +version = "1.5.3" + + [deps.LuxCore.extensions] + ArrayInterfaceReverseDiffExt = ["ArrayInterface", "ReverseDiff"] + ArrayInterfaceTrackerExt = ["ArrayInterface", "Tracker"] + ChainRulesCoreExt = "ChainRulesCore" + EnzymeCoreExt = "EnzymeCore" + FluxExt = "Flux" + FunctorsExt = "Functors" + MLDataDevicesExt = ["Adapt", "MLDataDevices"] + ReactantExt = "Reactant" + SetfieldExt = "Setfield" + + [deps.LuxCore.weakdeps] + Adapt = "79e6a3ab-5dfb-504d-930d-738a2a938a0e" + ArrayInterface = "4fba245c-0d91-5ea0-9b3e-6abc04ee57a9" + ChainRulesCore = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4" + EnzymeCore = "f151be2c-9106-41f4-ab19-57ee4f262869" + Flux = "587475ba-b771-5e3f-ad9e-33799f191a9c" + Functors = "d9f16b24-f501-4c13-a1f2-28368ffc5196" + MLDataDevices = "7e8f7934-dd98-4c1a-8fe8-92b47a384d40" + Reactant = "3c362404-f566-11ee-1572-e11a4b42c853" + ReverseDiff = "37e2e3b7-166d-5795-8a7a-e32c996b4267" + Setfield = "efcf1570-3423-57d1-acb7-fd33fddbac46" + Tracker = "9f7883ad-71c0-57eb-9f7f-b5c9e6d3789c" + +[[deps.LuxLib]] +deps = ["ArrayInterface", "CPUSummary", "ChainRulesCore", "DispatchDoctor", "EnzymeCore", "FastClosures", "Functors", "KernelAbstractions", "LinearAlgebra", "LuxCore", "MLDataDevices", "Markdown", "NNlib", "Preferences", "Random", "Reexport", "SciMLPublic", "Static", "StaticArraysCore", "Statistics", "UUIDs"] +git-tree-sha1 = "77f3257b18e9fedd39b7b7990f0d3a0800a834ae" +uuid = "82251201-b29d-42c6-8e01-566dec8acb11" +version = "1.15.6" + + [deps.LuxLib.extensions] + AppleAccelerateExt = "AppleAccelerate" + BLISBLASExt = "BLISBLAS" + CUDAExt = "CUDA" + CUDAForwardDiffExt = ["CUDA", "ForwardDiff"] + EnzymeExt = "Enzyme" + ForwardDiffExt = "ForwardDiff" + LoopVectorizationExt = ["LoopVectorization", "Polyester"] + MKLExt = "MKL" + OctavianExt = ["Octavian", "LoopVectorization"] + OneHotArraysExt = ["OneHotArrays"] + ReactantExt = ["Reactant", "ReactantCore"] + ReverseDiffExt 
= "ReverseDiff" + SLEEFPiratesExt = "SLEEFPirates" + TrackerAMDGPUExt = ["AMDGPU", "Tracker"] + TrackerExt = "Tracker" + cuDNNExt = ["CUDA", "cuDNN"] + + [deps.LuxLib.weakdeps] + AMDGPU = "21141c5a-9bdb-4563-92ae-f87d6854732e" + AppleAccelerate = "13e28ba4-7ad8-5781-acae-3021b1ed3924" + BLISBLAS = "6f275bd8-fec0-4d39-945b-7e95a765fa1e" + CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba" + Enzyme = "7da242da-08ed-463a-9acd-ee780be4f1d9" + ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210" + LoopVectorization = "bdcacae8-1622-11e9-2a5c-532679323890" + MKL = "33e6dc65-8f57-5167-99aa-e5a354878fb2" + Octavian = "6fd5a793-0b7e-452c-907f-f8bfe9c57db4" + OneHotArrays = "0b1bfda6-eb8a-41d2-88d8-f5af5cad476f" + Polyester = "f517fe37-dbe3-4b94-8317-1923a5111588" + Reactant = "3c362404-f566-11ee-1572-e11a4b42c853" + ReactantCore = "a3311ec8-5e00-46d5-b541-4f83e724a433" + ReverseDiff = "37e2e3b7-166d-5795-8a7a-e32c996b4267" + SLEEFPirates = "476501e8-09a2-5ece-8869-fb82de89a1fa" + Tracker = "9f7883ad-71c0-57eb-9f7f-b5c9e6d3789c" + cuDNN = "02a925ec-e4fe-4b08-9a7e-0d78e3d38ccd" + +[[deps.MCMCDiagnosticTools]] +deps = ["AbstractFFTs", "DataAPI", "DataStructures", "Distributions", "LinearAlgebra", "MLJModelInterface", "Random", "SpecialFunctions", "Statistics", "StatsBase", "StatsFuns", "Tables"] +git-tree-sha1 = "2f464b68e84673727b4e4216a6254fba7da5cf4e" +uuid = "be115224-59cd-429b-ad48-344e309966f0" +version = "0.3.17" + +[[deps.MLCore]] +deps = ["DataAPI", "SimpleTraits", "Tables"] +git-tree-sha1 = "73907695f35bc7ffd9f11f6c4f2ee8c1302084be" +uuid = "c2834f40-e789-41da-a90e-33b280584a8c" +version = "1.0.0" + +[[deps.MLDataDevices]] +deps = ["Adapt", "Functors", "Preferences", "Random", "SciMLPublic"] +git-tree-sha1 = "39a69ca451c3e78b9a6a2e42ef894fdf7505e629" +uuid = "7e8f7934-dd98-4c1a-8fe8-92b47a384d40" +version = "1.17.5" + + [deps.MLDataDevices.extensions] + AMDGPUExt = "AMDGPU" + CUDAExt = "CUDA" + ChainRulesCoreExt = "ChainRulesCore" + ChainRulesExt = "ChainRules" + 
ComponentArraysExt = "ComponentArrays" + FillArraysExt = "FillArrays" + GPUArraysSparseArraysExt = ["GPUArrays", "SparseArrays"] + MLUtilsExt = "MLUtils" + MetalExt = ["GPUArrays", "Metal"] + OneHotArraysExt = "OneHotArrays" + OpenCLExt = ["GPUArrays", "OpenCL"] + ReactantExt = "Reactant" + RecursiveArrayToolsExt = "RecursiveArrayTools" + ReverseDiffExt = "ReverseDiff" + SparseArraysExt = "SparseArrays" + TrackerExt = "Tracker" + ZygoteExt = "Zygote" + cuDNNExt = ["CUDA", "cuDNN"] + oneAPIExt = ["GPUArrays", "oneAPI"] + + [deps.MLDataDevices.weakdeps] + AMDGPU = "21141c5a-9bdb-4563-92ae-f87d6854732e" + CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba" + ChainRules = "082447d4-558c-5d27-93f4-14fc19e9eca2" + ChainRulesCore = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4" + ComponentArrays = "b0b7db55-cfe3-40fc-9ded-d10e2dbeff66" + FillArrays = "1a297f60-69ca-5386-bcde-b61e274b549b" + GPUArrays = "0c68f7d7-f131-5f86-a1c3-88cf8149b2d7" + MLUtils = "f1d291b0-491e-4a28-83b9-f70985020b54" + Metal = "dde4c033-4e86-420c-a63e-0dd931031962" + OneHotArrays = "0b1bfda6-eb8a-41d2-88d8-f5af5cad476f" + OpenCL = "08131aa3-fb12-5dee-8b74-c09406e224a2" + Reactant = "3c362404-f566-11ee-1572-e11a4b42c853" + RecursiveArrayTools = "731186ca-8d62-57ce-b412-fbd966d074cd" + ReverseDiff = "37e2e3b7-166d-5795-8a7a-e32c996b4267" + SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf" + Tracker = "9f7883ad-71c0-57eb-9f7f-b5c9e6d3789c" + Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f" + cuDNN = "02a925ec-e4fe-4b08-9a7e-0d78e3d38ccd" + oneAPI = "8f75cd03-7ff8-4ecb-9b8f-daf728133b1b" + +[[deps.MLJModelInterface]] +deps = ["InteractiveUtils", "REPL", "Random", "ScientificTypesBase", "StatisticalTraits"] +git-tree-sha1 = "c275fae2e693206b4527dd9d2382aa15359ef3ed" +uuid = "e80e1ace-859a-464e-9ed9-23947d8ae3ea" +version = "1.12.1" + +[[deps.MLStyle]] +git-tree-sha1 = "bc38dff0548128765760c79eb7388a4b37fae2c8" +uuid = "d8e11817-5142-5d16-987a-aa16d5891078" +version = "0.4.17" + +[[deps.MLUtils]] +deps = 
["ChainRulesCore", "Compat", "DataAPI", "DelimitedFiles", "FLoops", "MLCore", "NNlib", "Random", "ShowCases", "SimpleTraits", "Statistics", "StatsBase", "Tables", "Transducers"] +git-tree-sha1 = "a772d8d1987433538a5c226f79393324b55f7846" +uuid = "f1d291b0-491e-4a28-83b9-f70985020b54" +version = "0.4.8" + +[[deps.MacroTools]] +git-tree-sha1 = "1e0228a030642014fe5cfe68c2c0a818f9e3f522" +uuid = "1914dd2f-81c6-5fcd-8719-6d5c9610ff09" +version = "0.5.16" + +[[deps.Makie]] +deps = ["Animations", "Base64", "CRC32c", "ColorBrewer", "ColorSchemes", "ColorTypes", "Colors", "ComputePipeline", "Contour", "Dates", "DelaunayTriangulation", "Distributions", "DocStringExtensions", "Downloads", "FFMPEG_jll", "FileIO", "FilePaths", "FixedPointNumbers", "Format", "FreeType", "FreeTypeAbstraction", "GeometryBasics", "GridLayoutBase", "ImageBase", "ImageIO", "InteractiveUtils", "Interpolations", "IntervalSets", "InverseFunctions", "Isoband", "KernelDensity", "LaTeXStrings", "LinearAlgebra", "MacroTools", "Markdown", "MathTeXEngine", "Observables", "OffsetArrays", "PNGFiles", "Packing", "Pkg", "PlotUtils", "PolygonOps", "PrecompileTools", "Printf", "REPL", "Random", "RelocatableFolders", "Scratch", "ShaderAbstractions", "Showoff", "SignedDistanceFields", "SparseArrays", "Statistics", "StatsBase", "StatsFuns", "StructArrays", "TriplotBase", "UnicodeFun", "Unitful"] +git-tree-sha1 = "68af66ec16af8b152309310251ecb4fbfe39869f" +uuid = "ee78f7c6-11fb-53f2-987a-cfe4a2b5a57a" +version = "0.24.9" + + [deps.Makie.extensions] + MakieDynamicQuantitiesExt = "DynamicQuantities" + + [deps.Makie.weakdeps] + DynamicQuantities = "06fc5a27-2a28-4c7c-a15d-362465fb6821" + +[[deps.ManualMemory]] +git-tree-sha1 = "bcaef4fc7a0cfe2cba636d84cda54b5e4e4ca3cd" +uuid = "d125e4d3-2237-4719-b19c-fa641b8a4667" +version = "0.1.8" + +[[deps.MappedArrays]] +git-tree-sha1 = "0ee4497a4e80dbd29c058fcee6493f5219556f40" +uuid = "dbb5928d-eab1-5f90-85c2-b9b0edb7c900" +version = "0.4.3" + +[[deps.Markdown]] +deps = ["Base64"] 
+uuid = "d6f4376e-aef5-505a-96c1-9c027394607a" +version = "1.11.0" + +[[deps.MathTeXEngine]] +deps = ["AbstractTrees", "Automa", "DataStructures", "FreeTypeAbstraction", "GeometryBasics", "LaTeXStrings", "REPL", "RelocatableFolders", "UnicodeFun"] +git-tree-sha1 = "7eb8cdaa6f0e8081616367c10b31b9d9b34bb02a" +uuid = "0a4f8689-d25c-4efe-a92b-7142dfc1aa53" +version = "0.6.7" + +[[deps.MbedTLS]] +deps = ["Dates", "MbedTLS_jll", "MozillaCACerts_jll", "NetworkOptions", "Random", "Sockets"] +git-tree-sha1 = "8785729fa736197687541f7053f6d8ab7fc44f92" +uuid = "739be429-bea8-5141-9913-cc70e7f3736d" +version = "1.1.10" + +[[deps.MbedTLS_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "c8ffd9c3-330d-5841-b78e-0817d7145fa1" +version = "2.28.6+0" + +[[deps.Measures]] +git-tree-sha1 = "b513cedd20d9c914783d8ad83d08120702bf2c77" +uuid = "442fdcdd-2543-5da2-b0f3-8c86c306513e" +version = "0.3.3" + +[[deps.MicroCollections]] +deps = ["Accessors", "BangBang", "InitialValues"] +git-tree-sha1 = "44d32db644e84c75dab479f1bc15ee76a1a3618f" +uuid = "128add7d-3638-4c79-886c-908ea0c25c34" +version = "0.2.0" + +[[deps.Missings]] +deps = ["DataAPI"] +git-tree-sha1 = "ec4f7fbeab05d7747bdf98eb74d130a2a2ed298d" +uuid = "e1d29d7a-bbdc-5cf2-9ac0-f12de2c33e28" +version = "1.2.0" + +[[deps.Mmap]] +uuid = "a63ad114-7e13-5084-954f-fe012c677804" +version = "1.11.0" + +[[deps.MosaicViews]] +deps = ["MappedArrays", "OffsetArrays", "PaddedViews", "StackViews"] +git-tree-sha1 = "7b86a5d4d70a9f5cdf2dacb3cbe6d251d1a61dbe" +uuid = "e94cdb99-869f-56ef-bcf0-1ae2bcbe0389" +version = "0.3.4" + +[[deps.Moshi]] +deps = ["ExproniconLite", "Jieko"] +git-tree-sha1 = "53f817d3e84537d84545e0ad749e483412dd6b2a" +uuid = "2e0e35c7-a2e4-4343-998d-7ef72827ed2d" +version = "0.3.7" + +[[deps.MozillaCACerts_jll]] +uuid = "14a3606d-f60d-562e-9121-12d972cd8159" +version = "2023.12.12" + +[[deps.MuladdMacro]] +git-tree-sha1 = "cac9cc5499c25554cba55cd3c30543cff5ca4fab" +uuid = "46d2c3a1-f734-5fdb-9937-b9b9aeba4221" +version = "0.2.4" + 
+[[deps.MultivariateStats]] +deps = ["Arpack", "Distributions", "LinearAlgebra", "SparseArrays", "Statistics", "StatsAPI", "StatsBase"] +git-tree-sha1 = "7c3ff68a904d0f7404e5d2f7f5bc667934d8d616" +uuid = "6f286f6a-111f-5878-ab1e-185364afe411" +version = "0.10.4" + +[[deps.NNlib]] +deps = ["Adapt", "Atomix", "ChainRulesCore", "GPUArraysCore", "KernelAbstractions", "LinearAlgebra", "Random", "ScopedValues", "Statistics"] +git-tree-sha1 = "6dc9ffc3a9931e6b988f913b49630d0fb986d0a8" +uuid = "872c559c-99b0-510c-b3b7-b6c96a88d5cd" +version = "0.9.33" + + [deps.NNlib.extensions] + NNlibAMDGPUExt = "AMDGPU" + NNlibCUDACUDNNExt = ["CUDA", "cuDNN"] + NNlibCUDAExt = "CUDA" + NNlibEnzymeCoreExt = "EnzymeCore" + NNlibFFTWExt = "FFTW" + NNlibForwardDiffExt = "ForwardDiff" + NNlibMetalExt = "Metal" + NNlibSpecialFunctionsExt = "SpecialFunctions" + + [deps.NNlib.weakdeps] + AMDGPU = "21141c5a-9bdb-4563-92ae-f87d6854732e" + CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba" + EnzymeCore = "f151be2c-9106-41f4-ab19-57ee4f262869" + FFTW = "7a1cc6ca-52ef-59f5-83cd-3a7055c09341" + ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210" + Metal = "dde4c033-4e86-420c-a63e-0dd931031962" + SpecialFunctions = "276daf66-3868-5448-9aa4-cd146d93841b" + cuDNN = "02a925ec-e4fe-4b08-9a7e-0d78e3d38ccd" + +[[deps.NVTX]] +deps = ["JuliaNVTXCallbacks_jll", "Libdl", "NVTX_jll"] +git-tree-sha1 = "a9083c3e469e63cca454d1fc3b19472d9d92c14a" +uuid = "5da4648a-3479-48b8-97b9-01cb529c0a1f" +version = "1.0.3" +weakdeps = ["Colors"] + + [deps.NVTX.extensions] + NVTXColorsExt = "Colors" + +[[deps.NVTX_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl"] +git-tree-sha1 = "af2232f69447494514c25742ba1503ec7e9877fe" +uuid = "e98f9f5b-d649-5603-91fd-7774390e6439" +version = "3.2.2+0" + +[[deps.NaNMath]] +deps = ["OpenLibm_jll"] +git-tree-sha1 = "9b8215b1ee9e78a293f99797cd31375471b2bcae" +uuid = "77ba4419-2d1f-58cd-9bb1-8ffee604a2e3" +version = "1.1.3" + +[[deps.NameResolution]] +deps = ["PrettyPrint"] +git-tree-sha1 = 
"1a0fa0e9613f46c9b8c11eee38ebb4f590013c5e" +uuid = "71a1bf82-56d0-4bbc-8a3c-48b961074391" +version = "0.1.5" + +[[deps.NaturalSort]] +git-tree-sha1 = "eda490d06b9f7c00752ee81cfa451efe55521e21" +uuid = "c020b1a1-e9b0-503a-9c33-f039bfc54a85" +version = "1.0.0" + +[[deps.NearestNeighbors]] +deps = ["AbstractTrees", "Distances", "StaticArrays"] +git-tree-sha1 = "e2c3bba08dd6dedfe17a17889131b885b8c082f0" +uuid = "b8a86587-4115-5ab1-83bc-aa920d37bbce" +version = "0.4.27" + +[[deps.Netpbm]] +deps = ["FileIO", "ImageCore", "ImageMetadata"] +git-tree-sha1 = "d92b107dbb887293622df7697a2223f9f8176fcd" +uuid = "f09324ee-3d7c-5217-9330-fc30815ba969" +version = "1.1.1" + +[[deps.NetworkOptions]] +uuid = "ca575930-c2e3-43a9-ace4-1e988b2c1908" +version = "1.2.0" + +[[deps.Observables]] +git-tree-sha1 = "7438a59546cf62428fc9d1bc94729146d37a7225" +uuid = "510215fc-4207-5dde-b226-833fc4488ee2" +version = "0.5.5" + +[[deps.OffsetArrays]] +git-tree-sha1 = "117432e406b5c023f665fa73dc26e79ec3630151" +uuid = "6fe1bfb0-de20-5000-8ca7-80f57d26f881" +version = "1.17.0" +weakdeps = ["Adapt"] + + [deps.OffsetArrays.extensions] + OffsetArraysAdaptExt = "Adapt" + +[[deps.Ogg_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl"] +git-tree-sha1 = "b6aa4566bb7ae78498a5e68943863fa8b5231b59" +uuid = "e7412a2a-1a6e-54c0-be00-318e2571c051" +version = "1.3.6+0" + +[[deps.OpenBLASConsistentFPCSR_jll]] +deps = ["Artifacts", "CompilerSupportLibraries_jll", "JLLWrappers", "Libdl"] +git-tree-sha1 = "f2b3b9e52a5eb6a3434c8cca67ad2dde011194f4" +uuid = "6cdc7f73-28fd-5e50-80fb-958a8875b1af" +version = "0.3.30+0" + +[[deps.OpenBLAS_jll]] +deps = ["Artifacts", "CompilerSupportLibraries_jll", "Libdl"] +uuid = "4536629a-c528-5b80-bd46-f80d51c5b363" +version = "0.3.27+1" + +[[deps.OpenEXR]] +deps = ["Colors", "FileIO", "OpenEXR_jll"] +git-tree-sha1 = "97db9e07fe2091882c765380ef58ec553074e9c7" +uuid = "52e1d378-f018-4a11-a4be-720524705ac7" +version = "0.3.3" + +[[deps.OpenEXR_jll]] +deps = ["Artifacts", "Imath_jll", 
"JLLWrappers", "Libdl", "Zlib_jll"] +git-tree-sha1 = "135492b7e97fc86d9b132b96a54d2d3dd3e0c6a8" +uuid = "18a262bb-aa17-5467-a713-aee519bc75cb" +version = "3.4.8+0" + +[[deps.OpenLibm_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "05823500-19ac-5b8b-9628-191a04bc5112" +version = "0.8.5+0" + +[[deps.OpenSSL]] +deps = ["BitFlags", "Dates", "MozillaCACerts_jll", "NetworkOptions", "OpenSSL_jll", "Sockets"] +git-tree-sha1 = "1d1aaa7d449b58415f97d2839c318b70ffb525a0" +uuid = "4d8831e6-92b7-49fb-bdf8-b643e874388c" +version = "1.6.1" + +[[deps.OpenSSL_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl"] +git-tree-sha1 = "2ac022577e5eac7da040de17776d51bb770cd895" +uuid = "458c3c95-2e84-50aa-8efc-19380b2a3a95" +version = "3.5.6+0" + +[[deps.OpenSpecFun_jll]] +deps = ["Artifacts", "CompilerSupportLibraries_jll", "JLLWrappers", "Libdl"] +git-tree-sha1 = "1346c9208249809840c91b26703912dff463d335" +uuid = "efe28fd5-8261-553b-a9e1-b2916fc3738e" +version = "0.5.6+0" + +[[deps.Optimisers]] +deps = ["ChainRulesCore", "ConstructionBase", "Functors", "LinearAlgebra", "Random", "Statistics"] +git-tree-sha1 = "36b5d2b9dd06290cd65fcf5bdbc3a551ed133af5" +uuid = "3bd65402-5787-11e9-1adc-39752487f4e2" +version = "0.4.7" + + [deps.Optimisers.extensions] + OptimisersAdaptExt = ["Adapt"] + OptimisersEnzymeCoreExt = "EnzymeCore" + OptimisersReactantExt = "Reactant" + + [deps.Optimisers.weakdeps] + Adapt = "79e6a3ab-5dfb-504d-930d-738a2a938a0e" + EnzymeCore = "f151be2c-9106-41f4-ab19-57ee4f262869" + Reactant = "3c362404-f566-11ee-1572-e11a4b42c853" + +[[deps.Optimization]] +deps = ["ADTypes", "ArrayInterface", "ConsoleProgressMonitor", "DocStringExtensions", "LinearAlgebra", "Logging", "LoggingExtras", "OptimizationBase", "Printf", "Reexport", "SciMLBase", "SparseArrays", "TerminalLoggers"] +git-tree-sha1 = "2c409c814c2d745620fdd55391a66ee514561146" +uuid = "7f7a1694-90dd-40f0-9382-eb1efda571ba" +version = "5.5.0" + +[[deps.OptimizationBase]] +deps = ["ADTypes", "ArrayInterface", 
"DifferentiationInterface", "DocStringExtensions", "FastClosures", "LinearAlgebra", "PDMats", "PrecompileTools", "Reexport", "SciMLBase", "SciMLLogging", "SparseArrays", "SparseConnectivityTracer", "SparseMatrixColorings", "SymbolicIndexingInterface"] +git-tree-sha1 = "a3d7837832e515111c95a02df7dc55edbdf17d8a" +uuid = "bca83a33-5cc9-4baa-983d-23429ab6bcbb" +version = "5.1.0" + + [deps.OptimizationBase.extensions] + OptimizationChainRulesCoreExt = "ChainRulesCore" + OptimizationEnzymeExt = ["ChainRulesCore", "Enzyme"] + OptimizationFiniteDiffExt = "FiniteDiff" + OptimizationForwardDiffExt = "ForwardDiff" + OptimizationMLDataDevicesExt = "MLDataDevices" + OptimizationMLUtilsExt = "MLUtils" + OptimizationMooncakeExt = "Mooncake" + OptimizationReverseDiffExt = "ReverseDiff" + OptimizationSymbolicAnalysisExt = "SymbolicAnalysis" + OptimizationZygoteExt = "Zygote" + + [deps.OptimizationBase.weakdeps] + ChainRulesCore = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4" + Enzyme = "7da242da-08ed-463a-9acd-ee780be4f1d9" + FiniteDiff = "6a86dc24-6348-571c-b903-95158fe2bd41" + ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210" + MLDataDevices = "7e8f7934-dd98-4c1a-8fe8-92b47a384d40" + MLUtils = "f1d291b0-491e-4a28-83b9-f70985020b54" + Mooncake = "da2b9cff-9c12-43a0-ae48-6db2b0edb7d6" + ReverseDiff = "37e2e3b7-166d-5795-8a7a-e32c996b4267" + SymbolicAnalysis = "4297ee4d-0239-47d8-ba5d-195ecdf594fe" + Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f" + +[[deps.OptimizationOptimisers]] +deps = ["Logging", "Optimisers", "OptimizationBase", "Reexport", "SciMLBase"] +git-tree-sha1 = "7caf4c41e3ee6d348381228b6517decea28867e3" +uuid = "42dfb2eb-d2b4-4451-abcd-913932933ac1" +version = "0.3.16" + +[[deps.Opus_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl"] +git-tree-sha1 = "e2bb57a313a74b8104064b7efd01406c0a50d2ff" +uuid = "91d4177d-7536-5919-b921-800302f37372" +version = "1.6.1+0" + +[[deps.OrderedCollections]] +git-tree-sha1 = "05868e21324cede2207c6f0f466b4bfef6d5e7ee" +uuid = 
"bac558e1-5e72-5ebc-8fee-abe8a469f55d" +version = "1.8.1" + +[[deps.PCRE2_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "efcefdf7-47ab-520b-bdef-62a2eaa19f15" +version = "10.42.0+1" + +[[deps.PDMats]] +deps = ["LinearAlgebra", "SparseArrays", "SuiteSparse"] +git-tree-sha1 = "e4cff168707d441cd6bf3ff7e4832bdf34278e4a" +uuid = "90014a1f-27ba-587c-ab20-58faa44d9150" +version = "0.11.37" +weakdeps = ["StatsBase"] + + [deps.PDMats.extensions] + StatsBaseExt = "StatsBase" + +[[deps.PNGFiles]] +deps = ["Base64", "CEnum", "ImageCore", "IndirectArrays", "OffsetArrays", "libpng_jll"] +git-tree-sha1 = "cf181f0b1e6a18dfeb0ee8acc4a9d1672499626c" +uuid = "f57f5aa1-a3ce-4bc8-8ab9-96f992907883" +version = "0.4.4" + +[[deps.Packing]] +deps = ["GeometryBasics"] +git-tree-sha1 = "bc5bf2ea3d5351edf285a06b0016788a121ce92c" +uuid = "19eb6ba3-879d-56ad-ad62-d5c202156566" +version = "0.5.1" + +[[deps.PaddedViews]] +deps = ["OffsetArrays"] +git-tree-sha1 = "0fac6313486baae819364c52b4f483450a9d793f" +uuid = "5432bcbf-9aad-5242-b902-cca2824c8663" +version = "0.5.12" + +[[deps.PairPlots]] +deps = ["Contour", "Distributions", "KernelDensity", "LinearAlgebra", "MCMCDiagnosticTools", "Makie", "Measures", "Missings", "OrderedCollections", "PolygonOps", "PrecompileTools", "Printf", "Requires", "StaticArrays", "Statistics", "StatsBase", "TableOperations", "Tables"] +git-tree-sha1 = "ca8501a0912f6c1e6533904e28cc80a1f23e9247" +uuid = "43a3c2be-4208-490b-832a-a21dcd55d7da" +version = "3.0.3" + + [deps.PairPlots.extensions] + MCMCChainsExt = "MCMCChains" + PairPlotsDynamicQuantitiesExt = "DynamicQuantities" + PairPlotsDynamicUnitfulExt = "Unitful" + + [deps.PairPlots.weakdeps] + DynamicQuantities = "06fc5a27-2a28-4c7c-a15d-362465fb6821" + MCMCChains = "c7f686f2-ff18-58e9-bc7b-31028e88f75d" + Unitful = "1986cc42-f94f-5a68-af5c-568840ba703d" + +[[deps.Pango_jll]] +deps = ["Artifacts", "Cairo_jll", "Fontconfig_jll", "FreeType2_jll", "FriBidi_jll", "Glib_jll", "HarfBuzz_jll", "JLLWrappers", "Libdl"] 
+git-tree-sha1 = "0662b083e11420952f2e62e17eddae7fc07d5997" +uuid = "36c8627f-9965-5494-a995-c6b170f724f3" +version = "1.57.0+0" + +[[deps.Parsers]] +deps = ["Dates", "PrecompileTools", "UUIDs"] +git-tree-sha1 = "7d2f8f21da5db6a806faf7b9b292296da42b2810" +uuid = "69de0a69-1ddd-5017-9359-2bf0b02dc9f0" +version = "2.8.3" + +[[deps.Pixman_jll]] +deps = ["Artifacts", "CompilerSupportLibraries_jll", "JLLWrappers", "LLVMOpenMP_jll", "Libdl"] +git-tree-sha1 = "db76b1ecd5e9715f3d043cec13b2ec93ce015d53" +uuid = "30392449-352a-5448-841d-b1acce4e97dc" +version = "0.44.2+0" + +[[deps.Pkg]] +deps = ["Artifacts", "Dates", "Downloads", "FileWatching", "LibGit2", "Libdl", "Logging", "Markdown", "Printf", "Random", "SHA", "TOML", "Tar", "UUIDs", "p7zip_jll"] +uuid = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f" +version = "1.11.0" +weakdeps = ["REPL"] + + [deps.Pkg.extensions] + REPLExt = "REPL" + +[[deps.PkgVersion]] +deps = ["Pkg"] +git-tree-sha1 = "f9501cc0430a26bc3d156ae1b5b0c1b47af4d6da" +uuid = "eebad327-c553-4316-9ea0-9fa01ccd7688" +version = "0.3.3" + +[[deps.PlotThemes]] +deps = ["PlotUtils", "Statistics"] +git-tree-sha1 = "41031ef3a1be6f5bbbf3e8073f210556daeae5ca" +uuid = "ccf2f8ad-2431-5c83-bf29-c5338b663b6a" +version = "3.3.0" + +[[deps.PlotUtils]] +deps = ["ColorSchemes", "Colors", "Dates", "PrecompileTools", "Printf", "Random", "Reexport", "StableRNGs", "Statistics"] +git-tree-sha1 = "26ca162858917496748aad52bb5d3be4d26a228a" +uuid = "995b91a9-d308-5afd-9ec6-746e21dbc043" +version = "1.4.4" + +[[deps.Plots]] +deps = ["Base64", "Contour", "Dates", "Downloads", "FFMPEG", "FixedPointNumbers", "GR", "JLFzf", "JSON", "LaTeXStrings", "Latexify", "LinearAlgebra", "Measures", "NaNMath", "Pkg", "PlotThemes", "PlotUtils", "PrecompileTools", "Printf", "REPL", "Random", "RecipesBase", "RecipesPipeline", "Reexport", "RelocatableFolders", "Requires", "Scratch", "Showoff", "SparseArrays", "Statistics", "StatsBase", "TOML", "UUIDs", "UnicodeFun", "Unzip"] +git-tree-sha1 = 
"cb20a4eacda080e517e4deb9cfb6c7c518131265" +uuid = "91a5bcdd-55d7-5caf-9e0b-520d859cae80" +version = "1.41.6" + + [deps.Plots.extensions] + FileIOExt = "FileIO" + GeometryBasicsExt = "GeometryBasics" + IJuliaExt = "IJulia" + ImageInTerminalExt = "ImageInTerminal" + UnitfulExt = "Unitful" + + [deps.Plots.weakdeps] + FileIO = "5789e2e9-d7fb-5bc7-8068-2c6fae9b9549" + GeometryBasics = "5c1252a2-5f33-56bf-86c9-59e7332b4326" + IJulia = "7073ff75-c697-5162-941a-fcdaad2a7d2a" + ImageInTerminal = "d8c32880-2388-543b-8c61-d9f865259254" + Unitful = "1986cc42-f94f-5a68-af5c-568840ba703d" + +[[deps.Polyester]] +deps = ["ArrayInterface", "BitTwiddlingConvenienceFunctions", "CPUSummary", "IfElse", "ManualMemory", "PolyesterWeave", "Static", "StaticArrayInterface", "StrideArraysCore", "ThreadingUtilities"] +git-tree-sha1 = "16bbc30b5ebea91e9ce1671adc03de2832cff552" +uuid = "f517fe37-dbe3-4b94-8317-1923a5111588" +version = "0.7.19" + +[[deps.PolyesterWeave]] +deps = ["BitTwiddlingConvenienceFunctions", "CPUSummary", "IfElse", "Static", "ThreadingUtilities"] +git-tree-sha1 = "645bed98cd47f72f67316fd42fc47dee771aefcd" +uuid = "1d0040c9-8b98-4ee7-8388-3f51789ca0ad" +version = "0.2.2" + +[[deps.PolygonOps]] +git-tree-sha1 = "77b3d3605fc1cd0b42d95eba87dfcd2bf67d5ff6" +uuid = "647866c9-e3ac-4575-94e7-e3d426903924" +version = "0.1.2" + +[[deps.PooledArrays]] +deps = ["DataAPI", "Future"] +git-tree-sha1 = "36d8b4b899628fb92c2749eb488d884a926614d3" +uuid = "2dfb63ee-cc39-5dd5-95bd-886bf059d720" +version = "1.4.3" + +[[deps.PreallocationTools]] +deps = ["Adapt", "ArrayInterface", "PrecompileTools"] +git-tree-sha1 = "e16b73bf892c55d16d53c9c0dbd0fb31cb7e25da" +uuid = "d236fae5-4411-538c-8e31-a6e3d9e00b46" +version = "1.2.0" + + [deps.PreallocationTools.extensions] + PreallocationToolsForwardDiffExt = "ForwardDiff" + PreallocationToolsReverseDiffExt = "ReverseDiff" + PreallocationToolsSparseConnectivityTracerExt = "SparseConnectivityTracer" + + [deps.PreallocationTools.weakdeps] + ForwardDiff = 
"f6369f11-7733-5829-9624-2563aa707210" + ReverseDiff = "37e2e3b7-166d-5795-8a7a-e32c996b4267" + SparseConnectivityTracer = "9f842d2f-2579-4b1d-911e-f412cf18a3f5" + +[[deps.PrecompileTools]] +deps = ["Preferences"] +git-tree-sha1 = "5aa36f7049a63a1528fe8f7c3f2113413ffd4e1f" +uuid = "aea7be01-6a6a-4083-8856-8a6e6704d82a" +version = "1.2.1" + +[[deps.Preferences]] +deps = ["TOML"] +git-tree-sha1 = "8b770b60760d4451834fe79dd483e318eee709c4" +uuid = "21216c6a-2e73-6563-6e65-726566657250" +version = "1.5.2" + +[[deps.PrettyPrint]] +git-tree-sha1 = "632eb4abab3449ab30c5e1afaa874f0b98b586e4" +uuid = "8162dcfd-2161-5ef2-ae6c-7681170c5f98" +version = "0.2.0" + +[[deps.PrettyTables]] +deps = ["Crayons", "LaTeXStrings", "Markdown", "PrecompileTools", "Printf", "REPL", "Reexport", "StringManipulation", "Tables"] +git-tree-sha1 = "624de6279ab7d94fc9f672f0068107eb6619732c" +uuid = "08abe8d2-0d0c-5749-adfa-8a2ac140af0d" +version = "3.3.2" + + [deps.PrettyTables.extensions] + PrettyTablesTypstryExt = "Typstry" + + [deps.PrettyTables.weakdeps] + Typstry = "f0ed7684-a786-439e-b1e3-3b82803b501e" + +[[deps.Primes]] +deps = ["IntegerMathUtils"] +git-tree-sha1 = "25cdd1d20cd005b52fc12cb6be3f75faaf59bb9b" +uuid = "27ebfcd6-29c5-5fa9-bf4b-fb8fc14df3ae" +version = "0.5.7" + +[[deps.Printf]] +deps = ["Unicode"] +uuid = "de0858da-6303-5e67-8744-51eddeeeb8d7" +version = "1.11.0" + +[[deps.ProgressLogging]] +deps = ["Logging", "SHA", "UUIDs"] +git-tree-sha1 = "f0803bc1171e455a04124affa9c21bba5ac4db32" +uuid = "33c8b6b6-d38a-422a-b730-caa89a2f386c" +version = "0.1.6" + +[[deps.ProgressMeter]] +deps = ["Distributed", "Printf"] +git-tree-sha1 = "fbb92c6c56b34e1a2c4c36058f68f332bec840e7" +uuid = "92933f4c-e287-5a05-a399-4b506db050ca" +version = "1.11.0" + +[[deps.PtrArrays]] +git-tree-sha1 = "4fbbafbc6251b883f4d2705356f3641f3652a7fe" +uuid = "43287f4e-b6f4-7ad1-bb20-aadabca52c3d" +version = "1.4.0" + +[[deps.QOI]] +deps = ["ColorTypes", "FileIO", "FixedPointNumbers"] +git-tree-sha1 = 
"472daaa816895cb7aee81658d4e7aec901fa1106" +uuid = "4b34888f-f399-49d4-9bb3-47ed5cae4e65" +version = "1.0.2" + +[[deps.Qt6Base_jll]] +deps = ["Artifacts", "CompilerSupportLibraries_jll", "Fontconfig_jll", "Glib_jll", "JLLWrappers", "Libdl", "Libglvnd_jll", "OpenSSL_jll", "Vulkan_Loader_jll", "Xorg_libSM_jll", "Xorg_libXext_jll", "Xorg_libXrender_jll", "Xorg_libxcb_jll", "Xorg_xcb_util_cursor_jll", "Xorg_xcb_util_image_jll", "Xorg_xcb_util_keysyms_jll", "Xorg_xcb_util_renderutil_jll", "Xorg_xcb_util_wm_jll", "Zlib_jll", "libinput_jll", "xkbcommon_jll"] +git-tree-sha1 = "d7a4bff94f42208ce3cf6bc8e4e7d1d663e7ee8b" +uuid = "c0090381-4147-56d7-9ebc-da0b1113ec56" +version = "6.10.2+1" + +[[deps.Qt6Declarative_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Qt6Base_jll", "Qt6ShaderTools_jll", "Qt6Svg_jll"] +git-tree-sha1 = "d5b7dd0e226774cbd87e2790e34def09245c7eab" +uuid = "629bc702-f1f5-5709-abd5-49b8460ea067" +version = "6.10.2+1" + +[[deps.Qt6ShaderTools_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Qt6Base_jll"] +git-tree-sha1 = "4d85eedf69d875982c46643f6b4f66919d7e157b" +uuid = "ce943373-25bb-56aa-8eca-768745ed7b5a" +version = "6.10.2+1" + +[[deps.Qt6Svg_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Qt6Base_jll"] +git-tree-sha1 = "81587ff5ff25a4e1115ce191e36285ede0334c9d" +uuid = "6de9746b-f93d-5813-b365-ba18ad4a9cf3" +version = "6.10.2+0" + +[[deps.Qt6Wayland_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Qt6Base_jll", "Qt6Declarative_jll"] +git-tree-sha1 = "672c938b4b4e3e0169a07a5f227029d4905456f2" +uuid = "e99dba38-086e-5de3-a5b1-6e4c66e897c3" +version = "6.10.2+1" + +[[deps.QuadGK]] +deps = ["DataStructures", "LinearAlgebra"] +git-tree-sha1 = "5e8e8b0ab68215d7a2b14b9921a946fee794749e" +uuid = "1fd47b50-473d-5c70-9696-f719f8f3bcdc" +version = "2.11.3" + + [deps.QuadGK.extensions] + QuadGKEnzymeExt = "Enzyme" + + [deps.QuadGK.weakdeps] + Enzyme = "7da242da-08ed-463a-9acd-ee780be4f1d9" + +[[deps.REPL]] +deps = ["InteractiveUtils", "Markdown", 
"Sockets", "StyledStrings", "Unicode"] +uuid = "3fa0cd96-eef1-5676-8a61-b3b8758bbffb" +version = "1.11.0" + +[[deps.Random]] +deps = ["SHA"] +uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" +version = "1.11.0" + +[[deps.Random123]] +deps = ["Random", "RandomNumbers"] +git-tree-sha1 = "dbe5fd0b334694e905cb9fda73cd8554333c46e2" +uuid = "74087812-796a-5b5d-8853-05524746bad3" +version = "1.7.1" + +[[deps.RandomNumbers]] +deps = ["Random"] +git-tree-sha1 = "c6ec94d2aaba1ab2ff983052cf6a606ca5985902" +uuid = "e6cf234a-135c-5ec9-84dd-332b85af5143" +version = "1.6.0" + +[[deps.RangeArrays]] +git-tree-sha1 = "b9039e93773ddcfc828f12aadf7115b4b4d225f5" +uuid = "b3c3ace0-ae52-54e7-9d0b-2c1406fd6b9d" +version = "0.3.2" + +[[deps.Ratios]] +deps = ["Requires"] +git-tree-sha1 = "1342a47bf3260ee108163042310d26f2be5ec90b" +uuid = "c84ed2f1-dad5-54f0-aa8e-dbefe2724439" +version = "0.4.5" +weakdeps = ["FixedPointNumbers"] + + [deps.Ratios.extensions] + RatiosFixedPointNumbersExt = "FixedPointNumbers" + +[[deps.ReactantCore]] +deps = ["ExpressionExplorer", "MacroTools"] +git-tree-sha1 = "5b9e0fe7fb2cf3794fd96ac32bf2732aa4bb9776" +uuid = "a3311ec8-5e00-46d5-b541-4f83e724a433" +version = "0.1.19" + +[[deps.RealDot]] +deps = ["LinearAlgebra"] +git-tree-sha1 = "9f0a1b71baaf7650f4fa8a1d168c7fb6ee41f0c9" +uuid = "c1ae055f-0cd5-4b69-90a6-9a35b1a98df9" +version = "0.1.0" + +[[deps.RecipesBase]] +deps = ["PrecompileTools"] +git-tree-sha1 = "5c3d09cc4f31f5fc6af001c250bf1278733100ff" +uuid = "3cdcf5f2-1ef4-517c-9805-6587b60abb01" +version = "1.3.4" + +[[deps.RecipesPipeline]] +deps = ["Dates", "NaNMath", "PlotUtils", "PrecompileTools", "RecipesBase"] +git-tree-sha1 = "45cf9fd0ca5839d06ef333c8201714e888486342" +uuid = "01d81517-befc-4cb6-b9ec-a95719d0359c" +version = "0.6.12" + +[[deps.RecursiveArrayTools]] +deps = ["Adapt", "ArrayInterface", "DocStringExtensions", "GPUArraysCore", "LinearAlgebra", "PrecompileTools", "RecipesBase", "StaticArraysCore", "SymbolicIndexingInterface"] +git-tree-sha1 = 
"d0282d612f22dcad7b81cf487b746e63aa2a6709" +uuid = "731186ca-8d62-57ce-b412-fbd966d074cd" +version = "3.54.0" + + [deps.RecursiveArrayTools.extensions] + RecursiveArrayToolsFastBroadcastExt = "FastBroadcast" + RecursiveArrayToolsFastBroadcastPolyesterExt = ["FastBroadcast", "Polyester"] + RecursiveArrayToolsForwardDiffExt = "ForwardDiff" + RecursiveArrayToolsKernelAbstractionsExt = "KernelAbstractions" + RecursiveArrayToolsMeasurementsExt = "Measurements" + RecursiveArrayToolsMonteCarloMeasurementsExt = "MonteCarloMeasurements" + RecursiveArrayToolsReverseDiffExt = ["ReverseDiff", "Zygote"] + RecursiveArrayToolsSparseArraysExt = ["SparseArrays"] + RecursiveArrayToolsStatisticsExt = "Statistics" + RecursiveArrayToolsStructArraysExt = "StructArrays" + RecursiveArrayToolsTablesExt = ["Tables"] + RecursiveArrayToolsTrackerExt = "Tracker" + RecursiveArrayToolsZygoteExt = "Zygote" + + [deps.RecursiveArrayTools.weakdeps] + FastBroadcast = "7034ab61-46d4-4ed7-9d0f-46aef9175898" + ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210" + KernelAbstractions = "63c18a36-062a-441e-b654-da1e3ab1ce7c" + Measurements = "eff96d63-e80a-5855-80a2-b1b0885c5ab7" + MonteCarloMeasurements = "0987c9cc-fe09-11e8-30f0-b96dd679fdca" + Polyester = "f517fe37-dbe3-4b94-8317-1923a5111588" + ReverseDiff = "37e2e3b7-166d-5795-8a7a-e32c996b4267" + SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf" + Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2" + StructArrays = "09ab397b-f2b6-538f-b94a-2f83cf4a842a" + Tables = "bd369af6-aec1-5ad0-b16a-f7cc5008161c" + Tracker = "9f7883ad-71c0-57eb-9f7f-b5c9e6d3789c" + Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f" + +[[deps.Reexport]] +git-tree-sha1 = "45e428421666073eab6f2da5c9d310d99bb12f9b" +uuid = "189a3867-3050-52da-a836-e630ba90ab69" +version = "1.2.2" + +[[deps.RelocatableFolders]] +deps = ["SHA", "Scratch"] +git-tree-sha1 = "ffdaf70d81cf6ff22c2b6e733c900c3321cab864" +uuid = "05181044-ff0b-4ac5-8273-598c1e38db00" +version = "1.0.1" + 
+[[deps.Requires]] +deps = ["UUIDs"] +git-tree-sha1 = "62389eeff14780bfe55195b7204c0d8738436d64" +uuid = "ae029012-a4dd-5104-9daa-d747884805df" +version = "1.3.1" + +[[deps.Rmath]] +deps = ["Random", "Rmath_jll"] +git-tree-sha1 = "5b3d50eb374cea306873b371d3f8d3915a018f0b" +uuid = "79098fc4-a85e-5d69-aa6a-4863f24498fa" +version = "0.9.0" + +[[deps.Rmath_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl"] +git-tree-sha1 = "58cdd8fb2201a6267e1db87ff148dd6c1dbd8ad8" +uuid = "f50d1b31-88e8-58de-be2c-1cc44531875f" +version = "0.5.1+0" + +[[deps.Roots]] +deps = ["Accessors", "CommonSolve", "Printf"] +git-tree-sha1 = "3eff988b9bd09543783e2e051b0a1eef11f65c2d" +uuid = "f2b01f46-fcfa-551c-844a-d8ac1e96c665" +version = "2.3.0" + + [deps.Roots.extensions] + RootsChainRulesCoreExt = "ChainRulesCore" + RootsForwardDiffExt = "ForwardDiff" + RootsIntervalRootFindingExt = "IntervalRootFinding" + RootsSymPyExt = "SymPy" + RootsSymPyPythonCallExt = "SymPyPythonCall" + RootsUnitfulExt = "Unitful" + + [deps.Roots.weakdeps] + ChainRulesCore = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4" + ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210" + IntervalRootFinding = "d2bf35a9-74e0-55ec-b149-d360ff49b807" + SymPy = "24249f21-da20-56a4-8eb1-6a02cf4ae2e6" + SymPyPythonCall = "bc8888f7-b21e-4b7c-a06a-5d9c9496438c" + Unitful = "1986cc42-f94f-5a68-af5c-568840ba703d" + +[[deps.RoundingEmulator]] +git-tree-sha1 = "40b9edad2e5287e05bd413a38f61a8ff55b9557b" +uuid = "5eaf0fd0-dfba-4ccb-bf02-d820a40db705" +version = "0.2.1" + +[[deps.RuntimeGeneratedFunctions]] +deps = ["ExprTools", "SHA", "Serialization"] +git-tree-sha1 = "cfcdc949c4660544ab0fdeed169561cb22f835f4" +uuid = "7e49a35a-f44a-4d26-94aa-eba1b4ca6b47" +version = "0.5.18" + +[[deps.SHA]] +uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce" +version = "0.7.0" + +[[deps.SIMD]] +deps = ["PrecompileTools"] +git-tree-sha1 = "e24dc23107d426a096d3eae6c165b921e74c18e4" +uuid = "fdea26ae-647d-5447-a871-4b548cad5224" +version = "3.7.2" + +[[deps.SIMDTypes]] 
+git-tree-sha1 = "330289636fb8107c5f32088d2741e9fd7a061a5c" +uuid = "94e857df-77ce-4151-89e5-788b33177be4" +version = "0.1.0" + +[[deps.SLEEFPirates]] +deps = ["IfElse", "Static", "VectorizationBase"] +git-tree-sha1 = "456f610ca2fbd1c14f5fcf31c6bfadc55e7d66e0" +uuid = "476501e8-09a2-5ece-8869-fb82de89a1fa" +version = "0.6.43" + +[[deps.SciMLBase]] +deps = ["ADTypes", "Accessors", "Adapt", "ArrayInterface", "CommonSolve", "ConstructionBase", "Distributed", "DocStringExtensions", "EnumX", "FunctionWrappersWrappers", "IteratorInterfaceExtensions", "LinearAlgebra", "Logging", "Markdown", "Moshi", "PreallocationTools", "PrecompileTools", "Preferences", "Printf", "RecipesBase", "RecursiveArrayTools", "Reexport", "RuntimeGeneratedFunctions", "SciMLLogging", "SciMLOperators", "SciMLPublic", "SciMLStructures", "StaticArraysCore", "Statistics", "SymbolicIndexingInterface"] +git-tree-sha1 = "908c0bf271604d09393a21c142116ab26f66f67c" +uuid = "0bca4576-84f4-4d90-8ffe-ffa030f20462" +version = "2.154.0" + + [deps.SciMLBase.extensions] + SciMLBaseChainRulesCoreExt = "ChainRulesCore" + SciMLBaseDifferentiationInterfaceExt = "DifferentiationInterface" + SciMLBaseDistributionsExt = "Distributions" + SciMLBaseEnzymeExt = "Enzyme" + SciMLBaseForwardDiffExt = "ForwardDiff" + SciMLBaseMLStyleExt = "MLStyle" + SciMLBaseMakieExt = "Makie" + SciMLBaseMeasurementsExt = "Measurements" + SciMLBaseMonteCarloMeasurementsExt = "MonteCarloMeasurements" + SciMLBaseMooncakeExt = "Mooncake" + SciMLBasePartialFunctionsExt = "PartialFunctions" + SciMLBasePyCallExt = "PyCall" + SciMLBasePythonCallExt = "PythonCall" + SciMLBaseRCallExt = "RCall" + SciMLBaseReverseDiffExt = "ReverseDiff" + SciMLBaseTrackerExt = "Tracker" + SciMLBaseZygoteExt = ["Zygote", "ChainRulesCore"] + + [deps.SciMLBase.weakdeps] + ChainRules = "082447d4-558c-5d27-93f4-14fc19e9eca2" + ChainRulesCore = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4" + DifferentiationInterface = "a0c0ee7d-e4b9-4e03-894e-1c5f64a51d63" + Distributions = 
"31c24e10-a181-5473-b8eb-7969acd0382f" + Enzyme = "7da242da-08ed-463a-9acd-ee780be4f1d9" + ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210" + MLStyle = "d8e11817-5142-5d16-987a-aa16d5891078" + Makie = "ee78f7c6-11fb-53f2-987a-cfe4a2b5a57a" + Measurements = "eff96d63-e80a-5855-80a2-b1b0885c5ab7" + MonteCarloMeasurements = "0987c9cc-fe09-11e8-30f0-b96dd679fdca" + Mooncake = "da2b9cff-9c12-43a0-ae48-6db2b0edb7d6" + PartialFunctions = "570af359-4316-4cb7-8c74-252c00c2016b" + PyCall = "438e738f-606a-5dbb-bf0a-cddfbfd45ab0" + PythonCall = "6099a3de-0909-46bc-b1f4-468b9a2dfc0d" + RCall = "6f49c342-dc21-5d91-9882-a32aef131414" + ReverseDiff = "37e2e3b7-166d-5795-8a7a-e32c996b4267" + Tracker = "9f7883ad-71c0-57eb-9f7f-b5c9e6d3789c" + Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f" + +[[deps.SciMLLogging]] +deps = ["Logging", "LoggingExtras", "Preferences"] +git-tree-sha1 = "0161be062570af4042cf6f69e3d5d0b0555b6927" +uuid = "a6db7da4-7206-11f0-1eab-35f2a5dbe1d1" +version = "1.9.1" +weakdeps = ["Tracy"] + + [deps.SciMLLogging.extensions] + SciMLLoggingTracyExt = "Tracy" + +[[deps.SciMLOperators]] +deps = ["Accessors", "ArrayInterface", "DocStringExtensions", "LinearAlgebra"] +git-tree-sha1 = "234869cf9fee9258a95464b7a7065cc7be84db00" +uuid = "c0aeaf25-5076-4817-a8d5-81caf7dfa961" +version = "1.16.0" +weakdeps = ["SparseArrays", "StaticArraysCore"] + + [deps.SciMLOperators.extensions] + SciMLOperatorsSparseArraysExt = "SparseArrays" + SciMLOperatorsStaticArraysCoreExt = "StaticArraysCore" + +[[deps.SciMLPublic]] +git-tree-sha1 = "0ba076dbdce87ba230fff48ca9bca62e1f345c9b" +uuid = "431bcebd-1456-4ced-9d72-93c2757fff0b" +version = "1.0.1" + +[[deps.SciMLStructures]] +deps = ["ArrayInterface", "PrecompileTools"] +git-tree-sha1 = "607f6867d0b0553e98fc7f725c9f9f13b4d01a32" +uuid = "53ae85a6-f571-4167-b2af-e1d143709226" +version = "1.10.0" + +[[deps.ScientificTypesBase]] +deps = ["InteractiveUtils"] +git-tree-sha1 = "e785eaa35a0f5518a388f9010e66fda64ea95ede" +uuid = 
"30f210dd-8aff-4c5f-94ba-8e64358c1161" +version = "3.1.0" + +[[deps.ScopedValues]] +deps = ["HashArrayMappedTries", "Logging"] +git-tree-sha1 = "ac4b837d89a58c848e85e698e2a2514e9d59d8f6" +uuid = "7e506255-f358-4e82-b7e4-beb19740aa63" +version = "1.6.0" + +[[deps.Scratch]] +deps = ["Dates"] +git-tree-sha1 = "9b81b8393e50b7d4e6d0a9f14e192294d3b7c109" +uuid = "6c6a2e73-6563-6170-7368-637461726353" +version = "1.3.0" + +[[deps.SentinelArrays]] +deps = ["Dates", "Random"] +git-tree-sha1 = "ebe7e59b37c400f694f52b58c93d26201387da70" +uuid = "91c51154-3ec4-41a3-a24f-3f23e20d615c" +version = "1.4.9" + +[[deps.Serialization]] +uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b" +version = "1.11.0" + +[[deps.Setfield]] +deps = ["ConstructionBase", "Future", "MacroTools", "StaticArraysCore"] +git-tree-sha1 = "c5391c6ace3bc430ca630251d02ea9687169ca68" +uuid = "efcf1570-3423-57d1-acb7-fd33fddbac46" +version = "1.1.2" + +[[deps.ShaderAbstractions]] +deps = ["ColorTypes", "FixedPointNumbers", "GeometryBasics", "LinearAlgebra", "Observables", "StaticArrays"] +git-tree-sha1 = "818554664a2e01fc3784becb2eb3a82326a604b6" +uuid = "65257c39-d410-5151-9873-9b3e5be5013e" +version = "0.5.0" + +[[deps.SharedArrays]] +deps = ["Distributed", "Mmap", "Random", "Serialization"] +uuid = "1a1011a3-84de-559e-8e89-a11a2f7dc383" +version = "1.11.0" + +[[deps.ShiftedArrays]] +git-tree-sha1 = "503688b59397b3307443af35cd953a13e8005c16" +uuid = "1277b4bf-5013-50f5-be3d-901d8477a67a" +version = "2.0.0" + +[[deps.ShowCases]] +git-tree-sha1 = "7f534ad62ab2bd48591bdeac81994ea8c445e4a5" +uuid = "605ecd9f-84a6-4c9e-81e2-4798472b76a3" +version = "0.1.0" + +[[deps.Showoff]] +deps = ["Dates", "Grisu"] +git-tree-sha1 = "91eddf657aca81df9ae6ceb20b959ae5653ad1de" +uuid = "992d4aef-0814-514b-bc4d-f2e9a6c4116f" +version = "1.0.3" + +[[deps.SignedDistanceFields]] +deps = ["Statistics"] +git-tree-sha1 = "3949ad92e1c9d2ff0cd4a1317d5ecbba682f4b92" +uuid = "73760f76-fbc4-59ce-8f25-708e95d2df96" +version = "0.4.1" + 
+[[deps.SimpleBufferStream]] +git-tree-sha1 = "f305871d2f381d21527c770d4788c06c097c9bc1" +uuid = "777ac1f9-54b0-4bf8-805c-2214025038e7" +version = "1.2.0" + +[[deps.SimpleChains]] +deps = ["ArrayInterface", "CPUSummary", "ChainRulesCore", "CloseOpenIntervals", "ForwardDiff", "HostCPUFeatures", "IfElse", "LayoutPointers", "LoopVectorization", "ManualMemory", "Polyester", "Random", "SIMDTypes", "SLEEFPirates", "Static", "StaticArrayInterface", "StaticArrays", "StrideArraysCore", "UnPack", "VectorizationBase", "VectorizedRNG"] +git-tree-sha1 = "bd3f17dc89fe287eea8f94dd771f9343443dc664" +uuid = "de6bee2f-e2f4-4ec7-b6ed-219cc6f6e9e5" +version = "0.4.8" + +[[deps.SimpleTraits]] +deps = ["InteractiveUtils", "MacroTools"] +git-tree-sha1 = "be8eeac05ec97d379347584fa9fe2f5f76795bcb" +uuid = "699a6c99-e7fa-54fc-8d76-47d257e15c1d" +version = "0.9.5" + +[[deps.Sixel]] +deps = ["Dates", "FileIO", "ImageCore", "IndirectArrays", "OffsetArrays", "REPL", "libsixel_jll"] +git-tree-sha1 = "0494aed9501e7fb65daba895fb7fd57cc38bc743" +uuid = "45858cf5-a6b0-47a3-bbea-62219f50df47" +version = "0.1.5" + +[[deps.Sockets]] +uuid = "6462fe0b-24de-5631-8697-dd941f90decc" +version = "1.11.0" + +[[deps.SortingAlgorithms]] +deps = ["DataStructures"] +git-tree-sha1 = "64d974c2e6fdf07f8155b5b2ca2ffa9069b608d9" +uuid = "a2af1166-a08f-5f64-846c-94a0d3cef48c" +version = "1.2.2" + +[[deps.SparseArrays]] +deps = ["Libdl", "LinearAlgebra", "Random", "Serialization", "SuiteSparse_jll"] +uuid = "2f01184e-e22b-5df5-ae63-d93ebab69eaf" +version = "1.11.0" + +[[deps.SparseConnectivityTracer]] +deps = ["ADTypes", "DocStringExtensions", "FillArrays", "LinearAlgebra", "Random", "SparseArrays"] +git-tree-sha1 = "590b72143436e443888124aaf4026a636049e3f5" +uuid = "9f842d2f-2579-4b1d-911e-f412cf18a3f5" +version = "1.2.1" +weakdeps = ["ChainRulesCore", "LogExpFunctions", "NNlib", "NaNMath", "SpecialFunctions"] + + [deps.SparseConnectivityTracer.extensions] + SparseConnectivityTracerChainRulesCoreExt = "ChainRulesCore" 
+ SparseConnectivityTracerLogExpFunctionsExt = "LogExpFunctions" + SparseConnectivityTracerNNlibExt = "NNlib" + SparseConnectivityTracerNaNMathExt = "NaNMath" + SparseConnectivityTracerSpecialFunctionsExt = "SpecialFunctions" + +[[deps.SparseInverseSubset]] +deps = ["LinearAlgebra", "SparseArrays", "SuiteSparse"] +git-tree-sha1 = "52962839426b75b3021296f7df242e40ecfc0852" +uuid = "dc90abb0-5640-4711-901d-7e5b23a2fada" +version = "0.1.2" + +[[deps.SparseMatrixColorings]] +deps = ["ADTypes", "DocStringExtensions", "LinearAlgebra", "PrecompileTools", "Random", "SparseArrays"] +git-tree-sha1 = "1c1be8c6fdfaf9b6c9e156c509e672953b8e6af7" +uuid = "0a514795-09f3-496d-8182-132a7b665d35" +version = "0.4.26" + + [deps.SparseMatrixColorings.extensions] + SparseMatrixColoringsCUDAExt = "CUDA" + SparseMatrixColoringsCliqueTreesExt = "CliqueTrees" + SparseMatrixColoringsColorsExt = "Colors" + SparseMatrixColoringsJuMPExt = ["JuMP", "MathOptInterface"] + + [deps.SparseMatrixColorings.weakdeps] + CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba" + CliqueTrees = "60701a23-6482-424a-84db-faee86b9b1f8" + Colors = "5ae59095-9a9b-59fe-a467-6f913c188581" + JuMP = "4076af6c-e467-56ae-b986-b466b2749572" + MathOptInterface = "b8f27783-ece8-5eb3-8dc8-9495eed66fee" + +[[deps.SpecialFunctions]] +deps = ["IrrationalConstants", "LogExpFunctions", "OpenLibm_jll", "OpenSpecFun_jll"] +git-tree-sha1 = "2700b235561b0335d5bef7097a111dc513b8655e" +uuid = "276daf66-3868-5448-9aa4-cd146d93841b" +version = "2.7.2" +weakdeps = ["ChainRulesCore"] + + [deps.SpecialFunctions.extensions] + SpecialFunctionsChainRulesCoreExt = "ChainRulesCore" + +[[deps.SplittablesBase]] +deps = ["Setfield", "Test"] +git-tree-sha1 = "e08a62abc517eb79667d0a29dc08a3b589516bb5" +uuid = "171d559e-b47b-412a-8079-5efa626c420e" +version = "0.1.15" + +[[deps.StableRNGs]] +deps = ["Random"] +git-tree-sha1 = "4f96c596b8c8258cc7d3b19797854d368f243ddc" +uuid = "860ef19b-820b-49d6-a774-d7a799459cd3" +version = "1.0.4" + +[[deps.StackViews]] 
+deps = ["OffsetArrays"] +git-tree-sha1 = "be1cf4eb0ac528d96f5115b4ed80c26a8d8ae621" +uuid = "cae243ae-269e-4f55-b966-ac2d0dc13c15" +version = "0.1.2" + +[[deps.Static]] +deps = ["CommonWorldInvalidations", "IfElse", "PrecompileTools", "SciMLPublic"] +git-tree-sha1 = "49440414711eddc7227724ae6e570c7d5559a086" +uuid = "aedffcd0-7271-4cad-89d0-dc628f76c6d3" +version = "1.3.1" + +[[deps.StaticArrayInterface]] +deps = ["ArrayInterface", "Compat", "IfElse", "LinearAlgebra", "PrecompileTools", "SciMLPublic", "Static"] +git-tree-sha1 = "aa1ea41b3d45ac449d10477f65e2b40e3197a0d2" +uuid = "0d7ed370-da01-4f52-bd93-41d350b8b718" +version = "1.9.0" +weakdeps = ["OffsetArrays", "StaticArrays"] + + [deps.StaticArrayInterface.extensions] + StaticArrayInterfaceOffsetArraysExt = "OffsetArrays" + StaticArrayInterfaceStaticArraysExt = "StaticArrays" + +[[deps.StaticArrays]] +deps = ["LinearAlgebra", "PrecompileTools", "Random", "StaticArraysCore"] +git-tree-sha1 = "246a8bb2e6667f832eea063c3a56aef96429a3db" +uuid = "90137ffa-7385-5640-81b9-e52037218182" +version = "1.9.18" +weakdeps = ["ChainRulesCore", "Statistics"] + + [deps.StaticArrays.extensions] + StaticArraysChainRulesCoreExt = "ChainRulesCore" + StaticArraysStatisticsExt = "Statistics" + +[[deps.StaticArraysCore]] +git-tree-sha1 = "6ab403037779dae8c514bad259f32a447262455a" +uuid = "1e83bf80-4336-4d27-bf5d-d5a4f845583c" +version = "1.4.4" + +[[deps.StatisticalTraits]] +deps = ["ScientificTypesBase"] +git-tree-sha1 = "89f86d9376acd18a1a4fbef66a56335a3a7633b8" +uuid = "64bff920-2084-43da-a3e6-9bb72801c0c9" +version = "3.5.0" + +[[deps.Statistics]] +deps = ["LinearAlgebra"] +git-tree-sha1 = "ae3bb1eb3bba077cd276bc5cfc337cc65c3075c0" +uuid = "10745b16-79ce-11e8-11f9-7d13ad32a3b2" +version = "1.11.1" +weakdeps = ["SparseArrays"] + + [deps.Statistics.extensions] + SparseArraysExt = ["SparseArrays"] + +[[deps.StatsAPI]] +deps = ["LinearAlgebra"] +git-tree-sha1 = "178ed29fd5b2a2cfc3bd31c13375ae925623ff36" +uuid = 
"82ae8749-77ed-4fe6-ae5f-f523153014b0" +version = "1.8.0" + +[[deps.StatsBase]] +deps = ["AliasTables", "DataAPI", "DataStructures", "IrrationalConstants", "LinearAlgebra", "LogExpFunctions", "Missings", "Printf", "Random", "SortingAlgorithms", "SparseArrays", "Statistics", "StatsAPI"] +git-tree-sha1 = "aceda6f4e598d331548e04cc6b2124a6148138e3" +uuid = "2913bbd2-ae8a-5f71-8c99-4fb6c76f3a91" +version = "0.34.10" + +[[deps.StatsFuns]] +deps = ["HypergeometricFunctions", "IrrationalConstants", "LogExpFunctions", "Reexport", "Rmath", "SpecialFunctions"] +git-tree-sha1 = "91f091a8716a6bb38417a6e6f274602a19aaa685" +uuid = "4c63d2b9-4356-54db-8cca-17b64c39e42c" +version = "1.5.2" +weakdeps = ["ChainRulesCore", "InverseFunctions"] + + [deps.StatsFuns.extensions] + StatsFunsChainRulesCoreExt = "ChainRulesCore" + StatsFunsInverseFunctionsExt = "InverseFunctions" + +[[deps.StatsModels]] +deps = ["DataAPI", "DataStructures", "LinearAlgebra", "Printf", "REPL", "ShiftedArrays", "SparseArrays", "StatsAPI", "StatsBase", "StatsFuns", "Tables"] +git-tree-sha1 = "08786db4a1346d17d0a8d952d2e66fd00fa18192" +uuid = "3eaba693-59b7-5ba5-a881-562e759f1c8d" +version = "0.7.9" + +[[deps.StrideArraysCore]] +deps = ["ArrayInterface", "CloseOpenIntervals", "IfElse", "LayoutPointers", "LinearAlgebra", "ManualMemory", "SIMDTypes", "Static", "StaticArrayInterface", "ThreadingUtilities"] +git-tree-sha1 = "83151ba8065a73f53ca2ae98bc7274d817aa30f2" +uuid = "7792a7ef-975c-4747-a70f-980b88e8d1da" +version = "0.5.8" + +[[deps.StringManipulation]] +deps = ["PrecompileTools"] +git-tree-sha1 = "d05693d339e37d6ab134c5ab53c29fce5ee5d7d5" +uuid = "892a3eda-7b42-436c-8928-eab12a02cf0e" +version = "0.4.4" + +[[deps.StructArrays]] +deps = ["ConstructionBase", "DataAPI", "Tables"] +git-tree-sha1 = "ad8002667372439f2e3611cfd14097e03fa4bccd" +uuid = "09ab397b-f2b6-538f-b94a-2f83cf4a842a" +version = "0.7.3" +weakdeps = ["Adapt", "GPUArraysCore", "KernelAbstractions", "LinearAlgebra", "SparseArrays", "StaticArrays"] 
+ + [deps.StructArrays.extensions] + StructArraysAdaptExt = "Adapt" + StructArraysGPUArraysCoreExt = ["GPUArraysCore", "KernelAbstractions"] + StructArraysLinearAlgebraExt = "LinearAlgebra" + StructArraysSparseArraysExt = "SparseArrays" + StructArraysStaticArraysExt = "StaticArrays" + +[[deps.StructUtils]] +deps = ["Dates", "UUIDs"] +git-tree-sha1 = "fa95b3b097bcef5845c142ea2e085f1b2591e92c" +uuid = "ec057cc2-7a8d-4b58-b3b3-92acb9f63b42" +version = "2.7.1" + + [deps.StructUtils.extensions] + StructUtilsMeasurementsExt = ["Measurements"] + StructUtilsStaticArraysCoreExt = ["StaticArraysCore"] + StructUtilsTablesExt = ["Tables"] + + [deps.StructUtils.weakdeps] + Measurements = "eff96d63-e80a-5855-80a2-b1b0885c5ab7" + StaticArraysCore = "1e83bf80-4336-4d27-bf5d-d5a4f845583c" + Tables = "bd369af6-aec1-5ad0-b16a-f7cc5008161c" + +[[deps.StyledStrings]] +uuid = "f489334b-da3d-4c2e-b8f0-e476e12c162b" +version = "1.11.0" + +[[deps.SuiteSparse]] +deps = ["Libdl", "LinearAlgebra", "Serialization", "SparseArrays"] +uuid = "4607b0f0-06f3-5cda-b6b1-a6196a1729e9" + +[[deps.SuiteSparse_jll]] +deps = ["Artifacts", "Libdl", "libblastrampoline_jll"] +uuid = "bea87d4a-7f5b-5778-9afe-8cc45184846c" +version = "7.7.0+0" + +[[deps.SymbolicIndexingInterface]] +deps = ["Accessors", "ArrayInterface", "RuntimeGeneratedFunctions", "StaticArraysCore"] +git-tree-sha1 = "94c58884e013efff548002e8dc2fdd1cb74dfce5" +uuid = "2efcf032-c050-4f8e-a9bb-153293bab1f5" +version = "0.3.46" +weakdeps = ["PrettyTables"] + + [deps.SymbolicIndexingInterface.extensions] + SymbolicIndexingInterfacePrettyTablesExt = "PrettyTables" + +[[deps.TOML]] +deps = ["Dates"] +uuid = "fa267f1f-6049-4f14-aa54-33bafae1ed76" +version = "1.0.3" + +[[deps.TableOperations]] +deps = ["SentinelArrays", "Tables", "Test"] +git-tree-sha1 = "e383c87cf2a1dc41fa30c093b2a19877c83e1bc1" +uuid = "ab02a1b2-a7df-11e8-156e-fb1833f50b87" +version = "1.2.0" + +[[deps.TableTraits]] +deps = ["IteratorInterfaceExtensions"] +git-tree-sha1 = 
"c06b2f539df1c6efa794486abfb6ed2022561a39" +uuid = "3783bdb8-4a98-5b6b-af9a-565f29a5fe9c" +version = "1.0.1" + +[[deps.Tables]] +deps = ["DataAPI", "DataValueInterfaces", "IteratorInterfaceExtensions", "OrderedCollections", "TableTraits"] +git-tree-sha1 = "f2c1efbc8f3a609aadf318094f8fc5204bdaf344" +uuid = "bd369af6-aec1-5ad0-b16a-f7cc5008161c" +version = "1.12.1" + +[[deps.Tar]] +deps = ["ArgTools", "SHA"] +uuid = "a4e569a6-e804-4fa4-b0f3-eef7a1d5b13e" +version = "1.10.0" + +[[deps.TensorCore]] +deps = ["LinearAlgebra"] +git-tree-sha1 = "1feb45f88d133a655e001435632f019a9a1bcdb6" +uuid = "62fd8b95-f654-4bbd-a8a5-9c27f68ccd50" +version = "0.1.1" + +[[deps.TerminalLoggers]] +deps = ["LeftChildRightSiblingTrees", "Logging", "Markdown", "Printf", "ProgressLogging", "UUIDs"] +git-tree-sha1 = "f133fab380933d042f6796eda4e130272ba520ca" +uuid = "5d786b92-1e48-4d6f-9151-6b4477ca9bed" +version = "0.1.7" + +[[deps.Test]] +deps = ["InteractiveUtils", "Logging", "Random", "Serialization"] +uuid = "8dfed614-e22c-5e08-85e1-65c5234f0b40" +version = "1.11.0" + +[[deps.ThreadingUtilities]] +deps = ["ManualMemory"] +git-tree-sha1 = "d969183d3d244b6c33796b5ed01ab97328f2db85" +uuid = "8290d209-cae3-49c0-8002-c8c24d57dab5" +version = "0.5.5" + +[[deps.TiffImages]] +deps = ["CodecZstd", "ColorTypes", "DataStructures", "DocStringExtensions", "FileIO", "FixedPointNumbers", "IndirectArrays", "Inflate", "Mmap", "OffsetArrays", "PkgVersion", "PrecompileTools", "ProgressMeter", "SIMD", "UUIDs"] +git-tree-sha1 = "9ca5f1f2d42f80df4b8c9f6ab5a64f438bbd9976" +uuid = "731e570b-9d59-4bfa-96dc-6df516fadf69" +version = "0.11.9" + +[[deps.Tracy]] +deps = ["ExprTools", "LibTracyClient_jll", "Libdl"] +git-tree-sha1 = "73e3ff50fd3990874c59fef0f35d10644a1487bc" +uuid = "e689c965-62c8-4b79-b2c5-8359227902fd" +version = "0.1.6" + + [deps.Tracy.extensions] + TracyProfilerExt = "TracyProfiler_jll" + + [deps.Tracy.weakdeps] + TracyProfiler_jll = "0c351ed6-8a68-550e-8b79-de6f926da83c" + 
+[[deps.TranscodingStreams]] +git-tree-sha1 = "0c45878dcfdcfa8480052b6ab162cdd138781742" +uuid = "3bb67fe8-82b1-5028-8e26-92a6c54297fa" +version = "0.11.3" + +[[deps.Transducers]] +deps = ["Accessors", "ArgCheck", "BangBang", "Baselet", "CompositionsBase", "ConstructionBase", "DefineSingletons", "Distributed", "InitialValues", "Logging", "Markdown", "MicroCollections", "SplittablesBase", "Tables"] +git-tree-sha1 = "4aa1fdf6c1da74661f6f5d3edfd96648321dade9" +uuid = "28d57a85-8fef-5791-bfe6-a80928e7c999" +version = "0.4.85" + + [deps.Transducers.extensions] + TransducersAdaptExt = "Adapt" + TransducersBlockArraysExt = "BlockArrays" + TransducersDataFramesExt = "DataFrames" + TransducersLazyArraysExt = "LazyArrays" + TransducersOnlineStatsBaseExt = "OnlineStatsBase" + TransducersReferenceablesExt = "Referenceables" + + [deps.Transducers.weakdeps] + Adapt = "79e6a3ab-5dfb-504d-930d-738a2a938a0e" + BlockArrays = "8e7c35d0-a365-5155-bbbb-fb81a777f24e" + DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0" + LazyArrays = "5078a376-72f3-5289-bfd5-ec5146d43c02" + OnlineStatsBase = "925886fa-5bf2-5e8e-b522-a9147a512338" + Referenceables = "42d2dcc6-99eb-4e98-b66c-637b7d73030e" + +[[deps.TriplotBase]] +git-tree-sha1 = "4d4ed7f294cda19382ff7de4c137d24d16adc89b" +uuid = "981d1d27-644d-49a2-9326-4793e63143c3" +version = "0.1.0" + +[[deps.TruncatedStacktraces]] +deps = ["InteractiveUtils", "MacroTools", "Preferences"] +git-tree-sha1 = "ea3e54c2bdde39062abf5a9758a23735558705e1" +uuid = "781d530d-4396-4725-bb49-402e4bee1e77" +version = "1.4.0" + +[[deps.URIs]] +git-tree-sha1 = "bef26fb046d031353ef97a82e3fdb6afe7f21b1a" +uuid = "5c2747f8-b7ea-4ff2-ba2e-563bfd36b1d4" +version = "1.6.1" + +[[deps.UUIDs]] +deps = ["Random", "SHA"] +uuid = "cf7118a7-6976-5b1a-9a39-7adc72f591a4" +version = "1.11.0" + +[[deps.UnPack]] +git-tree-sha1 = "387c1f73762231e86e0c9c5443ce3b4a0a9a0c2b" +uuid = "3a884ed6-31ef-47d7-9d2a-63182c4928ed" +version = "1.0.2" + +[[deps.Unicode]] +uuid = 
"4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5" +version = "1.11.0" + +[[deps.UnicodeFun]] +deps = ["REPL"] +git-tree-sha1 = "53915e50200959667e78a92a418594b428dffddf" +uuid = "1cfade01-22cf-5700-b092-accc4b62d6e1" +version = "0.4.1" + +[[deps.Unitful]] +deps = ["Dates", "LinearAlgebra", "Random"] +git-tree-sha1 = "57e1b2c9de4bd6f40ecb9de4ac1797b81970d008" +uuid = "1986cc42-f94f-5a68-af5c-568840ba703d" +version = "1.28.0" +weakdeps = ["ConstructionBase", "ForwardDiff", "InverseFunctions", "LaTeXStrings", "Latexify", "NaNMath", "Printf"] + + [deps.Unitful.extensions] + ConstructionBaseUnitfulExt = "ConstructionBase" + ForwardDiffExt = "ForwardDiff" + InverseFunctionsUnitfulExt = "InverseFunctions" + LatexifyExt = ["Latexify", "LaTeXStrings"] + NaNMathExt = "NaNMath" + PrintfExt = "Printf" + +[[deps.UnsafeAtomics]] +git-tree-sha1 = "0f30765c32d66d58e41f4cb5624d4fc8a82ec13b" +uuid = "013be700-e6cd-48c3-b4a1-df204f14c38f" +version = "0.3.1" +weakdeps = ["LLVM"] + + [deps.UnsafeAtomics.extensions] + UnsafeAtomicsLLVM = ["LLVM"] + +[[deps.Unzip]] +git-tree-sha1 = "ca0969166a028236229f63514992fc073799bb78" +uuid = "41fe7b60-77ed-43a1-b4f0-825fd5a5650d" +version = "0.2.0" + +[[deps.VectorizationBase]] +deps = ["ArrayInterface", "CPUSummary", "HostCPUFeatures", "IfElse", "LayoutPointers", "Libdl", "LinearAlgebra", "SIMDTypes", "Static", "StaticArrayInterface"] +git-tree-sha1 = "d1d9a935a26c475ebffd54e9c7ad11627c43ea85" +uuid = "3d5dd08c-fd9d-11e8-17fa-ed2836048c2f" +version = "0.21.72" + +[[deps.VectorizedRNG]] +deps = ["Distributed", "Random", "SLEEFPirates", "UnPack", "VectorizationBase"] +git-tree-sha1 = "5ca83562ba95272d8709c6c91e31e23c3c4c9825" +uuid = "33b4df10-0173-11e9-2a0c-851a7edac40e" +version = "0.2.25" +weakdeps = ["Requires", "StaticArraysCore"] + + [deps.VectorizedRNG.extensions] + VectorizedRNGStaticArraysExt = ["StaticArraysCore"] + +[[deps.Vulkan_Loader_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Wayland_jll", "Xorg_libX11_jll", "Xorg_libXrandr_jll", 
"xkbcommon_jll"] +git-tree-sha1 = "2f0486047a07670caad3a81a075d2e518acc5c59" +uuid = "a44049a8-05dd-5a78-86c9-5fde0876e88c" +version = "1.3.243+0" + +[[deps.Wayland_jll]] +deps = ["Artifacts", "EpollShim_jll", "Expat_jll", "JLLWrappers", "Libdl", "Libffi_jll"] +git-tree-sha1 = "96478df35bbc2f3e1e791bc7a3d0eeee559e60e9" +uuid = "a2964d1f-97da-50d4-b82a-358c7fce9d89" +version = "1.24.0+0" + +[[deps.WebP]] +deps = ["CEnum", "ColorTypes", "FileIO", "FixedPointNumbers", "ImageCore", "libwebp_jll"] +git-tree-sha1 = "aa1ca3c47f119fbdae8770c29820e5e6119b83f2" +uuid = "e3aaa7dc-3e4b-44e0-be63-ffb868ccd7c1" +version = "0.1.3" + +[[deps.WeightInitializers]] +deps = ["ConcreteStructs", "GPUArraysCore", "LinearAlgebra", "Random", "SpecialFunctions", "Statistics"] +git-tree-sha1 = "d79b71da9e7be904db615bdb99187d30753822a4" +uuid = "d49dbf32-c5c2-4618-8acc-27bb2598ef2d" +version = "1.3.1" + + [deps.WeightInitializers.extensions] + AMDGPUExt = "AMDGPU" + CUDAExt = "CUDA" + ChainRulesCoreExt = "ChainRulesCore" + GPUArraysExt = "GPUArrays" + ReactantExt = "Reactant" + + [deps.WeightInitializers.weakdeps] + AMDGPU = "21141c5a-9bdb-4563-92ae-f87d6854732e" + CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba" + ChainRulesCore = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4" + GPUArrays = "0c68f7d7-f131-5f86-a1c3-88cf8149b2d7" + Reactant = "3c362404-f566-11ee-1572-e11a4b42c853" + +[[deps.WoodburyMatrices]] +deps = ["LinearAlgebra", "SparseArrays"] +git-tree-sha1 = "248a7031b3da79a127f14e5dc5f417e26f9f6db7" +uuid = "efce3f68-66dc-5838-9240-27a6d6f5f9b6" +version = "1.1.0" + +[[deps.XZ_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl"] +git-tree-sha1 = "b29c22e245d092b8b4e8d3c09ad7baa586d9f573" +uuid = "ffd25f8a-64ca-5728-b0f7-c24cf3aae800" +version = "5.8.3+0" + +[[deps.Xorg_libICE_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl"] +git-tree-sha1 = "a3ea76ee3f4facd7a64684f9af25310825ee3668" +uuid = "f67eecfb-183a-506d-b269-f58e52b52d7c" +version = "1.1.2+0" + +[[deps.Xorg_libSM_jll]] +deps = 
["Artifacts", "JLLWrappers", "Libdl", "Xorg_libICE_jll"] +git-tree-sha1 = "9c7ad99c629a44f81e7799eb05ec2746abb5d588" +uuid = "c834827a-8449-5923-a945-d239c165b7dd" +version = "1.2.6+0" + +[[deps.Xorg_libX11_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Xorg_libxcb_jll", "Xorg_xtrans_jll"] +git-tree-sha1 = "808090ede1d41644447dd5cbafced4731c56bd2f" +uuid = "4f6342f7-b3d2-589e-9d20-edeb45f2b2bc" +version = "1.8.13+0" + +[[deps.Xorg_libXau_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl"] +git-tree-sha1 = "aa1261ebbac3ccc8d16558ae6799524c450ed16b" +uuid = "0c0b7dd1-d40b-584c-a123-a41640f87eec" +version = "1.0.13+0" + +[[deps.Xorg_libXcursor_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Xorg_libXfixes_jll", "Xorg_libXrender_jll"] +git-tree-sha1 = "6c74ca84bbabc18c4547014765d194ff0b4dc9da" +uuid = "935fb764-8cf2-53bf-bb30-45bb1f8bf724" +version = "1.2.4+0" + +[[deps.Xorg_libXdmcp_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl"] +git-tree-sha1 = "52858d64353db33a56e13c341d7bf44cd0d7b309" +uuid = "a3789734-cfe1-5b06-b2d0-1dd0d9d62d05" +version = "1.1.6+0" + +[[deps.Xorg_libXext_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Xorg_libX11_jll"] +git-tree-sha1 = "1a4a26870bf1e5d26cd585e38038d399d7e65706" +uuid = "1082639a-0dae-5f34-9b06-72781eeb8cb3" +version = "1.3.8+0" + +[[deps.Xorg_libXfixes_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Xorg_libX11_jll"] +git-tree-sha1 = "75e00946e43621e09d431d9b95818ee751e6b2ef" +uuid = "d091e8ba-531a-589c-9de9-94069b037ed8" +version = "6.0.2+0" + +[[deps.Xorg_libXi_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Xorg_libXext_jll", "Xorg_libXfixes_jll"] +git-tree-sha1 = "a376af5c7ae60d29825164db40787f15c80c7c54" +uuid = "a51aa0fd-4e3c-5386-b890-e753decda492" +version = "1.8.3+0" + +[[deps.Xorg_libXinerama_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Xorg_libXext_jll"] +git-tree-sha1 = "0ba01bc7396896a4ace8aab67db31403c71628f4" +uuid = "d1454406-59df-5ea1-beac-c340f2130bc3" +version = "1.1.7+0" 
+ +[[deps.Xorg_libXrandr_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Xorg_libXext_jll", "Xorg_libXrender_jll"] +git-tree-sha1 = "6c174ef70c96c76f4c3f4d3cfbe09d018bcd1b53" +uuid = "ec84b674-ba8e-5d96-8ba1-2a689ba10484" +version = "1.5.6+0" + +[[deps.Xorg_libXrender_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Xorg_libX11_jll"] +git-tree-sha1 = "7ed9347888fac59a618302ee38216dd0379c480d" +uuid = "ea2f1a96-1ddc-540d-b46f-429655e07cfa" +version = "0.9.12+0" + +[[deps.Xorg_libpciaccess_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Zlib_jll"] +git-tree-sha1 = "4909eb8f1cbf6bd4b1c30dd18b2ead9019ef2fad" +uuid = "a65dc6b1-eb27-53a1-bb3e-dea574b5389e" +version = "0.18.1+0" + +[[deps.Xorg_libxcb_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Xorg_libXau_jll", "Xorg_libXdmcp_jll"] +git-tree-sha1 = "bfcaf7ec088eaba362093393fe11aa141fa15422" +uuid = "c7cfdc94-dc32-55de-ac96-5a1b8d977c5b" +version = "1.17.1+0" + +[[deps.Xorg_libxkbfile_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Xorg_libX11_jll"] +git-tree-sha1 = "ed756a03e95fff88d8f738ebc2849431bdd4fd1a" +uuid = "cc61e674-0454-545c-8b26-ed2c68acab7a" +version = "1.2.0+0" + +[[deps.Xorg_xcb_util_cursor_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Xorg_xcb_util_image_jll", "Xorg_xcb_util_jll", "Xorg_xcb_util_renderutil_jll"] +git-tree-sha1 = "9750dc53819eba4e9a20be42349a6d3b86c7cdf8" +uuid = "e920d4aa-a673-5f3a-b3d7-f755a4d47c43" +version = "0.1.6+0" + +[[deps.Xorg_xcb_util_image_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Xorg_xcb_util_jll"] +git-tree-sha1 = "f4fc02e384b74418679983a97385644b67e1263b" +uuid = "12413925-8142-5f55-bb0e-6d7ca50bb09b" +version = "0.4.1+0" + +[[deps.Xorg_xcb_util_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Xorg_libxcb_jll"] +git-tree-sha1 = "68da27247e7d8d8dafd1fcf0c3654ad6506f5f97" +uuid = "2def613f-5ad1-5310-b15b-b15d46f528f5" +version = "0.4.1+0" + +[[deps.Xorg_xcb_util_keysyms_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", 
"Xorg_xcb_util_jll"] +git-tree-sha1 = "44ec54b0e2acd408b0fb361e1e9244c60c9c3dd4" +uuid = "975044d2-76e6-5fbe-bf08-97ce7c6574c7" +version = "0.4.1+0" + +[[deps.Xorg_xcb_util_renderutil_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Xorg_xcb_util_jll"] +git-tree-sha1 = "5b0263b6d080716a02544c55fdff2c8d7f9a16a0" +uuid = "0d47668e-0667-5a69-a72c-f761630bfb7e" +version = "0.3.10+0" + +[[deps.Xorg_xcb_util_wm_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Xorg_xcb_util_jll"] +git-tree-sha1 = "f233c83cad1fa0e70b7771e0e21b061a116f2763" +uuid = "c22f9ab0-d5fe-5066-847c-f4bb1cd4e361" +version = "0.4.2+0" + +[[deps.Xorg_xkbcomp_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Xorg_libxkbfile_jll"] +git-tree-sha1 = "801a858fc9fb90c11ffddee1801bb06a738bda9b" +uuid = "35661453-b289-5fab-8a00-3d9160c6a3a4" +version = "1.4.7+0" + +[[deps.Xorg_xkeyboard_config_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Xorg_xkbcomp_jll"] +git-tree-sha1 = "00af7ebdc563c9217ecc67776d1bbf037dbcebf4" +uuid = "33bec58e-1273-512f-9401-5d533626f822" +version = "2.44.0+0" + +[[deps.Xorg_xtrans_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl"] +git-tree-sha1 = "a63799ff68005991f9d9491b6e95bd3478d783cb" +uuid = "c5fb5394-a638-5e4d-96e5-b29de1b5cf10" +version = "1.6.0+0" + +[[deps.Zlib_jll]] +deps = ["Libdl"] +uuid = "83775a58-1f1d-513f-b197-d71354ab007a" +version = "1.2.13+1" + +[[deps.Zstd_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl"] +git-tree-sha1 = "446b23e73536f84e8037f5dce465e92275f6a308" +uuid = "3161d3a3-bdf6-5164-811a-617609db77b4" +version = "1.5.7+1" + +[[deps.Zygote]] +deps = ["AbstractFFTs", "ChainRules", "ChainRulesCore", "DiffRules", "Distributed", "FillArrays", "ForwardDiff", "GPUArraysCore", "IRTools", "InteractiveUtils", "LinearAlgebra", "LogExpFunctions", "MacroTools", "NaNMath", "PrecompileTools", "Random", "SparseArrays", "SpecialFunctions", "Statistics", "ZygoteRules"] +git-tree-sha1 = "a29cbf3968d36022198bcc6f23fdfd70f7caf737" +uuid = 
"e88e6eb3-aa80-5325-afca-941959d7151f" +version = "0.7.10" + + [deps.Zygote.extensions] + ZygoteAtomExt = "Atom" + ZygoteColorsExt = "Colors" + ZygoteDistancesExt = "Distances" + ZygoteTrackerExt = "Tracker" + + [deps.Zygote.weakdeps] + Atom = "c52e3926-4ff0-5f6e-af25-54175e0327b1" + Colors = "5ae59095-9a9b-59fe-a467-6f913c188581" + Distances = "b4f34e82-e78d-54a5-968a-f98e89d6e8f7" + Tracker = "9f7883ad-71c0-57eb-9f7f-b5c9e6d3789c" + +[[deps.ZygoteRules]] +deps = ["ChainRulesCore", "MacroTools"] +git-tree-sha1 = "434b3de333c75fc446aa0d19fc394edafd07ab08" +uuid = "700de1a5-db45-46bc-99cf-38207098b444" +version = "0.2.7" + +[[deps.cuDNN]] +deps = ["CEnum", "CUDA", "CUDA_Runtime_Discovery", "CUDNN_jll"] +git-tree-sha1 = "5494b0ae3ddc5ca0f64159d5ed3a396f36e0fcfe" +uuid = "02a925ec-e4fe-4b08-9a7e-0d78e3d38ccd" +version = "1.4.7" + +[[deps.demumble_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl"] +git-tree-sha1 = "6498e3581023f8e530f34760d18f75a69e3a4ea8" +uuid = "1e29f10c-031c-5a83-9565-69cddfc27673" +version = "1.3.0+0" + +[[deps.eudev_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl"] +git-tree-sha1 = "c3b0e6196d50eab0c5ed34021aaa0bb463489510" +uuid = "35ca27e7-8b34-5b7f-bca9-bdc33f59eb06" +version = "3.2.14+0" + +[[deps.fzf_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl"] +git-tree-sha1 = "b6a34e0e0960190ac2a4363a1bd003504772d631" +uuid = "214eeab7-80f7-51ab-84ad-2988db7cef09" +version = "0.61.1+0" + +[[deps.isoband_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] +git-tree-sha1 = "51b5eeb3f98367157a7a12a1fb0aa5328946c03c" +uuid = "9a68df92-36a6-505f-a73e-abb412b6bfb4" +version = "0.2.3+0" + +[[deps.libaom_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl"] +git-tree-sha1 = "371cc681c00a3ccc3fbc5c0fb91f58ba9bec1ecf" +uuid = "a4ae2306-e953-59d6-aa16-d00cac43593b" +version = "3.13.1+0" + +[[deps.libass_jll]] +deps = ["Artifacts", "Bzip2_jll", "FreeType2_jll", "FriBidi_jll", "HarfBuzz_jll", "JLLWrappers", "Libdl", "Zlib_jll"] +git-tree-sha1 = 
"125eedcb0a4a0bba65b657251ce1d27c8714e9d6" +uuid = "0ac62f75-1d6f-5e53-bd7c-93b484bb37c0" +version = "0.17.4+0" + +[[deps.libblastrampoline_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "8e850b90-86db-534c-a0d3-1478176c7d93" +version = "5.11.0+0" + +[[deps.libdecor_jll]] +deps = ["Artifacts", "Dbus_jll", "JLLWrappers", "Libdl", "Libglvnd_jll", "Pango_jll", "Wayland_jll", "xkbcommon_jll"] +git-tree-sha1 = "9bf7903af251d2050b467f76bdbe57ce541f7f4f" +uuid = "1183f4f0-6f2a-5f1a-908b-139f9cdfea6f" +version = "0.2.2+0" + +[[deps.libdrm_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Xorg_libpciaccess_jll"] +git-tree-sha1 = "63aac0bcb0b582e11bad965cef4a689905456c03" +uuid = "8e53e030-5e6c-5a89-a30b-be5b7263a166" +version = "2.4.125+1" + +[[deps.libevdev_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl"] +git-tree-sha1 = "56d643b57b188d30cccc25e331d416d3d358e557" +uuid = "2db6ffa8-e38f-5e21-84af-90c45d0032cc" +version = "1.13.4+0" + +[[deps.libfdk_aac_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl"] +git-tree-sha1 = "646634dd19587a56ee2f1199563ec056c5f228df" +uuid = "f638f0a6-7fb0-5443-88ba-1cc74229b280" +version = "2.0.4+0" + +[[deps.libinput_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "eudev_jll", "libevdev_jll", "mtdev_jll"] +git-tree-sha1 = "91d05d7f4a9f67205bd6cf395e488009fe85b499" +uuid = "36db933b-70db-51c0-b978-0f229ee0e533" +version = "1.28.1+0" + +[[deps.libpng_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Zlib_jll"] +git-tree-sha1 = "45a20bd63e4fafc84ed9e4ac4ba15c8a7deff803" +uuid = "b53b4c65-9356-5827-b1ea-8c7a1a84506f" +version = "1.6.57+0" + +[[deps.libsixel_jll]] +deps = ["Artifacts", "JLLWrappers", "JpegTurbo_jll", "Libdl", "libpng_jll"] +git-tree-sha1 = "c1733e347283df07689d71d61e14be986e49e47a" +uuid = "075b6546-f08a-558a-be8f-8157d0f608a5" +version = "1.10.5+0" + +[[deps.libva_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Xorg_libX11_jll", "Xorg_libXext_jll", "Xorg_libXfixes_jll", "libdrm_jll"] +git-tree-sha1 = 
"7dbf96baae3310fe2fa0df0ccbb3c6288d5816c9" +uuid = "9a156e7d-b971-5f62-b2c9-67348b8fb97c" +version = "2.23.0+0" + +[[deps.libvorbis_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Ogg_jll"] +git-tree-sha1 = "11e1772e7f3cc987e9d3de991dd4f6b2602663a5" +uuid = "f27f6e37-5d2b-51aa-960f-b287f2bc3b7a" +version = "1.3.8+0" + +[[deps.libwebp_jll]] +deps = ["Artifacts", "Giflib_jll", "JLLWrappers", "JpegTurbo_jll", "Libdl", "Libglvnd_jll", "Libtiff_jll", "libpng_jll"] +git-tree-sha1 = "4e4282c4d846e11dce56d74fa8040130b7a95cb3" +uuid = "c5f90fcd-3b7e-5836-afba-fc50a0988cb2" +version = "1.6.0+0" + +[[deps.mtdev_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl"] +git-tree-sha1 = "b4d631fd51f2e9cdd93724ae25b2efc198b059b1" +uuid = "009596ad-96f7-51b1-9f1b-5ce2d5e8a71e" +version = "1.1.7+0" + +[[deps.nghttp2_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "8e850ede-7688-5339-a07c-302acd2aaf8d" +version = "1.59.0+0" + +[[deps.p7zip_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "3f19e933-33d8-53b3-aaab-bd5110c3b7a0" +version = "17.4.0+2" + +[[deps.x264_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl"] +git-tree-sha1 = "14cc7083fc6dff3cc44f2bc435ee96d06ed79aa7" +uuid = "1270edf5-f2f9-52d2-97e9-ab00b5d0237a" +version = "10164.0.1+0" + +[[deps.x265_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl"] +git-tree-sha1 = "e7b67590c14d487e734dcb925924c5dc43ec85f3" +uuid = "dfaa095f-4041-5dcd-9319-2fabd8486b76" +version = "4.1.0+0" + +[[deps.xkbcommon_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Xorg_libxcb_jll", "Xorg_xkeyboard_config_jll"] +git-tree-sha1 = "a1fc6507a40bf504527d0d4067d718f8e179b2b8" +uuid = "d8fb68d0-12a3-5cfd-a85a-d49703b185fd" +version = "1.13.0+0" diff --git a/projects/clustered_sites/Project.toml b/projects/clustered_sites/Project.toml new file mode 100644 index 0000000..061c4a6 --- /dev/null +++ b/projects/clustered_sites/Project.toml @@ -0,0 +1,29 @@ +[deps] +AlgebraOfGraphics = "cbdf2221-f076-402e-a563-3d30da359d67" +Bijectors = 
"76274a88-744f-5084-9051-94815aaf08c4" +CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba" +CairoMakie = "13f3f980-e62b-5c42-98c6-ff1f3baf88f0" +Clustering = "aaaa29a8-35af-508c-8bc3-b662a17a0fe5" +CommonSolve = "38540f10-b2f7-11e9-35d8-d573e4eb0ff2" +ComponentArrays = "b0b7db55-cfe3-40fc-9ded-d10e2dbeff66" +DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0" +Distances = "b4f34e82-e78d-54a5-968a-f98e89d6e8f7" +DistributionFits = "45214091-1ed4-4409-9bcf-fdb48a05e921" +Distributions = "31c24e10-a181-5473-b8eb-7969acd0382f" +HybridVariationalInference = "a108c475-a4e2-4021-9a84-cfa7df242f64" +JLD2 = "033835bb-8acc-5ee8-8aae-3f567f8a3819" +Lux = "b2108857-7c20-44ae-9111-449ecde12c47" +MLDataDevices = "7e8f7934-dd98-4c1a-8fe8-92b47a384d40" +MLUtils = "f1d291b0-491e-4a28-83b9-f70985020b54" +MultivariateStats = "6f286f6a-111f-5878-ab1e-185364afe411" +OptimizationOptimisers = "42dfb2eb-d2b4-4451-abcd-913932933ac1" +PairPlots = "43a3c2be-4208-490b-832a-a21dcd55d7da" +Plots = "91a5bcdd-55d7-5caf-9e0b-520d859cae80" +SimpleChains = "de6bee2f-e2f4-4ec7-b6ed-219cc6f6e9e5" +StableRNGs = "860ef19b-820b-49d6-a774-d7a799459cd3" +StaticArrays = "90137ffa-7385-5640-81b9-e52037218182" +StatsBase = "2913bbd2-ae8a-5f71-8c99-4fb6c76f3a91" +StatsFuns = "4c63d2b9-4356-54db-8cca-17b64c39e42c" +UnPack = "3a884ed6-31ef-47d7-9d2a-63182c4928ed" +Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f" +cuDNN = "02a925ec-e4fe-4b08-9a7e-0d78e3d38ccd" diff --git a/projects/clustered_sites/VarSumDij.md b/projects/clustered_sites/VarSumDij.md new file mode 100644 index 0000000..e69f453 --- /dev/null +++ b/projects/clustered_sites/VarSumDij.md @@ -0,0 +1,184 @@ +# Appendix: Variance of the Sum of Squared Mahalanobis Distances + +## A.1 Overview + +In multivariate environmental analysis, it is common to summarize the pairwise dissimilarity among $n$ observations using the total sum of squared Mahalanobis distances, + +$$S = \sum_{i Matrix +overdispersion_test(Y_null, μ, Σ, α=0.05) + +println("\n── Scenario 2: 
Sample drawn from inflated variance (2Σ) ──\n") +Y_over = rand(MvNormal(μ, 2*Σ), n)' |> Matrix +overdispersion_test(Y_over, μ, Σ, α=0.05) + +println("\n── Scenario 3: Monte Carlo size and power study ──\n") +n_sim = 20_000 +rejections_H0 = 0 +rejections_H1 = 0 + +for _ in 1:n_sim + Y0 = rand(MvNormal(μ, Σ), n)' |> Matrix + Y1 = rand(MvNormal(μ, 2*Σ), n)' |> Matrix + + r0 = overdispersion_test(Y0, μ, Σ, α=0.05) + r1 = overdispersion_test(Y1, μ, Σ, α=0.05) + + rejections_H0 += (r0.p_chisq < 0.05) + rejections_H1 += (r1.p_chisq < 0.05) +end + +@printf(" Empirical size (H₀ true, should ≈ 0.05): %.4f\n", + rejections_H0 / n_sim) +@printf(" Empirical power (H₁ true, Σ→2Σ): %.4f\n", + rejections_H1 / n_sim) diff --git a/projects/clustered_sites/clustered_sites.qmd b/projects/clustered_sites/clustered_sites.qmd new file mode 100644 index 0000000..e06a492 --- /dev/null +++ b/projects/clustered_sites/clustered_sites.qmd @@ -0,0 +1,674 @@ +--- +title: "Testing uncertainty with clustered sites" +engine: julia +execute: + echo: true + output: false + daemon: 3600 +format: + commonmark: + variant: -raw_html+tex_math_dollars + wrap: preserve +bibliography: twutz_txt.bib +--- + +``` @meta +CurrentModule = HybridVariationalInference +``` + +By assuming that observations of sites with very similar model parameters +can be pooled, the estimated uncertainty decreases. +HVI provides a method that emulates this pooling of observations for +sites being in the same cluster. + +Here we explore the decrease of parameter uncertainty. + +It turns out, that the method works. The uncertainty decreases with the +number of pooled sites. +However, the uncertainty is severely underestimated, compared to the +residuals between estimated and true parameters. + +We hypothesize that this is due to not accounting for model error. +The inversion currently assumes a perfect ML and PBM models, so that true +observations can be simulated, and the error is only due to observation +noise. 
However, even if the ML model does not predict biased parameters, there are
parameters, and two +parameters, that are predicted by covariates. + +```{julia} +n_batch = 80 +scenario = Val((:clustered_sites2,:exactML)) +rng = StableRNG(111) # make sure to be the same as when constructing +train_data = gen_hybridproblem_synthetic(rng, DoubleMM.DoubleMMCase(); scenario); +n_site = size(train_data.θMs_true, 2) +train_data = (; train_data..., i_sites = 1:n_site) +prob0 = HybridProblem(DoubleMM.DoubleMMCase(); scenario, n_batch); +#get_hybridproblem_n_site_and_batch(prob0; scenario) +get_hybridproblem_par_templates(prob0; scenario) +``` + +The synthetic data (observations and covariates) for this exercise generated parameters and covariates in +three clusters with different number of sites. + +```{julia} +n_sites_cluster, clusters_pre = HVI.get_clusters(n_site; scenario); +n_sites_cluster +``` +```{julia} +Makie.boxplot(clusters_pre, train_data.θMs_true[1,:], axis=(;xlabel="cluster", ylabel="true r1")) +``` + +```{julia} +Makie.barplot(n_sites_cluster, axis=(;xlabel="cluster", ylabel="number of observations")) +``` + + +For experimenting with clustering later, assume that each site is a representative +of a larger cluster, and hence its uncertainty is reduced. +Asumme `n_cluster_initial` clusters_pre, that will be split into smaller cluster +during the process. +It is expected that is easiert to find a good fit for the uncertainty with larger +clusters and smaller estimated uncertainty, because the expected value on +transformed scale is closer to the point estimate. 
+ + +### Perform the inversion + +```{julia} +n_cluster_initial = 12 +cluster_rep = size(train_data.xP,2) ÷ n_cluster_initial +#cluster_rep = 8 # start with larger uncertainty to detect +``` + +```{julia} +using OptimizationOptimisers +import Zygote +ENV["MLDATADEVICES_SILENCE_WARN_NO_GPU"] = 1 +if load_inversion_results + probo_point = load("intermediate/probo_point.jld2", "probo"); + probo_unclustered_rep = load("intermediate/probo_unclustered_rep.jld2", "probo"); +else + solver = HybridPointSolver(; alg=Adam(0.002)) + (; probo) = solve(prob0, solver; rng, + callback = callback_loss(100), # output during fitting + epochs = 200, + ); + # continue sampling with smaller adjustment rate + solver = HybridPointSolver(; alg=Adam(0.001)) + (; probo) = solve(probo, solver; rng, + callback = callback_loss(100), # output during fitting + epochs = 500, + ); + probo_point = HybridProblem(probo); + + () -> begin + fname = "intermediate/probo_point.jld2" + mkpath("intermediate") + if probo_point isa AbstractHybridProblem # do not save on failure above + jldsave(fname, false, IOStream; probo = probo_point) + end + probo_point = probo = load(fname, "probo"); + end + + solver = HybridPosteriorSolver(; alg=Adam(0.001), n_MC=3) + (; probo, interpreters) = solve(probo_point, solver; rng, + callback = callback_loss(100), # output during fitting + epochs = 200, + cluster_rep, + ); + # continue with more MC samples for better approximation + # but also with more samples per batch + probo = HybridProblem(probo, n_batch = min(n_site, probo_point.n_batch * 4)) + solver = HybridPosteriorSolver(; alg=Adam(0.0002), n_MC=6) + (; probo, interpreters) = solve(probo, solver; rng, + callback = callback_loss(100), # output during fitting + epochs = 300, + cluster_rep, + ); + probo_unclustered_rep = HybridProblem(probo); + + () -> begin + fname = "intermediate/probo_unclustered_rep.jld2" + mkpath("intermediate") + if probo_unclustered_rep isa AbstractHybridProblem # do not save on failure above + 
jldsave(fname, false, IOStream; probo = probo_unclustered_rep, interpreters) + end + end + + # refit without cluster_rep to compare to clustered result + probo = probo_unclustered_rep + solver = HybridPosteriorSolver(; alg=Adam(0.002), n_MC=3) + (; probo, interpreters) = solve(probo, solver; rng, + callback = callback_loss(100), # output during fitting + epochs = 1000, + # do not include cluster_rep, + ); + probo_unclustered = probo + + () -> begin + fname = "intermediate/probo_unclustered.jld2" + mkpath("intermediate") + if probo_unclustered isa AbstractHybridProblem # do not save on failure above + jldsave(fname, false, IOStream; probo = probo_unclustered, interpreters) + end + end + +end +``` + + +```{julia} +() -> begin + # Try inverting full batches, i.e. all sites together, to see if this + # changes estimated uncertainty. + probo = prob_allsites = HybridProblem(probo; n_batch = n_site) + solver = HybridPosteriorSolver(; alg=Adam(0.001), n_MC=6) + (; probo, interpreters) = solve(probo, solver; rng, + callback = callback_loss(100), # output during fitting + epochs = 300, + ); + probo_unclusterd_allsites = HybridProblem(probo); + + () -> begin + fname = "intermediate/clustered_sites_probo_allsites.jld2" + mkpath("intermediate") + if probo_allsites isa AbstractHybridProblem # do not save on failure above + jldsave(fname, false, IOStream; probo = probo_allsites, interpreters) + end + probo = load(fname, "probo"); + end +end +``` + +## Sample + +Because of the log-transformation to unconstrained space, a LogNormal distribution +is estimated for each parameter. Hence, we fit marginal LogNormal distributions to the posterior samples and get properties from it. 
+ + +```{julia} +function fit_lognormal(x; p_low = 0.25, p_upp = 0.75) + q_low, q_upp = quantile(x, (p_low,p_upp)) + d = fit(LogNormal, @qp(q_low, p_low), @qp(q_upp, p_upp)) +end +function sample_and_aggregate(rng, probo, n_sample_pred = 400) + #(; θsP, θsMs_tr) = sample_posterior(rng, probo; n_sample_pred, is_testmode = true) + (; y, θsP, θsMs_tr) = predict_hvi(rng, probo; n_sample_pred); + θMs_pred_tr = apply_preserve_axes(x -> median(x, dims=3),θsMs_tr)[:,:,1] + θMs_dist_pred_tr = map(fit_lognormal, eachslice(θsMs_tr; dims=(1,2))) + θMs_mode_pred_tr = map(mode, θMs_dist_pred_tr) + θMs_q05pred_tr = map(x -> quantile(x, 0.05), θMs_dist_pred_tr) + θMs_q95pred_tr = map(x -> quantile(x, 0.95), θMs_dist_pred_tr) + #θMs_stdlogpred_tr = apply_preserve_axes(x -> std(log.(x), dims=3),θsMs_tr)[:,:,1] + #θMs_stdpred_tr = apply_preserve_axes(x -> std(x, dims=3),θsMs_tr)[:,:,1] + (; y, θsP, θsMs_tr, θMs_pred_tr, θMs_dist_pred_tr, θMs_mode_pred_tr, θMs_q05pred_tr, θMs_q95pred_tr) +end +() -> begin + s = sample_and_aggregate(rng, probo); +end +``` + +```{julia} +using StableRNGs +rng = StableRNG(112) +s_unclustered = sample_and_aggregate(rng, probo_unclustered_rep); +``` + +```{julia} +#| eval: false +#| show = false +() -> begin + s = s_unclustered + s.θMs_pred_tr[:,:K1] + # s.θMs_pred_tr'[:K1,:] # TODO file issue + (;y_pred, θMs_tr) = predict_point_hvi(rng, probo; scenario) + θMs_predpoint_tr = θMs_tr + par = :K1 + #par = :r1 + i_cluster = 2 + i_sites_cluster = findall(clusters .== i_cluster) + fig = Figure(); ax = Axis(fig[1,1], xlabel="true $par in cluster $i_cluster",ylabel="point predicted $par") + scatter!(ax, train_data.θMs_true[par,:][i_sites_cluster], θMs_predpoint_tr[:,par][i_sites_cluster]) + # scatter!(ax, train_data.θMs_true[par,:][i_sites_cluster], θMs_meanpred_tr[:,par][i_sites_cluster]) + ablines!(ax, 0, 1, color=:lightgray) + fig +end +``` + +## Inspect results + +Correlation is greatly overesimated? 
+```{julia} +#CM = HVI.get_hybridproblem_correlation_Ms(prob) +CM = HVI.get_hybridproblem_correlation_Ms(probo; xM = train_data.xM) +``` + +Leads to slight biases in some parameters of median prediction. +With this batchsize, it also only predicts the mean of each cluster and +is not able to resolve the within-cluster pattern. +```{julia} +s = s_unclustered +#s = s_clustered +#s = s_clustered_800 +par = :r1 +#par = :K1 +i_cluster = 2 +#i_cluster = 1 +#i_cluster = 3 +i_sites_cluster = findall(clusters_pre .== i_cluster) +i_sites_cluster = axes(clusters_pre,1) +fig = Figure(); ax = Axis(fig[1,1], xlabel="true $par in cluster $i_cluster",ylabel="predicted $par") +scatter!(ax, train_data.θMs_true[par,:][i_sites_cluster], s.θMs_q95pred_tr[:,par][i_sites_cluster], label = "95% quantile") +scatter!(ax, train_data.θMs_true[par,:][i_sites_cluster], s.θMs_mode_pred_tr[:,par][i_sites_cluster], label = "mode") +# scatter!(ax, train_data.θMs_true[par,:][i_sites_cluster], s,θMs_meanpred_tr[:,par][i_sites_cluster]) +scatter!(ax, train_data.θMs_true[par,:][i_sites_cluster], s.θMs_q05pred_tr[:,par][i_sites_cluster], label = "5% quantile") +ablines!(ax, 0, 1, color=:lightgray) +axislegend(ax, unique=true; position = :rb) +fig +``` + + +```{julia} +par = :r1 +i_sites_cluster = findall(clusters_pre .== i_cluster) +i_sites_cluster = axes(clusters_pre,1) +fig = Figure(); ax = Axis(fig[1,1], xlabel="true $par",ylabel="predicted $par") +scatter!(ax, train_data.θMs_true[par,:][i_sites_cluster], s.θMs_q95pred_tr[:,par][i_sites_cluster], label = "95% quantile") +scatter!(ax, train_data.θMs_true[par,:][i_sites_cluster], s.θMs_mode_pred_tr[:,par][i_sites_cluster], label = "mode") +# scatter!(ax, train_data.θMs_true[par,:][i_sites_cluster], s,θMs_meanpred_tr[:,par][i_sites_cluster]) +scatter!(ax, train_data.θMs_true[par,:][i_sites_cluster], s.θMs_q05pred_tr[:,par][i_sites_cluster], label = "5% quantile") +ablines!(ax, 0, 1, color=:lightgray) +axislegend(ax, unique=true; position = :rb) +fig + 
The true parameters are well within the predicted distribution.
+ +```{julia} +#| output: true +i_obs = 4 +#i_obs = 1 +#i_obs = 8 +ymean = [mean(s.y[i_obs,i_site,:]) for i_site in axes(s.θsMs_tr, 1)] +ysd = [std(s.y[i_obs,i_site,:]) for i_site in axes(s.θsMs_tr, 1)] +fig = Figure(); ax = Axis(fig[1,1], xlabel="mean(y$i_obs)",ylabel="sd(y$i_obs)") +#i_cluster = 1 +for i_cluster in axes(n_sites_cluster,1) + local i_sites_cluster = findall(clusters_pre .== i_cluster) + scatter!(ax, ymean[i_sites_cluster], ysd[i_sites_cluster]) +end +ablines!(ax, [σ] ,[0]) +fig +``` + +## Providing information on clustering of sites + +Now we explore, how predicted uncertainty changes, when we provide +information on the clustering to the HVI inversion. + +```{julia} +() -> begin + # start from the unclustered inversion + fname = "intermediate/probo_unclustered_rep.jld2" + prob = probo_unclustered_rep = load(fname, "probo"); +end +``` + +```{julia} +() -> begin + # try with larger number of sites within batch + prob = HybridProblem(probo_unclustered_rep; n_batch = 400) + prob = HybridProblem(probo_unclustered_rep; n_batch = n_site) +end +``` + +In the case where only clustering information for cluster 3 is provided, +the reduction in uncertainty is much more moderate and +the pattern within cluster is recovered. +```{julia} +() -> begin + #cluster_orig = clusters + # try not proding cluster information for two first clusters + i = findfirst(==(3), clusters) + clusters[1:i] .= 1:i + clusters[i:end] .= i + # + # invert and inspect + # + fname = "intermediate/clustered_sites_probo_400_3rd_cluster.jld2" + mkpath("intermediate") + jldsave(fname, false, IOStream; probo, interpreters) +end + +``` +Note the `clusters` argument to `solve`. 
+```{julia} +using OptimizationOptimisers +import Zygote +ENV["MLDATADEVICES_SILENCE_WARN_NO_GPU"] = 1 + +solver = HybridPosteriorSolver(; alg=Adam(0.002), n_MC=3) +(; probo, interpreters) = solve(prob, solver; rng, + callback = callback_loss(100), # output during fitting + epochs = 800, + clusters, +); +# continue with more MC samples and lower gradient update rate for better approximation +solver = HybridPosteriorSolver(; alg=Adam(0.0002), n_MC=6) +(; probo, interpreters) = solve(probo, solver; rng, + callback = callback_loss(100), # output during fitting + epochs = 800, + clusters, +); +() -> begin +(; probo, interpreters) = solve(probo, solver; rng, + callback = callback_loss(100), # output during fitting + epochs = 600, + clusters, +); +end + +probo_clustered = HybridProblem(probo); +# probo = probo_clustered +s = s_clustered = sample_and_aggregate(rng, probo_clustered); + +probo_clustered_400 = HybridProblem(probo); +# probo = probo_clustered +s = s_clustered_400 = sample_and_aggregate(rng, probo_clustered_400); + +probo_clustered_800 = HybridProblem(probo); +# probo = probo_clustered +s = s_clustered_800 = sample_and_aggregate(rng, probo_clustered_800); + +``` + +```{julia} +() -> begin + fname = "intermediate/probo_clustered.jld2" + mkpath("intermediate") + if probo_clustered isa AbstractHybridProblem # do not save on failure above + jldsave(fname, false, IOStream; probo = probo_clustered, interpreters) + end + probo_clustered = probo = load(fname, "probo"); +end +``` + +```{julia} +() -> begin + fname = "intermediate/probo_clustered_400.jld2" + mkpath("intermediate") + if probo_clustered_400 isa AbstractHybridProblem # do not save on failure above + jldsave(fname, false, IOStream; probo = probo_clustered_400, interpreters) + end + probo = load(fname, "probo"); +end +``` + +```{julia} +() -> begin + fname = "intermediate/probo_clustered_800.jld2" + mkpath("intermediate") + if probo_clustered_800 isa AbstractHybridProblem # do not save on failure above + 
jldsave(fname, false, IOStream; probo = probo_clustered_800, interpreters) + end + probo = load(fname, "probo"); +end +``` + + +## Clustering sites + +Rather than doing three big clusters, start with smaller clusters +and increase cluster size until estimates variance of a single predicted +parameter at unconstrained scale matches the variance of the predictions +within one cluster. + +Start with the unclustered HVI results (each observation its own cluster) + +```{julia} +() -> begin + # start from the unclustered inversion + fname = "intermediate/probo_unclustered_rep.jld2" + prob = probo_unclustered_rep = load(fname, "probo"); +end +``` + +```{julia} +() -> begin + probo = HybridProblem(probo_point) + probo = HybridProblem(probo_clustered) + probo = HybridProblem(probo_unclustered_rep) +end +``` + +Get the lower triangular cholesky factor of the correlation matrix. This is +easier to invert than the full correlation matrix. + +```{julia} +solver = HybridPosteriorSolver(; alg=Adam(0.001), n_MC=6) +(; probo, clusters) = HVI.refit_clusters(rng, probo, solver, train_data.xM; ) +probo_clustered = probo +# probo = probo_clustered +s = s_clustered = sample_and_aggregate(rng, probo_clustered); + + +() -> begin + fname = "intermediate/probo_clustered.jld2" + mkpath("intermediate") + if probo_clustered isa AbstractHybridProblem # do not save on failure above + jldsave(fname, false, IOStream; probo = probo_clustered, clusters) + end + probo, clusters = load(fname, "probo", "clusters"); +end +``` + +Plot the clusters. 
+```{julia} + fig = Figure(); ax = Axis(fig[1,1], xlabel="ζ_1",ylabel="ζ_2") + #i_cluster = first(cluster_ids) + for i_cluster in 1:maximum(clusters) + i_sites_cluster = findall(clusters .== i_cluster) + #scatter!(ax, mean(X[i_sites_cluster,1]), std(X[i_sites_cluster,1])) + scatter!(ax, X[i_sites_cluster,1], X[i_sites_cluster,2]) + end + #ablines!(ax, [σ] ,[0]) + fig +``` + +There are clusters with only few individuals, but also clusters with about 50 individuals. + +```{julia} +cnts_clusters = StatsBase.countmap(clusters) +sort(collect(values(cnts_clusters))) +``` + +Compare observed and predicted marginal distances within each cluster. + +```{julia} +(; X, σMs) = extract_MLpred(probo, train_data.xM) +i_par = 2 +stat_cluster = map((k for k in keys(cnts_clusters))) do i_cluster + i_sites = findall(isequal.(clusters, i_cluster)) + X_cl = X[i_sites,:] + σM_cl = vec(std(X_cl; dims = 1)) + q05 = quantile(X_cl[:,i_par], 0.05) + q95 = quantile(X_cl[:,i_par], 0.95) + σM_cl_pred = vec(median(σMs[:,i_sites]; dims=2)) + ζ_cl = vec(median(X_cl; dims=1)) + #HVI.check_overdispersion(X_cl, Ucor, σM_cl_pred) + (;i_cluster, σM_pred = σM_cl_pred[i_par], σM = σM_cl[i_par], ζ = ζ_cl[i_par], q05, q95) +end |> HVI.vectuptotupvec + +fig = Figure(); ax = Axis(fig[1,1], xlabel="ζ_$i_par",ylabel="standard deviation") +scatter!(ax, stat_cluster.ζ, stat_cluster.σM_pred, label = "predicted") +scatter!(ax, stat_cluster.ζ, stat_cluster.σM, label = "across sites in cluster") +axislegend(ax, unique=true; position = :rt) +fig +``` + +The predicted uncertainty decreases with prescribed clusters, but in a continuous +fashion. 
+```{julia} +scatter(X[:,1], σMs[1,:], color = clusters_pre, axis=(;xlabel="predicted ζ_r", ylabel="predicted standard deviation")) +``` +```{julia} +fig = Figure(); ax = Axis(fig[1,1], xlabel="ζ_$i_par",ylabel="std") +scatter!(ax, stat_cluster.ζ, stat_cluster.ζ) +scatter!(ax, stat_cluster.ζ, stat_cluster.q05) +scatter!(ax, stat_cluster.ζ, stat_cluster.q95) +fig +``` + +### Inspect clustered vs non_clustered + +Load the clusterd and non-clustered results +```{julia} +probo_point = load("intermediate/probo_point.jld2", "probo"); +s_point = sample_and_aggregate(rng, probo_point); +probo_unclustered = load("intermediate/probo_unclustered.jld2", "probo"); +s_unclustered = sample_and_aggregate(rng, probo_unclustered); +probo_clustered = load("intermediate/probo_clustered.jld2", "probo"); +s = s_clustered = sample_and_aggregate(rng, probo_clustered); +``` + +For plotting use AoG and collection information into a DataFrame. +```{julia} +using AlgebraOfGraphics, CairoMakie, DataFrames +#n_sites_cluster, clusters = HybridVariationalInference.get_clusters(n_site; scenario); +set_aog_theme!() # optional +pars = keys(s_clustered.θMs_mode_pred_tr[1,:]) +ss = (clustered = s_clustered, unclustered = s_unclustered) +#ss = (clustered = s_clustered_400, unclustered = s_unclustered) +#ss = (clustered_800 = s_clustered_800, clustered_400 = s_clustered_400, clustered = s_clustered, unclustered = s_unclustered) +par = first(pars) +ssym = first(keys(ss)) +df = mapreduce(vcat, keys(ss)) do ssym + s = ss[ssym] + mapreduce(vcat, pars) do par + df = DataFrame(;clustering=ssym, par, cluster = clusters_pre, dist = s.θMs_dist_pred_tr[:,par]) + end +end +df = transform(df, :dist .=> + ByRow.([median, mode, x -> quantile(x, 0.05), x -> quantile(x, 0.95)]) .=> + [:median, :mode, :q05, :q95]) +df = transform(df, [:q95, :q05] => ByRow((u,l) -> u - l) => :ci90); +``` + +With clustering, the uncertainty is magnitude smaller. 
+ +```{julia} +plt = data(df) * mapping(:cluster, :ci90 => "length of 90% cf interval", color = :clustering => nonnumeric, dodge = :clustering => nonnumeric, row = :par) * visual(BoxPlot); +draw(plt, facet = (; linkxaxes = :minimal, linkyaxes = :none)) +``` + +Had hoped that predicted uncertainty would be lower for clusters with more sites. +But its not the case for a batch size of 80. +Only for a batch size of 400, the inversion picks up the lower uncertainty. + +```{julia} +plt = data(subset(df, :clustering => ByRow(x -> x ∉ (:unclustered,)))) * mapping(:cluster, :ci90 => "length of 90% cf interval", color = :clustering => nonnumeric, dodge = :clustering => nonnumeric, row = :par) * visual(BoxPlot); +draw(plt, facet = (; linkxaxes = :minimal, linkyaxes = :none)) +``` + +A hypothesis is that uncertainty is underestimated with smaller batch size +because there are not enough sites of smaller cluster size in each batch. + +But when increasing n_batch again, estimated uncertainty decreases. +The uncertainty estimate seems to be not very robust, but depend on +the realized ML fit. + +Another hypothesis is that the inversion is strongly influenced not accounting for model uncertainty. +Residual variance in observations is entirely allocated to uncertainty in +model parameters. 
+ + + diff --git a/projects/clustered_sites/intermediate/clustered_sites_probo_allsites.jld2 b/projects/clustered_sites/intermediate/clustered_sites_probo_allsites.jld2 new file mode 100644 index 0000000..fc9426d Binary files /dev/null and b/projects/clustered_sites/intermediate/clustered_sites_probo_allsites.jld2 differ diff --git a/projects/clustered_sites/learn_clustering.jl b/projects/clustered_sites/learn_clustering.jl new file mode 100644 index 0000000..7735ac3 --- /dev/null +++ b/projects/clustered_sites/learn_clustering.jl @@ -0,0 +1,78 @@ +using DataFrames, LinearAlgebra, Statistics, Distances, Clustering +#using HierarchicalClustering + +# Step 1: Load or define your dataset +# Example: Replace this with your actual data loading +# df = DataFrame(CSV.File("your_data.csv")) + +# Example dataset (replace with your own) +#n_vars = 10 # number of variables (columns) +n_vars = 2 +n_rows = 2000 # number of records (rows) +X = randn(n_rows, n_vars) # your data: each row is a record + +# Convert to DataFrame (optional, for clarity) +df = DataFrame(X, :auto) + +# Step 2: Compute the covariance matrix (if not already given) +Sigma = cov(Matrix(df)) # Shape: (n_vars, n_vars) + +# Optional: Check if Sigma is invertible +if det(Sigma) ≈ 0 + @warn "Covariance matrix is singular. Adding regularization." 
+ λ = 1e-6 # small regularization + Sigma = Sigma + λ * I +end + +# Compute inverse of covariance matrix +Sigma_inv = inv(Sigma) + +# Step 3: Compute Mahalanobis distances between all pairs of records +# We'll compute the squared Mahalanobis distance matrix efficiently + +# Convert DataFrame to matrix +X_matrix = Matrix(df) + +# Precompute differences: (X_i - X_j) for all i, j +# Use broadcasting to compute all pairwise differences +n_rows, n_vars = size(X_matrix) # e.g., 2000×10 +#diffs = X_matrix' .- X_matrix # Shape: (n_vars, n_rows, n_rows) +diffs = reshape(X_matrix, n_rows, 1, n_vars) .- reshape(X_matrix, 1, n_rows, n_vars) + + + +# Step 4: Compute Mahalanobis distances +dist_matrix = zeros(n_rows, n_rows) +for i in 1:n_rows + for j in 1:n_rows + diff = diffs[i, j, :] # (n_vars,) + dist_matrix[i, j] = sqrt(diff' * Sigma_inv * diff) + end +end +# If you want to go faster, use: +# dist_matrix = sqrt.(sum((diffs * Sigma_inv) .* diffs, dims=3)[:, :, 1]) + +# Step 5: Hierarchical clustering +#linkage_matrix = linkage(dist_matrix, :ward) +#clusters = fcluster(linkage_matrix, k=5, criterion=:maxclust) +res = hclust(dist_matrix; linkage = :ward) +clusters = cutree(res; k = 10) + +# Add to DataFrame +df[!, :cluster] = clusters + + +# Optional: View first few rows with cluster assignments +println(first(df, 10)) + +# Optional: Visualize (e.g., using PCA + coloring) +using Plots +using MultivariateStats + +# Reduce to 2D using PCA +#pca = fit(PCA, X_matrix'; maxoutdim=2) +pca = fit(PCA, X_matrix'; maxoutdim=2) +X_pca = predict(pca, X_matrix') + +# Plot +scatter(X_pca[1, :], X_pca[2, :], group=clusters, label="Cluster", title="Clustering Results (PCA)") diff --git a/projects/clustered_sites/test_overdispersion.qmd b/projects/clustered_sites/test_overdispersion.qmd new file mode 100644 index 0000000..ff4e9fb --- /dev/null +++ b/projects/clustered_sites/test_overdispersion.qmd @@ -0,0 +1,79 @@ + +# Testing for points within cluster to be spread more than expected by average 
Covariance + +## Test counting the number of pairs +That share one component. +```{julia} +using Plots +using StatsBase: binomial + +# Function to count overlapping pairs (shared exactly one point) +function count_overlapping_pairs(m) + # Generate all unordered pairs (i,j) with i < j + pairs = collect(Iterators.flatten(((i, j) for j in i+1:m) for i in 1:m)) + N = length(pairs) + + count = 0 + # Loop over all unordered pairs of pairs + for i in 1:N + for j in (i+1):N + p1 = pairs[i] + p2 = pairs[j] + # Count intersection: share one point + # (1,2) and (2,3) share point 2, but (1,2) and (3,4) share no points + n_shared = length(intersect(p1, p2)) + if n_shared == 1 + count += 1 + elseif n_shared > 1 + error("Unexpected: pairs should not share more than one point") + end + end + end + return count +end + +# Generate data for m = 3 to 15 +m_values = 3:10 +counts = [count_overlapping_pairs(m) for m in m_values] +expected = [3 * binomial(m, 3) for m in m_values] + +# Create the plot +Plots.plot(m_values, counts, + label="Observed", + marker=:circle, + color=:blue, + linewidth=2, + xlabel="Number of Points (m)", + ylabel="Number of Overlapping Pairs", + title="Number of Unordered Pairs of Pairs Sharing Exactly One Point", + grid=true, + legend=:topright) + +# Add expected line +Plots.plot!(m_values, expected, + label="Expected: 3 × binom{m}{3}", + linestyle=:dash, + color=:red, + linewidth=2) + +# Add annotations +annotate!(10, 100, text("Matches exactly", :right, 10, :black)) + +# Save as PNG (optional) +# savefig("overlapping_pairs_plot.png") + +# Display +display(plot!) 
+``` + +## Test simplifying Cov formula + +```{julia} +n = 2 +m_values = 3:10 +expected1 = [2*n * binomial(m,2) + 2*n * 3 * binomial(m, 3) for m in m_values] +expected2 = [n * m * abs2(m-1) for m in m_values] +expected1 == expected2 +``` + +## derivation diff --git a/projects/clustered_sites/test_overdispersion_theory.md b/projects/clustered_sites/test_overdispersion_theory.md new file mode 100644 index 0000000..8ef9cbb --- /dev/null +++ b/projects/clustered_sites/test_overdispersion_theory.md @@ -0,0 +1,111 @@ +# Appendix B: Testing for Overdispersion in Multivariate Environmental Data + +## B.1 Overview + +In aggregating individual parameter sets into clusters of similar parameters, one needs to choose the cluster size so that dispersion across parameter sets matches the predicted uncertainty of a parameter prediction. If a cluster of individuals exhibits **greater multivariate spread** than expected under the predicted reference distribution $\mathcal{N}_p(\mu, \Sigma)$, the cluster needs to be split into subclusters. 
+ +We formalize this check of greater spread as a one-sided hypothesis test based on the total sum of squared Mahalanobis distances, + +$$S_n = \sum_{i= v"1.11.0-DEV.469" && eval(Meta.parse("public Logistic")) include("bijectors_utils.jl") export AbstractHVIApproximation, AbstractMeanHVIApproximation +export get_numberof_MLinputs export MeanHVIApproximation, MeanHVIApproximationMat export AbstractMeanVarSepHVIApproximation, MeanVarSepHVIApproximation +export AbstractMeanScalingHVIApproximation, MeanScalingHVIApproximation include("HVIApproximation.jl") export AbstractComponentArrayInterpreter, ComponentArrayInterpreter, StaticComponentArrayInterpreter export flatten1, get_concrete, get_positions, stack_ca_int, compose_interpreters -export construct_partric +export construct_partric, get_numberof_inputs_outputs include("ComponentArrayInterpreter.jl") export AbstractModelApplicator, construct_ChainsApplicator @@ -65,14 +72,17 @@ include("PBMApplicator.jl") # export AbstractGPUDataHandler, NullGPUDataHandler, get_default_GPUHandler # include("GPUDataHandler.jl") -export AbstractHybridProblem, get_hybridproblem_MLapplicator, get_hybridproblem_PBmodel, +export AbstractHybridProblem, AbstractPenaltyComputer, CustomPenaltyComputer, + compute_penalty, + get_hybridproblem_MLapplicator, get_hybridproblem_PBmodel, + get_hybridproblem_penalty_computer, get_hybridproblem_ϕq, get_hybridproblem_θP, get_hybridproblem_float_type, gen_hybridproblem_synthetic, get_hybridproblem_par_templates, get_hybridproblem_transforms, get_hybridproblem_train_dataloader, get_hybridproblem_test_data, get_hybridproblem_neg_logden_obs, - get_hybridproblem_n_covar, + get_hybridproblem_n_covar, # default get_hybridproblem_n_site_and_batch, get_hybridproblem_cor_ends, get_hybridproblem_priors, @@ -80,6 +90,7 @@ export AbstractHybridProblem, get_hybridproblem_MLapplicator, get_hybridproblem_ gen_cov_pred, construct_dataloader_from_synthetic, gdev_hybridproblem_dataloader, gdev_hybridproblem_data, + 
get_hybridproblem_HVIApproximation, setup_PBMpar_interpreter, get_gdev_MP, init_hybrid_ϕq @@ -99,7 +110,7 @@ include("gencovar.jl") export callback_loss include("util_opt.jl") -export cpu_ca, apply_preserve_axes +export cpu_ca, apply_preserve_axes, as_data_frame include("util_ca.jl") export ones_similar_x @@ -111,10 +122,12 @@ include("logden_normal.jl") export get_ca_starts, get_ca_ends, get_cor_count include("cholesky.jl") -export neg_elbo_gtf, sample_posterior, predict_hvi, zero_penalty_loss -export get_hybridproblem_correlation_Ms +export neg_elbo_gtf, sample_posterior, predict_hvi, ZeroPenaltyComputer +export get_hybridproblem_correlation_Ms, get_hybridproblem_cholesky_correlation_Ms +export get_marginal_std include("elbo_dev.jl") include("elbo_sepvec.jl") +include("elbo_scaling.jl") include("elbo.jl") include("elbo2.jl") @@ -132,4 +145,7 @@ include("RRuleMonitor.jl") include("chainrulescore.jl") +export cluster_records, extract_MLpred +include("clustering.jl") + end diff --git a/src/ModelApplicator.jl b/src/ModelApplicator.jl index 1ccabfd..64b0d91 100644 --- a/src/ModelApplicator.jl +++ b/src/ModelApplicator.jl @@ -69,9 +69,25 @@ Implemented for machine learning extensions, such as Flux or SimpleChains. `ml_engine` usually is of type `Val{Symbol}`, e.g. Val(:Flux). See `select_ml_engine`. Scenario is a value-type of `NTuple{_,Symbol}`. + +Implementations may call +`get_numberof_inputs_outputs(prob; scenario) -> (n_input, n_output)`. 
""" function construct_3layer_MLApplicator end +function get_numberof_inputs_outputs(prob; scenario) + n_covar = get_hybridproblem_n_covar(prob; scenario) + n_pbm_covars = length(get_hybridproblem_pbmpar_covars(prob; scenario)) + n_input = n_covar + n_pbm_covars + (;θM) = get_hybridproblem_par_templates(prob; scenario) + #n_out = length(θM) + approx = get_hybridproblem_HVIApproximation(prob; scenario) + n_output = get_numberof_MLinputs(approx, θM) + (;n_input, n_output) +end + + + """ select_ml_engine(;scenario) @@ -100,19 +116,28 @@ of the wrapped `app` by scalar `y0`. struct MagnitudeModelApplicator{M,A} <: AbstractModelApplicator app::A multiplier::M + range_scaled::UnitRange{Int} end +@functor MagnitudeModelApplicator (app, multiplier) +function MagnitudeModelApplicator(app::AbstractModelApplicator, multiplier; range_scaled = 1:0) + MagnitudeModelApplicator(app, multiplier, range_scaled) +end function apply_model(app::MagnitudeModelApplicator, x, ϕ; kwargs...) #@show size(x), size(ϕ), app.multiplier @assert eltype(app.multiplier) == eltype(ϕ) - apply_model(app.app, x, ϕ; kwargs...) .* app.multiplier + if !isempty(app.range_scaled) + res = apply_model(app.app, x, ϕ; kwargs...) + res_scaled = index_firstdim(res,app.range_scaled) .* app.multiplier + combine_range(res, res_scaled, app.range_scaled) + else + apply_model(app.app, x, ϕ; kwargs...) 
.* app.multiplier + end end - - """ - NormalScalingModelApplicator(app, μ, σ) + NormalScalingModelApplicator(app, μ, σ; range_scaled=1:0) NormalScalingModelApplicator(app, priors, transM) Wrapper around AbstractModelApplicator that transforms each output @@ -133,14 +158,16 @@ struct NormalScalingModelApplicator{VF,A} <: AbstractModelApplicator app::A μ::VF σ::VF + range_scaled::UnitRange{Int} end -@functor NormalScalingModelApplicator +@functor NormalScalingModelApplicator (app, μ, σ) """ NormalScalingModelApplicator(app, lowers, uppers, FT::Type; repeat_inner::Integer = 1) Fit a Normal distribution to number iterators `lower` and `upper` and transform results of the wrapped `app` `AbstractModelApplicator`. +The results of the inner applicator are assumed to be (0,1). If `repeat_inner` is given, each fitted distribution is repeated as many times to support independent multivariate normal distribution. @@ -149,6 +176,28 @@ It usually corresponds to the type used in other ML-parts of the model, e.g. `Fl """ function NormalScalingModelApplicator( app::AbstractModelApplicator, lowers, uppers, FT::Type; + range_scaled = 1:0, + repeat_inner::Integer = 1) + pars = map(lowers, uppers) do lower, upper + dζ = fit(Normal, @qp_l(lower), @qp_u(upper)) + params(dζ) + end + # use collect to make it an array that works with gpu + μ = repeat(collect(FT, first.(pars)); inner=(repeat_inner,)) + σ = repeat(collect(FT, last.(pars)); inner=(repeat_inner,)) + app = if isempty(range_scaled) || (repeat_inner == 1) + NormalScalingModelApplicator(app, μ, σ, range_scaled) + else + error("debug and implement NormalScalingModelApplicator with repeated blocks, e.g. 
for multivariate normal distribution with independent components") + range_scaled_rep = repeat(range_scaled, inner=repeat_inner) + app_sub = NormalScalingModelApplicator(app, μ, σ, range_scaled_rep[2:end]) + NormalScalingModelApplicator(app_sub, μ, σ, range_scaled_rep[2:end]) + end +end + +function NormalScalingModelApplicator( + app::AbstractModelApplicator, μ, σ; + range_scaled = 1:0, # empty range indicates rescaling all outputs repeat_inner::Integer = 1) pars = map(lowers, uppers) do lower, upper dζ = fit(Normal, @qp_l(lower), @qp_u(upper)) @@ -157,24 +206,33 @@ function NormalScalingModelApplicator( # use collect to make it an array that works with gpu μ = repeat(collect(FT, first.(pars)); inner=(repeat_inner,)) σ = repeat(collect(FT, last.(pars)); inner=(repeat_inner,)) - NormalScalingModelApplicator(app, μ, σ) + range_scaled_rep = repeat(range_scaled, inner=repeat_inner) + NormalScalingModelApplicator(app, μ, σ, range_scaled_rep) end + function apply_model(app::NormalScalingModelApplicator, x, ϕ; kwargs...) y_perc = apply_model(app.app, x, ϕ; kwargs...) 
# @show typeof(app.μ) # @show typeof(ϕ) @assert eltype(app.μ) == eltype(ϕ) - ans = norminvcdf.(app.μ, app.σ, y_perc) # from StatsFuns + ans = if !isempty(app.range_scaled) + ans_scaled = norminvcdf.(app.μ, app.σ, index_firstdim(y_perc,app.range_scaled)) # from StatsFuns + combine_range(y_perc, ans_scaled, app.range_scaled) + else + ans_scaled = norminvcdf.(app.μ, app.σ, y_perc) + end # if !all(isfinite.(ans)) # @info "NormalScalingModelApplicator.apply_model: encountered non-finite results" # #@show ans, y_perc, app.μ, app.σ # #@show app.app, x, ϕ # #error("error to print stacktrace") # end - ans end +index_firstdim(v::AbstractVector, i) = v[i] +index_firstdim(v::AbstractMatrix, i) = v[i,:] + """ RangeScalingModelApplicator(app, y0) @@ -185,26 +243,43 @@ struct RangeScalingModelApplicator{VF,A} <: AbstractModelApplicator offset::VF width::VF app::A + range_scaled::UnitRange{Int} end function apply_model(app::RangeScalingModelApplicator, x, ϕ; kwargs...) res0 = apply_model(app.app, x, ϕ; kwargs...) - res0 .* app.width .+ app.offset + if !isempty(app.range_scaled) + res_scaled = index_firstdim(res0,app.range_scaled) .* app.width .+ app.offset + combine_range(res0, res_scaled, app.range_scaled) + else + res0 .* app.width .+ app.offset + end +end + +function combine_range(res0, res_scaled, range_scaled) + range_before = 1:(range_scaled[1]-1) + range_after = (range_scaled[end]+1):size(res0,1) + vcat(index_firstdim(res0,range_before), res_scaled, index_firstdim(res0,range_after)) end + + """ RangeScalingModelApplicator(app, lowers, uppers, FT::Type; repeat_inner::Integer = 1) Provide the target ragen by vectors `lower` and `upper`. The size of both outputs must correspond to the size of the output of `app`. 
+ """ function RangeScalingModelApplicator( app::AbstractModelApplicator, lowers::VT, uppers::VT, - FT::Type) where VT<:AbstractVector + FT::Type; + range_scaled = 1:0 + ) where VT<:AbstractVector width = collect(FT, uppers .- lowers) lowersFT = collect(FT, lowers) # convert eltype - RangeScalingModelApplicator(lowersFT, width, app) + RangeScalingModelApplicator(lowersFT, width, app, range_scaled) end diff --git a/src/OneBasedVectorWithZero.jl b/src/OneBasedVectorWithZero.jl new file mode 100644 index 0000000..04cff25 --- /dev/null +++ b/src/OneBasedVectorWithZero.jl @@ -0,0 +1,109 @@ +""" + OneBasedVectorWithZero(data) + +A thin wrapper over an `AbstractVector` that exposes a linear 1-based indexing API +mapping `v[i]` to `data[axes(data, 1)[i]]` on the underlying storage +and provides a value at index 0 (defaulting to zero) that is not stored in the underlying vector. + +Example usage: +```jldoctest; output=false +v = HybridVariationalInference.OneBasedVectorWithZero([10,20,30]) +v[1] == 10 +v[2] == 20 +v[3] == 30 +v[0] == 0 # default value at index 0 is zero +v[[1,0,0,3]] == [10,0,0,30] +# output +true +``` +""" +struct OneBasedVectorWithZero{E,V<:AbstractVector{E}} <: AbstractVector{E} + data::V + val_at_zero::E # optional field to store the value at index 0 if needed +end + +OneBasedVectorWithZero(v::AbstractVector; val_at_zero=zero(eltype(v))) = OneBasedVectorWithZero(v, val_at_zero) +Base.size(v::OneBasedVectorWithZero) = size(v.data) +Base.length(v::OneBasedVectorWithZero) = length(v.data) +Base.eltype(v::OneBasedVectorWithZero) = eltype(v.data) +Base.axes(v::OneBasedVectorWithZero) = (Base.OneTo(length(v)),) +Base.IndexStyle(::Type{<:OneBasedVectorWithZero}) = IndexLinear() +Base.empty(v::OneBasedVectorWithZero) = OneBasedVectorWithZero(empty(v.data), v.val_at_zero) + +function Base.getindex(v::OneBasedVectorWithZero, i::Integer) + if i == 0 + return v.val_at_zero + elseif 1 <= i <= length(v) + return v.data[axes(v.data,1)[i]] + else + throw(BoundsError(v, 
i)) + end +end + +# Bools is a subtype of Integer, need to handle this case separately +function Base.getindex(v::OneBasedVectorWithZero, inds::AbstractVector{<:Bool}) + v.data[inds] +end + +function Base.getindex(v::OneBasedVectorWithZero, inds::AbstractVector{<:Integer}) + return map(i -> getindex(v, i), inds) +end + +function Base.setindex!(v::OneBasedVectorWithZero, value, i::Integer) + # setting index 0 is not allowed + if 1 <= i <= length(v) + v.data[axes(v.data, 1)[i]] = value + return v + else + throw(BoundsError(v, i)) + end +end + +function Base.iterate(v::OneBasedVectorWithZero, state=1) + state > length(v) ? nothing : (v[state], state + 1) +end + +function ChainRulesCore.rrule(::typeof(getindex), v::OneBasedVectorWithZero, i::Integer) + if i == 0 + y = v.val_at_zero + function pullback0(ȳ) + # no gradient to base vector or val_at_zero + return NoTangent(), NoTangent(), NoTangent() + end + return y, pullback0 + elseif 1 <= i <= length(v) + y = v.data[axes(v.data,1)[i]] + function pullback(ȳ) + dv = zero(v.data) + dv[axes(v.data,1)[i]] += ȳ + return NoTangent(), OneBasedVectorWithZero(dv, zero(eltype(v))), NoTangent() + end + return y, pullback + else + throw(BoundsError(v, i)) + end +end + +function ChainRulesCore.rrule(::typeof(getindex), v::OneBasedVectorWithZero, inds::AbstractVector{<:Integer}) + y = getindex(v, inds) + function pullback(ȳ) + dv = zero(v.data) + for (k, idx) in enumerate(inds) + if 1 <= idx <= length(v) + dv[axes(v.data,1)[idx]] += ȳ[k] + end + end + return NoTangent(), OneBasedVectorWithZero(dv, zero(eltype(v))), NoTangent() + end + return y, pullback +end + +if isdefined(Main, :Zygote) || isdefined(HybridVariationalInference, :Zygote) + Zygote.@adjoint function OneBasedVectorWithZero(v::AbstractVector; val_at_zero=zero(eltype(v))) + h = OneBasedVectorWithZero(v, val_at_zero) + return h, Δ -> ( + Δ isa OneBasedVectorWithZero ? 
Δ.data : Δ, + NoTangent() + ) + end +end diff --git a/src/PBMApplicator.jl b/src/PBMApplicator.jl index fcde585..221c9e7 100644 --- a/src/PBMApplicator.jl +++ b/src/PBMApplicator.jl @@ -11,9 +11,12 @@ where other parts of the interface, where sites are in the last dimension. The reason is that a column of a parameter is more efficient to transform between constrain and unconstrained scale. -- Results are of shape `(n_obs x n_site_pred x n_MC)`. +- Result is a tuple with two entries, . + The first entry are observations of shape `(n_obs x n_site_pred x n_MC)` + The second entry are additional results of shape `(n_add x n_site_pred x n_MC)` + that are passed to the penalty function. -They may also provide function `apply_model(app, θP, θMs_tr, xP)` for a sample +They may also provide function `apply_model(app, θP::Matrix, θMs_tr::Array, xP)` for a sample of parameters, i.e. where an additional dimension is added to both `θP` and `θMs`. However, there is a default implementation that mapreduces across these dimensions. @@ -50,10 +53,12 @@ first variant of `apply_model` for each sample. 
# docu in struct function apply_model(app::AbstractPBMApplicator, θsP::AbstractMatrix, θsMs_tr::AbstractArray{ET,3}, xP) where ET # stack does not work on GPU, see specialized method for GPUArrays below - y_pred = stack( - map(eachcol(CA.getdata(θsP)), eachslice(CA.getdata(θsMs_tr), dims=3)) do θP, θMs + res = map(eachcol(CA.getdata(θsP)), eachslice(CA.getdata(θsMs_tr), dims=3)) do θP, θMs app(θP, θMs, xP) - end) + end + y_pred = stack(getindex.(res, Ref(1))) + add = stack(getindex.(res, Ref(2))) + y_pred, add end # function apply_model(app::AbstractPBMApplicator, θsP::GPUArraysCore.AbstractGPUMatrix, θsMs_tr::GPUArraysCore.AbstractGPUArray{ET,3}, xP) where ET # # stack does not work on GPU, need to resort to slower mapreduce @@ -70,19 +75,20 @@ function apply_model(app::AbstractPBMApplicator, θsP::GPUArraysCore.AbstractGPU # stack does not work on GPU, need to resort to slower mapreduce # for type stability, apply f at first iterate to supply init to mapreduce # avoid Iterators.peel for CUDA - y1 = apply_model(app, CA.getdata(θsP)[:,1], CA.getdata(θsMs_tr)[:,:,1], xP)[2] + y1, add1 = apply_model(app, CA.getdata(θsP)[:,1], CA.getdata(θsMs_tr)[:,:,1], xP)[2] y1a = reshape(y1, :, 1) # add one dimension + add1a = reshape(add1, :, 1) # add one dimension n_sample = size(θsP,2) - y_pred = if (n_sample == 1) - y1a + y_pred_and_add = if (n_sample == 1) + y1a, add1a else - mapreduce((a,b) -> cat(a,b; dims=3), + mapreduce((a,b) -> (cat(a[1],b[1]; dims=3), cat(a[2],b[2]; dims=3)), eachcol(CA.getdata(θsP)[:,2:end]), eachslice(CA.getdata(θsMs_tr)[:,:,2:end], dims=3); - init=y1a) do θP, θMs + init=(y1a, add1a)) do θP, θMs app(θP, θMs, xP) end end - return(y_pred) + return(y_pred_and_add) end @@ -96,7 +102,7 @@ Process-Base-Model applicator that returns its θMs inputs. Used for testing. 
struct NullPBMApplicator <: AbstractPBMApplicator end function apply_model(app::NullPBMApplicator, θP::AbstractVector, θMs_tr::AbstractMatrix, xP) - return CA.getdata(θMs_tr) + return CA.getdata(θMs_tr), CA.getdata(θMs_tr) end create_nsite_applicator(app::NullPBMApplicator, n_site) = app @@ -130,7 +136,7 @@ end PBMSiteApplicator(fθ; θP, θM, θFix, xPvec) Construct AbstractPBMApplicator from process-based model `fθ` that computes predictions -for a single site. +and additional quantities for a single site. The Applicator combines enclosed `θFix`, with provided `θMs` and `θP` and constructs a `ComponentVector` that can be indexed by symbolic parameter names, corresponding to the templates provided during @@ -170,8 +176,8 @@ function apply_model(app::PBMSiteApplicator, θP::AbstractVector, θMs_tr::Abstr θ = vcat(CA.getdata(θP), CA.getdata(θM_tr), CA.getdata(app.θFix)) θc = app.intθ1(θ); # show errors without ";" xPc = app.int_xPsite(xP1); - ans = CA.getdata(app.fθ(θc, xPc)) - ans + ans = app.fθ(θc, xPc) + CA.getdata(ans[1]), CA.getdata(ans[2]) end # mapreduce-hcat is only typestable with init, which needs number of rows # https://discourse.julialang.org/t/type-instability-of-mapreduce-vs-map-reduce/121136 @@ -189,8 +195,11 @@ function apply_model(app::PBMSiteApplicator, θP::AbstractVector, θMs_tr::Abstr end xP1, it_xP = Iterators.peel(eachcol(CA.getdata(xP))) obs1 = apply_PBMsite(θMs1_tr, xP1) + init = (reshape(obs1[1], :,1),reshape(obs1[2], :, 1)) + hcat_2tuple = (a,b) -> (hcat(a[1], b[1]), hcat(a[2],b[2])) + #hcat_2tuple(init, init) local pred_sites = mapreduce( - apply_PBMsite, hcat, it_θMs_tr, it_xP; init=reshape(obs1, :, 1)) + apply_PBMsite, hcat_2tuple, it_θMs_tr, it_xP; init) return pred_sites end @@ -370,8 +379,8 @@ function apply_model(app::PBMPopulationGlobalApplicator, θP::AbstractVector, θ local θsc_tr = app.intθs(CA.getdata(θs_tr)) local θgc = app.intθg(CA.getdata(θg)) local xPc = app.int_xP(CA.getdata(xP)) - local pred_sites = app.fθpop(θsc_tr, θgc, xPc) - 
return pred_sites + local y_pred_and_add = app.fθpop(θsc_tr, θgc, xPc) + return y_pred_and_add end diff --git a/src/bijectors_utils.jl b/src/bijectors_utils.jl index 20d437b..029edc2 100644 --- a/src/bijectors_utils.jl +++ b/src/bijectors_utils.jl @@ -1,3 +1,38 @@ +""" + with_logabsdet_jacobians + +Similar to with_logabsdet_jacobian, but returns as the second component a +vector of Jacobians of transformation of each component in x. +""" +function with_logabsdet_jacobians end; + +with_logabsdet_jacobians(::typeof(identity), x) = x, zero(x) + +#MAYBE: need to implement fallbacks for other Bijectors than Exp() +function with_logabsdet_jacobians(sb::Stacked, x::AbstractVector) + if sb.length_in != length(x) + error("input length mismatch ($(sb.length_in) != $(length(x)))") + end + y, logjacs = _with_logabsdet_jacobians(sb, x) + if Bijectors.output_length(sb, length(x)) != length(y) + error("output length mismatch ($(output_length(sb, length(x))) != $(length(y)))") + end + # if size(logjacs) != size(y) + # Main.@infiltrate_main + # end + @assert size(logjacs) == size(y) + return (y, logjacs) +end +function _with_logabsdet_jacobians(sb::Stacked, x::AbstractVector) + ys_and_logjacs = map(zip(sb.bs, sb.ranges_in)) do (b, r) + with_logabsdet_jacobians(b, x[r]) + end + y = reduce(vcat, map(first, ys_and_logjacs)) + #logjacs = reduce(+, map(last, ys_and_logjacs)) + logjacs = reduce(vcat, map(last, ys_and_logjacs)) + return (y, logjacs) +end + #------------------- Exp """ @@ -21,6 +56,19 @@ Bijectors.logabsdetjac(b::Exp, x) = sum(x) function Bijectors.with_logabsdet_jacobian(b::Exp, x) return exp.(x), sum(x) end + +""" + with_logabsdet_jacobians + +Similar to with_logabsdet_jacobian, but returns as the second component a +vector of Jacobians of transformation of each component in x, rather than +the sum. 
+""" +function with_logabsdet_jacobians(b::Exp, x) + return exp.(x), x +end + + # function Bijectors.with_logabsdet_jacobian(ib::Inverse{<:Exp}, y) # x = transform(ib, y) # return x, -logabsdetjac(inverse(ib), x) @@ -54,6 +102,11 @@ end # x = transform(ib, y) # return x, -logabsdetjac(inverse(ib), x) # end + +function with_logabsdet_jacobians(b::Logistic, x) + return transform(b,x), loglogistic.(x) .+ log1mlogistic.(x) +end + Bijectors.is_monotonically_increasing(::Logistic) = true @@ -108,6 +161,20 @@ function Bijectors.with_logabsdet_jacobian(sb::StackedArray, x::AbstractArray) return (ym, logjac) end +""" + with_logabsdet_jacobians(sb::StackedArray, x::AbstractArray) + +Return a Jacobian for each row in x. +""" +function with_logabsdet_jacobians(sb::StackedArray, x::AbstractArray) + (y, logjacs_vec) = with_logabsdet_jacobians(sb.stacked, vec(x)) + ym = reshape(y, size(x)) + # move sum to elbo, here return all components + #logjacs = sum(reshape(logjacs_vec, sb.nrow, :); dims = 2)[:,1] + logjacs = reshape(logjacs_vec, sb.nrow, :) + return (ym, logjacs) +end + function Bijectors.inverse(sb::StackedArray) inv_stacked = inverse(sb.stacked) return StackedArray{typeof(inv_stacked)}(sb.nrow, inv_stacked) diff --git a/src/clustering.jl b/src/clustering.jl new file mode 100644 index 0000000..179cd8e --- /dev/null +++ b/src/clustering.jl @@ -0,0 +1,259 @@ +""" + refit_clusters(rng, probo, solver, xM; scenario, n_cluster_initial, n_aggsplits, epochs) + +Iteratively refit the model and split clusters of sites based on overdispersion tests. + +When several sites are within one cluster, they are treated in a way, such that +all the observations constrain the uncertainty of the mean estimate within that +cluster. The fewer sites are within one cluster, the higher the uncertainty estimate. 
+ +This method implements a strategy to start with few clusters and checks if the +distribution of predicted site values within a cluster is overdispersed relative to +the uncertainty predicted for the cluster. If so, the cluster is split into smaller clusters +and the model is refitted. Because the refitting changes the uncertainty estimates, +only the few (1/10th) clusters with the most sites are checked for overdispersion, and +then a refitting takes place before checking the next clusters. + +# Arguments +- `rng`: random number generator for reproducibility +- `probo`: the probabilistic model to fit +- `solver`: optimization algorithm for fitting +- `xM`: input data for the model +- `scenario`: optional argument for different scenarios in the model +- `n_cluster_initial`: number of clusters to start with +- `n_aggsplits`: number of clusters to split before refitting +- `epochs`: number of epochs for refitting after each series of splits + +# Returns +- `probo`: the refitted probabilistic model +- `clusters`: final cluster assignments for each site +""" +function refit_clusters(rng, probo, solver, xM ; + scenario = Val(()), + epochs, + n_cluster_initial = 12, + n_aggsplits = 5, + n_cluster_sub = 4, + ) + Ucor = get_hybridproblem_cholesky_correlation_Ms(probo; xM, scenario) + (; X, σMs) = extract_MLpred(probo, xM; scenario) + σM = vec(median(σMs; dims=2)) + # first clustering based on uncertainty of unclustered sites but with argument cluster_rep + clusters = clusters0 = cluster_records(X, Ucor, σM; n_cluster_sub = n_cluster_initial) + (; probo, X, σMs) = refit(rng, probo, solver, xM, clusters; scenario, epochs) + # cnts_clusters_tosplit tracks the number of sites in clusters still to check, + # and will be updated on splitting clusters + cnts_clusters_totest = cnts0 = StatsBase.countmap(clusters) + while (length(cnts_clusters_totest) > 0) + # number of splits to do before refitting + n_aggsplits_i = max(n_aggsplits, length(cnts_clusters_totest) ÷ 10) + @show 
length(cnts_clusters_totest), n_aggsplits_i + # collect newly created smaller cluster, but only check them after refit + cnts_new = Dict{eltype(clusters), Int}() + while (n_aggsplits_i > 0) && (length(cnts_clusters_totest) > 0) + #global cnts, clusters, i_splits + i_cluster = argmax(cnts_clusters_totest) + i_sites = findall(isequal.(clusters, i_cluster)) + σM = vec(median(σMs[:,i_sites]; dims=2)) + # + (; is_overdispersed, clusters, clusters_sub) = split_cluster( + clusters, i_cluster, X, Ucor, σMs; n_cluster_sub) + @show i_cluster, cnts_clusters_totest[i_cluster], is_overdispersed + delete!(cnts_clusters_totest, i_cluster) # remove the inspected cluster + if is_overdispersed + cnts_new_cl = StatsBase.countmap(clusters_sub) + cnts_new = merge(cnts_new, cnts_new_cl) + n_aggsplits_i = n_aggsplits_i - 1 + end + end + if length(cnts_new) != 0 + cnts_clusters_totest = merge(cnts_clusters_totest, cnts_new) + (; probo, X, σMs) = refit(rng, probo, solver, xM, clusters; scenario, epochs) + end + end + (; probo, clusters) +end + +function refit(rng, probo, solver, xM, clusters; scenario, epochs) + (; probo) = solve(probo, solver; rng, + callback = callback_loss(100), # output during fitting + epochs, + clusters, + ); + (; X, σMs) = extract_MLpred(probo, xM; scenario) + (; probo, X, σMs) +end + +function extract_MLpred(probo, xM; scenario = Val(())) + g, ϕg0 = get_hybridproblem_MLapplicator(probo; scenario) + n_θ = HybridVariationalInference.get_numberof_θM(probo.approx, ϕg0) + ζ = g(xM, probo.ϕg) + X = ζ'[:,1:n_θ] + (;σP, σMs) = get_marginal_std(probo, xM; scenario) + (; X, σMs) +end + +function cluster_records(X_matrix::AbstractMatrix, Ucor::AbstractMatrix{T}, σM::AbstractVector; n_cluster_sub=4, cluster_ids = 1:n_cluster_sub, ) where T + # x_i -x_j are distributed N(0, 2Σ) + #Σ = 2 * HybridVariationalInference.compute_cov(Ucor, σM) + @assert n_cluster_sub == length(cluster_ids) "Length of cluster_ids must match n_cluster" + invΣ = 
HybridVariationalInference.compute_invcov(Ucor, σM) / T(2) + # Precompute differences: (X_i - X_j) for all i, j + # Use broadcasting to compute all pairwise differences + #diffs = X_matrix' .- X_matrix # Shape: (n_vars, n_rows, n_rows) + n_rows, n_vars = size(X_matrix) # e.g., 2000×10 + n_rows < n_cluster_sub && error("Cannot cluster $nrows records into $n_cluster_sub clusters.") + if n_rows == n_cluster_sub + # assign each record to its own cluster + clusters0 = 1:n_rows + else + diffs = reshape(X_matrix, n_rows, 1, n_vars) .- reshape(X_matrix, 1, n_rows, n_vars) + dist2_matrix = zeros(n_rows, n_rows) + for i in 1:n_rows + for j in 1:n_rows + diff = diffs[i, j, :] # (n_vars,) + dist2_matrix[i, j] = diff' * invΣ * diff + end + end + dist_matrix = sqrt.(dist2_matrix) + # If you want to go faster, use: + # dist_matrix = sqrt.(sum((diffs * invΣ) .* diffs, dims=3)[:, :, 1]) + res_clust = hclust(dist_matrix; linkage = :ward) + clusters0 = cutree(res_clust; k = n_cluster_sub) + () -> begin + counts(clusters0) + scatter(X_matrix[:,1], X_matrix[:,2], color = clusters0) + end + end + # translate 1:n_cluster to provided cluster_ids + clusters = cluster_ids[clusters0] +end + + +""" + compute_pvalue_asymptotic_overdispersion_from_dist2(dist2_matrix) + +Compute p-value for overdispersion using asymptotic approximation, +based on a precomputed matrix of squared Mahalanobis distances. 
+ +# Arguments +- `dist2_matrix`: m × m symmetric matrix of squared Mahalanobis distances + (dist2_matrix[i,j] = (x_i - x_j)' Σ⁻¹ (x_i - x_j)) +- n: the dimension of x_i (number of variables) +- The matrix must be symmetric and contain only upper/lower triangle values + +# Returns +- `p_value`: one-sided p-value for overdispersion +""" +function compute_pvalue_asymptotic_overdispersion_from_dist2(dist2_matrix, n) + m = size(dist2_matrix, 1) + + # Number of unique pairs (i < j) + N = m * (m - 1) ÷ 2 + + # Number of triplets (i,j,k) with i < j < k + K = m * (m - 1) * (m - 2) ÷ 6 + + # Extract upper triangle (i < j) and sum + S_obs = 0.0 + for i in 1:m + for j in i+1:m + S_obs += dist2_matrix[i, j] + end + end + + # Expected value under H0: each D_ij^2 ~ χ²_n → E[D_ij^2] = n + μ₀ = N * n + + # Variance under H0 (corrected for dependence between overlapping pairs) + var_S = 2 * n * N + 4 * n * K + + # Z-score + z_score = (S_obs - μ₀) / sqrt(var_S) + + # One-sided p-value: is the observed spread significantly larger? + p_value = 1 - StatsFuns.normcdf(z_score) + + return p_value +end + +""" + overdispersion_test(Y, μ, Σ; α=0.05) + +Test whether the q×p sample matrix Y (rows = individuals) is overdispersed +relative to the reference distribution N(μ, Σ). 
+ +Returns: S_n, E0, Var0, Z, p_value_normal, p_value_chisq +""" +function check_overdispersion( + X_matrix::AbstractMatrix{T}, Ucor::AbstractMatrix{T}, σM::AbstractVector{T}; + α::S=0.05 + ) where {S,T} + # see test_overdispersion_theory.md + n, p = size(X_matrix) + #Σ = HybridVariationalInference.compute_cov(Ucor, σM) + invΣ = HybridVariationalInference.compute_invcov(Ucor, σM) + # Step 1: Compute sum of distances across sample pairs + S_n = zero(T) + for i in 1:n + for j in (i+1):n + d = X_matrix[i, :] - X_matrix[j, :] + S_n += dot(d, invΣ * d) + end + end + # Step 2: compute expected (H0) moments + E0 = convert(S, p * n * (n - 1)) + Var0 = S(2.0) * p * abs2(n) * (n - 1) # see VarSumDij.md + SD0 = sqrt(Var0) + # Step 3: p-values of standardized statistic + Z = (S_n - E0) / SD0 + p_val_normal = one(S) - StatsFuns.normcdf(Z) + + ν = p * (n - 1) # degrees of freedom + c = S(n) # scaling constant + p_val_chisq = one(S) - cdf(Chisq(ν), S_n / c) + + is_overdispersed = p_val_chisq < α + #Main.@infiltrate_main + + return (; is_overdispersed, + S_n=S_n, E0=E0, Var0=Var0, p_normal=p_val_normal, p_chisq=p_val_chisq ) +end + +function split_cluster(clusters, i_cluster, X, Ucor, σMs; n_cluster_sub=4) + i_sites = findall(isequal.(clusters, i_cluster)) + n_sites_cl = length(i_sites) + if n_sites_cl == 1 + return (; is_overdispersed = false, clusters, clusters_sub = eltype(clusters)[]) + end + X_cluster = X[i_sites,:] + σM = vec(median(σMs[:,i_sites]; dims=2)) + # vec(std(X_cluster; dims = 1)) + is_overdispersed = check_overdispersion(X_cluster, Ucor, σM)[1] + if is_overdispersed + n_cluster_sub_i = min(n_cluster_sub, n_sites_cl) + cluster_ids = vcat(i_cluster, maximum(clusters) .+ (1:(n_cluster_sub_i-1))) + clusters_sub = cluster_records(X_cluster, Ucor, σM; cluster_ids, n_cluster_sub = n_cluster_sub_i); + clusters[i_sites] = clusters_sub + else + clusters_sub = eltype(clusters)[] + end + (; is_overdispersed, clusters, clusters_sub) + # else + # # split into "clusters" of 
single observations + # @info("Debug splitting cluster into single sites.") + # Main.@infiltrate_main + + # clusters_sub = vcat(i_cluster, maximum(clusters) .+ (1:(n_sites_cl-1))) + # clusters[i_sites] = cluster_sub + # (; is_overdispersed = false, clusters, clusters_sub) + # end +end + +function map_by_cluster(f, clusters) + cluster_ids = unique(clusters) + map(cluster_ids) do cluster_id + i_sites = findall(isequal.(clusters, cluster_id)) + f(i_sites) + end +end diff --git a/src/elbo.jl b/src/elbo.jl index 76d262c..199332e 100644 --- a/src/elbo.jl +++ b/src/elbo.jl @@ -30,10 +30,11 @@ expected value of the likelihood of observations. function neg_elbo_gtf(args...; kwargs...) # TODO prior and penalty loss (;nLjoint, entropy_ζ, loss_penalty, - nLy, neg_log_prior, neg_log_jac, + nLy, nLprior_P, nLprior_M, neg_log_jac, #nLmean_θ ) = neg_elbo_gtf_components(args...; kwargs...) - nL = nLjoint - entropy_ζ + loss_penalty #+ nLmean_θ + # negative of log_joint - need to subtract entropy_ζ + nL = nLjoint + loss_penalty - entropy_ζ #+ nLmean_θ # if !isfinite(nL) # @show nL # @show nLjoint, entropy_ζ, loss_penalty, nLy, @@ -58,12 +59,23 @@ function neg_elbo_gtf_components(rng, ϕ::AbstractVector{FT}, g, f, py, trans_mP =StackedArray(transP, n_MC), # provide with creating cost function trans_mMs =StackedArray(transMs.stacked, n_MC), priorsP, priorsM, - floss_penalty = zero_penalty_loss, + penalty_computer = ZeroPenaltyComputer(), is_testmode, is_omit_priors, zero_prior_logdensity, approx::AbstractHVIApproximation, + intθP, intθMs, + frac_cluster_all, ) where {FT} + ϕc = int_ϕg_ϕq(ϕ) + VT= typeof(@view(ϕ[1:1])) + ϕg = CA.getdata(ϕc.ϕg)::VT + ϕq = CA.getdata(ϕc.ϕq)::VT + if(!all(isfinite.(ϕ))) + @show ϕq + @show ϕg + error("encountered non-finite optimized parameters") + end n_MCr = isempty(priors_θP_mean) ? 
n_MC : max(n_MC, n_MC_mean) ζsP, ζsMs_tr, σ = generate_ζ(approx, rng, g, ϕ, xM; n_MC=n_MCr, cor_ends, pbm_covar_indices, int_ϕq, int_ϕg_ϕq, is_testmode, i_sites) @@ -71,14 +83,11 @@ function neg_elbo_gtf_components(rng, ϕ::AbstractVector{FT}, g, f, py, ζsMs_tr_cpu = cdev(ζsMs_tr) # fetch to CPU, because for <1000 sites (n_batch) this is faster # # maybe: translate ζ once and supply to both neg_elbo and negloglik_meanθ - ϕc = int_ϕg_ϕq(ϕ) - VT= typeof(@view(ϕ[1:1])) - ϕg = CA.getdata(ϕc.ϕg)::VT - ϕq = CA.getdata(ϕc.ϕq)::VT loss_comps = neg_elbo_ζtf( ζsP_cpu[:,1:n_MC], ζsMs_tr_cpu[:,:,1:n_MC], σ, f, py, xP, y_ob, y_unc; n_MC_cap, transP, transMs, priorsP, priorsM, - floss_penalty, ϕg, ϕq, is_omit_priors, zero_prior_logdensity,) + penalty_computer, ϕg, ϕq, is_omit_priors, zero_prior_logdensity, + i_sites, intθMs, intθP, frac_cluster_all) # # maybe: provide trans_mP and trans_mMs with creating cost function # not used any more and merging named tuples takes long @@ -126,23 +135,38 @@ Compute the neg_elbo for each sampled parameter vector (last dimension of ζs). - `neg_log_prior`: the prior of parameters at constrained scale - `logjac`, negative logarithm of the absolute value of the determinant of the Jacobian of the transformation `θ=T(ζ)`. 
-- `loss_penalty`: additional loss terms from floss_penalty +- `loss_penalty`: additional loss terms from penalty_computer - compute entropy of transformation """ -function neg_elbo_ζtf(ζsP, ζsMs_tr, σ, f, py, xP, y_ob, y_unc; +function neg_elbo_ζtf(ζsP::AbstractArray{T}, ζsMs_tr, σ, f, py, xP, y_ob, y_unc; n_MC_cap=size(ζsP,2), transP, transMs=StackedArray(transM, size(ζsMs_tr, 2)), priorsP, priorsM, - floss_penalty, ϕg, ϕq, + penalty_computer, ϕg, ϕq, is_omit_priors::Val, zero_prior_logdensity, -) + i_sites, intθP, intθMs, + frac_cluster_all, +) where T n_MC = size(ζsP,2) - f_sample = (ζP, ζMs_tr) -> begin - θP, θMs_tr, logjac_i = transform_and_logjac_ζ(ζP, ζMs_tr; transP, transMs) + #@show ζsMs_tr[1,4,:] # fourth component goes to NaN at some time + if !all(isfinite.(ζsMs_tr)) + return (; + nLjoint=T(1e9), entropy_ζ=zero(T), loss_penalty=zero(T), nLy=zero(T), + neg_log_prior=T(1e9), neg_log_jac=zero(T)) + end + frac_cluster = frac_cluster_all[i_sites] + f_sample = (ζP, ζMs_tr) -> begin + θP, θMs_tr, logjac_P, logjac_Ms = transform_and_logjac_ζ(ζP, ζMs_tr; transP, transMs) + if !all(isfinite.(θMs_tr)) + i_row = findfirst(θM -> !all(isfinite.(θM)), eachrow(θMs_tr)) + @show i_row #info "encountered non-finite θMs_tr at $(i_row)th site" + @show θMs_tr[i_row,:] + @show ζMs_tr[i_row,:] + end # currently logpdf only works on CPU - y_pred_i = f(θP, θMs_tr, xP) + (y_pred_i, addq_pred_i) = f(θP, θMs_tr, xP) #nLy1 = neg_logden_indep_normal(y_ob, y_pred_i, y_unc) # Main.@infiltrate_main # Test.@inferred( f(θP, θMs, xP) ) @@ -150,11 +174,24 @@ function neg_elbo_ζtf(ζsP, ζsMs_tr, σ, f, py, xP, y_ob, y_unc; # @usingany Cthulhu # @descend_code_warntype f(θP, θMs, xP) nLy_i = py(y_ob, y_pred_i, y_unc) - loss_penalty_i = convert(eltype(nLy_i),floss_penalty(y_pred_i, θMs_tr, θP, ϕg, ϕq)) - neg_log_prior_i = compute_priors_logdensity(priorsP, priorsM, θP, θMs_tr, + # MAYBE avoid convert by making sure penalty_computer returns proper type + # Test.@inferred 
compute_penalty(penalty_computer, y_pred_i, addq_pred_i, intθMs(θMs_tr), intθP(θP), i_sites, ϕg, ϕq)[1] + # loss_penalty_i = convert.(typeof(nLy_i),first(compute_penalty(penalty_computer, + # y_pred_i, addq_pred_i, intθMs(θMs_tr), intθP(θP), i_sites, ϕg, ϕq))) + loss_penalty_i = compute_penalty(penalty_computer, + y_pred_i, addq_pred_i, intθMs(θMs_tr), intθP(θP), i_sites, ϕg, ϕq)[1] + nLprior_P_i, nLprior_M_is = compute_priors_logdensity(priorsP, priorsM, θP, θMs_tr, is_omit_priors, zero_prior_logdensity) # make sure names to not match outer, otherwise Box type instability - (nLy_i, neg_log_prior_i, -logjac_i, loss_penalty_i) + # scale Likelihood and penalties to estimate all-site case from batch case + # scale nLy, priorsM, log_jac (sum for Exp), loss_penalty, and entropy + # essentially all, except prior_θP + # penalty should also be scaled, but then it does not select good parameters + #loss_penalty_if = sum(loss_penalty_i .* frac_cluster) + loss_penalty_if = sum(loss_penalty_i) + nLprior_M_if = sum(nLprior_M_is .* frac_cluster) + neg_log_jac_if = -logjac_P -sum(logjac_Ms .* frac_cluster) + (nLy_i, nLprior_P_i, nLprior_M_if, neg_log_jac_if, loss_penalty_if) #(nLy_i, 0.0, 0.0, 0.0) end # only Vector inferred, need to provide type hint @@ -163,16 +200,17 @@ function neg_elbo_ζtf(ζsP, ζsMs_tr, σ, f, py, xP, y_ob, y_unc; # Test.@inferred map(f_sample, eachcol(ζsP), eachslice(ζsMs; dims=3)) #using ShareAdd #@usingany Cthulhu - #@descend_code_warntype f_sample(first(eachcol(ζsP)), first(eachslice(ζsMs; dims=3))) - #map_res = Test.@inferred map(f_sample, eachcol(ζsP), eachslice(ζsMs; dims=3)) + #@descend_code_warntype f_sample(first(eachcol(ζsP)), first(eachslice(ζsMs_tr; dims=3))) + #Test.@inferred map(f_sample, eachcol(ζsP), eachslice(ζsMs_tr; dims=3)) map_res = map(f_sample, eachcol(ζsP), eachslice(ζsMs_tr; dims=3)) - nLys, neg_log_priors, neglogjacs, loss_penalties = vectuptotupvec(map_res) + nLys, nLpriors_P, nLpriors_M, neglogjacs, loss_penalties = 
vectuptotupvec(map_res) # For robustness may compute the expectation only on the n_smallest values # because its very sensitive to few large outliers #nLys_smallest = nsmallest(n_MC_cap, nLys) # does not work with Zygote if n_MC_cap == n_MC nLy = sum(nLys) / n_MC - neg_log_prior = sum(neg_log_priors) / n_MC + nLprior_P = sum(nLpriors_P) / n_MC + nLprior_M = sum(nLpriors_M) / n_MC neg_log_jac = sum(neglogjacs) / n_MC loss_penalty = sum(loss_penalties) / n_MC else @@ -182,13 +220,16 @@ function neg_elbo_ζtf(ζsP, ζsMs_tr, σ, f, py, xP, y_ob, y_unc; end # sum_log_σ = sum(log.(σ)) # logdet_jacT2 = -sum_log_σ # log Prod(1/σ_i) = -sum log σ_i - logdetΣ = 2 * sum(log.(σ)) - n_θ = size(ζsP, 1) + prod(size(ζsMs_tr)[1:2]) - if length(σ) != n_θ - error("TODO infiltrate") - #Main.@infiltrate_main - end - #@assert length(σ) == n_θ + #logdetΣ = 2 * sum(log.(σ)) # det(Σ) = Prod(σ_i^2) + # also scale entropy (that depends on logdetΣ) for only a fraction of sites in btach + n_θP = size(ζsP,1) + n_θM, n_site = size(ζsMs_tr)[1:2] + n_θ = n_θP + n_θM * n_site + @assert length(σ) == n_θ + σP = σ[1:n_θP] + σMs = reshape(σ[(n_θP+1):end], :, n_site) + #logdetΣ = 2 * (sum(log.(σ))) # det(Σ) = Prod(σ_i^2) + logdetΣ = 2 * (sum(log.(σP)) + sum(frac_cluster .* log.(σMs))) # det(Σ) = Prod(σ_i^2) entropy_ζ = entropy_MvNormal(n_θ, logdetΣ) # defined in logden_normal # if i_sites[1] == 1 # #Main.@infiltrate_main @@ -196,18 +237,19 @@ function neg_elbo_ζtf(ζsP, ζsMs_tr, σ, f, py, xP, y_ob, y_unc; # @show std(nLys), std(nLys)/abs(nLy) # @show std(nLys_smallest), std(nLys_smallest)/abs(nLy) # end - nLjoint = nLy + neg_log_prior + neg_log_jac - (;nLjoint, entropy_ζ, loss_penalty, nLy, neg_log_prior, neg_log_jac) + nLjoint = nLy + nLprior_P + nLprior_M + neg_log_jac + (;nLjoint, entropy_ζ, loss_penalty, + nLy, nLprior_P, nLprior_M, neg_log_jac) end function compute_priors_logdensity(priorsP, priorsM, θP, θMs_tr, ::Val{omit_priors}, zero_prior_logdensity) where {omit_priors} if omit_priors - 
zero_prior_logdensity + (; nLprior_P = zero_prior_logdensity, nLprior_M = fill(zero_prior_logdensity, size(θMs_tr,1))) elseif (θP isa AbstractGPUArray) || (θMs_tr isa AbstractGPUArray) @warn("neg_elbo_ζtf: Cannot apply priors to gpu array. Piors are omitted. "* "either compute PBM on CPU or omit priors.") - zero_prior_logdensity + (; nLprior_P = zero_prior_logdensity, nLprior_M = fill(zero_prior_logdensity, size(θMs_tr,1))) else compute_priors_logdensity(priorsP, priorsM, θP, θMs_tr, zero_prior_logdensity) end @@ -215,74 +257,32 @@ end function compute_priors_logdensity(priorsP, priorsM, θP, θMs_tr, zero_prior_logdensity) logpdf_t = (prior, θ) -> logpdf(prior, θ)::eltype(θP) - function logpdf_tv_sum(prior, θ::AbstractVector{T}) where T - # logpdf_tv_sum_inner = let prior = prior - # function(θi) - # lp = logpdf(prior, θi) - # # TT = ChainRulesCore.@ignore_derivatives Base.return_types(logpdf, Tuple{typeof(prior), typeof(θi)}) - # # if TT != [typeof(lp)] - # # error("encountered unstable logpdf: $TT") - # # end - # lp - # end - # end - # sum(logpdf_tv_sum_inner, θ) - sum(θi -> logpdf(prior, θi), θ) - end # handle edge case of no global parameters, where priorsP is empty nlP0 = isempty(priorsP) ? 
zero_prior_logdensity : -sum(logpdf_t.(priorsP, θP)) - # fi = (priorMi, θMi) -> begin - # logpdf_tv_sum(priorMi, θMi) - # sum(logpdf_tv_sum(priorMi, θMi))::eltype(θMi) - # end - f_col = let priorsM=priorsM, θMs_tr=θMs_tr - function f_col_inner(i) - # TP = ChainRulesCore.@ignore_derivatives Base.return_types(getindex, Tuple{typeof(priorsM), typeof(i)}) - # if TP != [typeof(priorsM[i])] - # error("encountered unstable priorsM: $TP") - # end - logpdf_tv_sum(priorsM[i], θMs_tr[:,i]) - #Tθ = Base.return_types(getindex, Tuple{typeof(θMs), Colon, typeof(i)}) - #TRET = Base.return_types(logpdf_tv_sum, Tuple{typeof(priorsM[i]), typeof(θMs[:,i])}) - end - end - # init keyword does not work with Zygote - #nlMs_sum = sum(f_col, 1:length(priorsM), init = zero(nlP0)) - nlMs_sum = sum(f_col, 1:length(priorsM))::typeof(nlP0) # not type inferred in julia 1.10 - neg_log_prior_i = nlP0 - nlMs_sum - if !isfinite(neg_log_prior_i) - @show neg_log_prior_i, nlP0 - @show θMs_tr + # prior for each parameter across vector (therefore Base.Fix1) of site + #nLprior_Ms_pars = map(i_par -> -logpdf(priorsM[i_par], θMs_tr[:,i_par])::Vector{typeof(nlP0)}, 1:length(priorsM)) + nLprior_Ms_pars = map(i_par -> -map( + Base.Fix1(logpdf, priorsM[i_par]), θMs_tr[:,i_par])::Vector{typeof(nlP0)}, + 1:length(priorsM)) + # aggregate across vars, for each site + nLprior_Ms = reduce(+, nLprior_Ms_pars) + if !isfinite(nlP0) || !all(isfinite.(nLprior_Ms)) + @show nlP0, sum(nLprior_Ms) @show priorsM + @show θMs_tr + @show nLprior_Ms error("inspect non-finite priors") end - neg_log_prior_i + (; nLprior_P = nlP0, nLprior_Ms) end -""" - zero_penalty_loss(y_pred, θMs, θP, ϕg, ϕq) - -Add zero i.e. no additional loss terms during the HVI fit. - -The basic cost in HVI is the negative log of the joint probability, i.e. -the likelihood of the observations given the parameters * prior probability -of the parameters. 
- -Sometimes there is additional knowledge not encoded in the prior, such as -one parameter must be larger than another, or entropy-weights of the -ML-parameters, and the solver accept a function to add additional loss terms. - -Arguments -- y_pred::AbstractMatrix: Observations -- θMs::AbstractMatrix: site parameters -- θP::AbstractVector: global parameters -- ϕg: ML-model parameters, -- ϕq::AbstractVector, additional parameters of the posterior -""" -function zero_penalty_loss( - y_pred::AbstractMatrix, θMs::AbstractMatrix, θP::AbstractVector, +struct ZeroPenaltyComputer <: AbstractPenaltyComputer end +function compute_penalty( + ::ZeroPenaltyComputer, + y_pred::AbstractMatrix, addq_pred::AbstractMatrix, θMs_tr::AbstractMatrix, θP::AbstractVector, + i_sites::AbstractVector, ϕg, ϕq::AbstractVector) - return zero(eltype(θMs)) + return (; penalty = fill(zero(eltype(θMs_tr)), size(θMs_tr,1))) end @@ -304,12 +304,20 @@ Prediction function for hybrid variational inference parameter model. Returns an NamedTuple `(; y, θsP, θsMs_tr, entropy_ζ)` with entries - `y`: Array `(n_obs, n_site, n_sample_pred)` of model predictions. +- `addq`: Array `(n_addq, n_site, n_sample_pred)` of additional quantities computed + by the PBM. - `θsP`: ComponentArray `(n_θP, n_sample_pred)` of PBM model parameters that are kept constant across sites. - `θsMs_tr`: ComponentArray `(n_site, n_θM, n_sample_pred)` of PBM model parameters that vary by site. - `entropy_ζ`: The entropy of the log-determinant of the transformation of the set of model parameters, which is involved in uncertainty quantification. +- `ζsP`: ComponentArray `(n_θP, n_sample_pred)` of PBM model parameters + that are kept constant across sites at the unconstrained scale. +- `ζsMs_tr`: ComponentArray `(n_site, n_θM, n_sample_pred)` of PBM model parameters + that vary by site at the unconstrained scale. 
+- `penalties`: output of problems penalty computer average across samples as a ComponentVector + Each component is a vector of length n_site. Note that for some approximations, such as `MeanVarSepHVIApproximation`, `prob.ϕq` contains uncertainty parameters that are specific to sites. @@ -322,6 +330,7 @@ function predict_hvi(rng, prob::AbstractHybridProblem; scenario=Val(()), xM = nothing, xP = nothing, is_testmode = true, i_sites = nothing, + n_sample_pred = 200, kwargs... ) if isnothing(xM) || isnothing(xP) @@ -333,8 +342,8 @@ function predict_hvi(rng, prob::AbstractHybridProblem; scenario=Val(()), xM = isnothing(xM) ? xM_dl[:,i_sites] : xM end # sample_posterior required consistent prob.ϕq and xM - (; θsP, θsMs_tr, entropy_ζ) = sample_posterior( - rng, prob, xM; scenario, gdevs, is_testmode, i_sites, kwargs...) + (; θsP, θsMs_tr, entropy_ζ, ζsP, ζsMs_tr) = sample_posterior( + rng, prob, xM; scenario, gdevs, is_testmode, i_sites, n_sample_pred,kwargs...) # n_site, n_batch = get_hybridproblem_n_site_and_batch(prob; scenario) n_site_pred = size(θsMs_tr,1) # determined by size(xM) @@ -347,8 +356,28 @@ function predict_hvi(rng, prob::AbstractHybridProblem; scenario=Val(()), f_dev = f end #y = apply_process_model(θsP, θsMs_tr, f_dev, xP) - y = f_dev(θsP, θsMs_tr, xP) - (; y, θsP, θsMs_tr, entropy_ζ) + (y, addq) = f_dev(θsP, θsMs_tr, xP) + # compute penalties + penalty_computer = get_hybridproblem_penalty_computer(prob; scenario) + pt = get_hybridproblem_par_templates(prob; scenario) + intθP = ComponentArrayInterpreter(pt.θP) + intθMs = ComponentArrayInterpreter((n_site_pred,), pt.θM) + #i_MC = 1 + #MAYBE use StaticArrays from NamedTuple and initial value + penalties_sum = mapreduce((x,y) -> x .+ y, axes(y,3)) do i_MC + CA.ComponentVector( + compute_penalty(penalty_computer, + y[:,:,i_MC], addq[:,:,i_MC], intθMs(θsMs_tr[:,:,i_MC]), intθP(θsP[:,i_MC]), i_sites, + prob.ϕg, prob.ϕq) + ) # TODO separate from prob + end + # reshape into ComponentMatrix with site rows + # 
intPen = ComponentArrayInterpreter((n_site_pred,), + # CA.ComponentVector(NamedTuple{keys(penalties_sum)}(0.0 for _ in keys(penalties_sum))) + # ) + # penalties = intPen(CA.getdata(penalties_sum)) ./ n_sample_pred + penalties = reshape_penalty_matrix(penalties_sum) ./ n_sample_pred + (; y, addq, θsP, θsMs_tr, entropy_ζ, ζsP, ζsMs_tr, penalties) end """ @@ -420,13 +449,18 @@ function sample_posterior(rng, prob::AbstractHybridProblem, xM::AbstractMatrix; if isnothing(approx) approx = prob.approx # assuming has field approx, e.g. if its a HybridProblem end - (; θsP, θsMs_tr, entropy_ζ) = sample_posterior(rng, g_dev, ϕ_dev, xM; + (; θsP, θsMs_tr, entropy_ζ, ζsP, ζsMs_tr) = sample_posterior(rng, g_dev, ϕ_dev, xM; int_ϕg_ϕq, int_ϕq, transP, transM, n_sample_pred, cdev=infer_cdev(gdevs), cor_ends, pbm_covar_indices, approx, kwargs...) - θsPc = ComponentArrayInterpreter(par_templates.θP, (n_sample_pred,))(θsP) - θsMsc_tr = ComponentArrayInterpreter((n_site,), par_templates.θM, (n_sample_pred,))(θsMs_tr) - (; θsP=θsPc, θsMs_tr=θsMsc_tr, entropy_ζ) + # attach ComponentArray structure + intP = ComponentArrayInterpreter(par_templates.θP, (n_sample_pred,)) + θsPc = intP(θsP) + ζsPc = intP(ζsP) + intMs = ComponentArrayInterpreter((n_site,), par_templates.θM, (n_sample_pred,)) + θsMsc_tr = intMs(θsMs_tr) + ζsMsc_tr = intMs(ζsMs_tr) + (; θsP=θsPc, θsMs_tr=θsMsc_tr, entropy_ζ, ζsP=ζsPc, ζsMs_tr=ζsMsc_tr) end function sample_posterior(rng, g, ϕ::AbstractVector, xM::AbstractMatrix; @@ -454,7 +488,7 @@ function sample_posterior(rng, g, ϕ::AbstractVector, xM::AbstractMatrix; θsP, θsMs_tr = is_infer ? Test.@inferred(transform_ζs(ζsP, ζsMs_tr; trans_mP, trans_mMs)) : transform_ζs(ζsP, ζsMs_tr; trans_mP, trans_mMs) - (; θsP, θsMs_tr, entropy_ζ) + (; θsP, θsMs_tr, entropy_ζ, ζsP, ζsMs_tr) end @@ -470,7 +504,8 @@ The output shape of size `(n_site x n_par x n_MC)` is tailored to iterating each MC sample and then transforming each parameter on block across sites. 
""" function generate_ζ( - approx::Union{AbstractMeanHVIApproximation, AbstractMeanVarSepHVIApproximation}, + #approx::Union{AbstractMeanHVIApproximation, AbstractMeanVarSepHVIApproximation}, + approx::AbstractHVIApproximation, rng::AbstractRNG, g, ϕ::AbstractVector{FT}, xM::MT; int_ϕg_ϕq::AbstractComponentArrayInterpreter, @@ -490,9 +525,16 @@ function generate_ζ( # TODO replace pbm_covar_indices by ComponentArray? dimensions to be type-inferred? xMP0 = _append_each_covars(xM, CA.getdata(μ_ζP), pbm_covar_indices) ϕm0 = g(xMP0, ϕg; is_testmode) - μ_ζMs0 = ϕm0 ζP_resids, ζMs_parfirst_resids, σ = sample_ζresid_norm(approx, rng, i_sites, ϕm0, ϕq; n_MC, cor_ends, int_ϕq) + n_θm = size(ζMs_parfirst_resids, 1) + μ_ζMs0 = ϕm0[1:n_θm, :] + # if !all(isfinite.(μ_ζMs0)) + # @show μ_ζMs0 + # is_infinite_ϕg = !all(isfinite.(ϕg)) + # @show is_infinite_ϕg + # error("encountered non-finite μ_ζMs0") + # end ζsP = isempty(μ_ζP) ? ζP_resids : (μ_ζP .+ ζP_resids) # n_par x n_MC if pbm_covar_indices isa SA.SVector{0} # do not need to predict again but just add the residuals to μ_ζP and μ_ζMs @@ -587,14 +629,18 @@ ML-model predcitions of size `(n_θM, n_site)`. * `int_ϕq`: Interpret vector as ComponentVector with components ρsP, ρsM, logσ2_ζP, coef_logσ2_ζMs(intercept + slope), """ -function sample_ζresid_norm(approx::AbstractHVIApproximation, rng::Random.AbstractRNG, +function sample_ζresid_norm( + #approx::Union{AbstractMeanHVIApproximation,AbstractMeanVarSepHVIApproximation}, + approx::AbstractHVIApproximation, + rng::Random.AbstractRNG, i_sites, ϕm::AbstractMatrix, ϕq::AbstractVector, args...; n_MC, cor_ends, int_ϕq) ζP = int_ϕq(CA.getdata(ϕq))[Val(:μP)] ζMs = ϕm - n_θP, n_θMs = length(ζP), length(ζMs) + n_θP, n_θM = length(ζP), get_numberof_θM(approx,ζMs) + n_θMs = n_θM * size(ζMs,2) # intm_PMs_parfirst = !isnothing(intm_PMs_parfirst) ? 
intm_PMs_parfirst : begin # n_θM, n_site_batch = size(ζMs) # get_concrete(ComponentArrayInterpreter( @@ -603,7 +649,7 @@ function sample_ζresid_norm(approx::AbstractHVIApproximation, rng::Random.Abstr #urandn = _create_randn(rng, CA.getdata(ζP), n_MC, n_θP + n_θMs) #z = _create_randn(rng, CA.getdata(ζP), n_MC, n_θP) zP = _create_randn(rng, CA.getdata(ζP), n_MC, n_θP) - zMs = _create_randn(rng, CA.getdata(ζP), n_MC, n_θMs) + zMs = _create_randn(rng, CA.getdata(ζP), n_MC, n_θMs) # ζP only for type inference sample_ζresid_norm(approx, i_sites, zP, zMs, CA.getdata(ϕm), ϕq, args...; cor_ends, int_ϕq=get_concrete(int_ϕq) @@ -616,45 +662,77 @@ end Extract correlation matrix of a problem based on `MeanHVIApproximation`. At unconstrained parameter scale. """ -function get_hybridproblem_correlation_Ms(prob::AbstractHybridProblem; +function get_hybridproblem_correlation_Ms(prob::AbstractHybridProblem; xM = nothing, scenario = Val(())) UM = get_hybridproblem_cholesky_correlation_Ms(prob; xM, scenario) UM' * UM end -function get_hybridproblem_cholesky_correlation_Ms(prob::AbstractHybridProblem; +function get_hybridproblem_cholesky_correlation_Ms(prob::AbstractHybridProblem; xM = nothing, scenario = Val(())) ϕq = get_hybridproblem_ϕq(prob; scenario) cor_ends = get_hybridproblem_cor_ends(prob; scenario) + ϕm = if isnothing(xM) + Matrix{eltype(ϕq)}(undef,0,0) + else + g, ϕg0 = get_hybridproblem_MLapplicator(prob; scenario) + g(xM, prob.ϕg) # TODO separate state, avoid prob.ϕg + end + get_cholesky_correlation_Ms(prob.approx, ϕq, cor_ends.M, ϕm) +end + +function get_cholesky_correlation_Ms( + ::Union{MeanHVIApproximation,MeanHVIApproximationMat,MeanScalingHVIApproximation}, + ϕq::CA.ComponentVector, cor_ends_M, ϕm::AbstractMatrix=Matrix{eltype(ϕq)}[],) + # correlations only depend on globally optimized parameters ϕq.ρsM ρsM = ϕq[Val(:ρsM)] - UM = transformU_block_cholesky1(ρsM, cor_ends.M) + UM = transformU_block_cholesky1(ρsM, cor_ends_M) end -function 
sample_ζresid_norm(approx::MeanHVIApproximationMat, + +function sample_ζresid_norm( + approx::Union{AbstractMeanScalingHVIApproximation, MeanHVIApproximationMat}, i_sites, zP::AbstractMatrix, zMs::AbstractMatrix, ϕm::TM, ϕq::AbstractVector{T}; int_ϕq=get_concrete(ComponentArrayInterpreter(ϕq)), - cor_ends, - # assume to index into ϕq at the beginning, or provide those indices here + cor_ends ) where {T,TM<:AbstractMatrix{T}} - ζMs = ϕm - ϕuncc = ϕqc = int_ϕq(CA.getdata(ϕq)) + ϕqc = int_ϕq(CA.getdata(ϕq)) ζP = ϕqc[Val(:μP)] - n_θP, n_θMs, (n_θM, n_batch) = length(ζP), length(ζMs), size(ζMs) - # do not create a UpperTriangular Matrix of an AbstractGÜUArray in transformU_cholesky1 - ρsP = isempty(ϕuncc[Val(:ρsP)]) ? similar(ϕuncc[Val(:ρsP)]) : ϕuncc[Val(:ρsP)] # required by zygote + n_θP, n_θM, n_batch = length(ζP), get_numberof_θM(approx, ϕm), size(ϕm, 2) + + UM = get_cholesky_correlation_Ms(approx, ϕqc, cor_ends.M, ϕm) + # isempty conditional required by zygote + ρsP = isempty(ϕqc[Val(:ρsP)]) ? similar(ϕqc[Val(:ρsP)]) : ϕqc[Val(:ρsP)] UP = transformU_block_cholesky1(ρsP, cor_ends.P) - ρsM = isempty(ϕuncc[Val(:ρsM)]) ? 
similar(ϕuncc[Val(:ρsM)]) : ϕuncc[Val(:ρsM)] # required by zygote - # cholesky factor of the correlation: diag(UM' * UM) .== 1 - # coefficients ρsM can be larger than 1, still yielding correlations <1 in UM' * UM - UM = transformU_block_cholesky1(ρsM, cor_ends.M) - cf = ϕuncc[Val(:coef_logσ2_ζMs)] - logσ2_logMs = vec(cf[1, :] .+ cf[2, :] .* ζMs) - logσ2_ζP = vec(CA.getdata(ϕuncc[Val(:logσ2_ζP)])) - # CUDA cannot multiply BlockDiagonal * Diagonal, construct already those blocks - σMs = reshape(exp.(logσ2_logMs ./ 2), n_θM, :) - σP = exp.(logσ2_ζP ./ 2) + (;σP, σMs) = get_marginal_std(approx, ϕqc, ϕm) + + # # add 0 as last logσ2_par_offset-par in block + # logσ2_par_offsets_before_end = OneBasedVectorWithZero(ϕqc[Val(:logσ2_ζM_offsets)]) + # logσ2_par_offsets = logσ2_par_offsets_before_end[approx.idxs_par0] + # n_scale_blocks = length(approx.scalingblocks_ends) + # n_par = size(ϕm,1) - n_scale_blocks + # ζMs = ϕm[1:n_par,:] + # logσ2_sites_offset_blocks = logit.(ϕm[(n_par+1):end,:]) # (0..1)->(-Inf, +Inf), 0.5->0 + # ζP = ϕqc[Val(:μP)] + # n_θP, n_θMs, (n_θM, n_batch) = length(ζP), length(ζMs), size(ζMs) + # # do not create a UpperTriangular Matrix of an AbgeneraGÜUArray in transformU_cholesky1 + # # isempty conditional required by zygote + # ρsP = isempty(ϕqc[Val(:ρsP)]) ? similar(ϕqc[Val(:ρsP)]) : ϕqc[Val(:ρsP)] + # UP = transformU_block_cholesky1(ρsP, cor_ends.P) + # ρsM = isempty(ϕqc[Val(:ρsM)]) ? 
similar(ϕqc[Val(:ρsM)]) : ϕqc[Val(:ρsM)] + # # cholesky factor of the correlation: diag(UM' * UM) .== 1 + # # coefficients ρsM can be larger than 1, still yielding correlations <1 in UM' * UM + # UM = transformU_block_cholesky1(ρsM, cor_ends.M) + # # + # logσ2_site_offsets = logσ2_sites_offset_blocks[approx.idxs_repblocks,:] + # logσ2_ζMs = approx.logσ2_ζM_bases .+ logσ2_par_offsets .+ logσ2_site_offsets + # # + # logσ2_ζP = vec(CA.getdata(ϕqc[Val(:logσ2_ζP)])) + # # CUDA cannot multiply BlockDiagonal * Diagonal, construct already those blocks + # σMs = exp.(logσ2_ζMs ./ T(2)) + # σP = exp.(logσ2_ζP ./ T(2)) # BlockDiagonal does work with CUDA, but not with combination of Zygote and CUDA # need to construct full matrix for CUDA Uσ, diagUσ = _compute_choleskyfactor(UP, UM, σP, σMs, n_batch) # inferred only BlockDiagonal @@ -677,6 +755,33 @@ function sample_ζresid_norm(approx::MeanHVIApproximationMat, # ζ_resid, diagUσ end +""" +Get the marginal standard deviations of the covariance matrix, that usually +depends on ML predictions. 
+ +Returns a NamedTuple with entries +- `σP`: vector of marginal standard deviations of population-level parameters +- `σMs`: `(n_θM, n_indiv)` matrix of marginal standard deviations of individual parameters +""" +function get_marginal_std(prob::AbstractHybridProblem, xM::AbstractMatrix; scenario) + g, ϕg0 = get_hybridproblem_MLapplicator(prob; scenario) + ϕg = prob.ϕg # TODO separate state and avoid prob.ϕg + ϕm = g(xM, ϕg) + get_marginal_std(prob.approx, prob.ϕq, ϕm) +end + +function get_marginal_std(::AbstractMeanHVIApproximation, + ϕqc::CA.ComponentVector, ϕm::AbstractMatrix=Matrix{eltype(ϕq)}[]) + ζMs = ϕm + n_θM = size(ζMs, 1) + cf = ϕqc[Val(:coef_logσ2_ζMs)] + logσ2_logMs = vec(cf[1, :] .+ cf[2, :] .* ζMs) + logσ2_ζP = vec(CA.getdata(ϕqc[Val(:logσ2_ζP)])) + σP = exp.(logσ2_ζP ./ 2) + σMs = reshape(exp.(logσ2_logMs ./ 2), n_θM, :) + (;σP, σMs) +end + """ Transforms each row of a matrix (n_MC x n_Par) with site parameters Ms inside n_Par of form (n_par x n_site) to Ms of the form (n_site x n_par), i.e. @@ -766,6 +871,9 @@ function _compute_choleskyfactor( B, diagUσ end +compute_cov(corU, σ) = Diagonal(σ) * (corU'*corU) * Diagonal(σ) +compute_invcov(corU, σ) = Diagonal(1 ./ σ) * (inv(corU) * inv(corU')) * Diagonal(1 ./ σ) + # TODO replace by KA.rand when it becomes available, see ones_similar # https://github.com/JuliaGPU/KernelAbstractions.jl/issues/488 function _create_randn(rng, ::AbstractVector{T}, dims...) 
where {T} @@ -782,8 +890,12 @@ Transform parameters and compute absolute of determinant of Jacobian of the tran function transform_and_logjac_ζ(ζP::AbstractVector, ζMs_tr::AbstractMatrix; transP::Bijectors.Transform, - transMs::StackedArray=StackedArray(transM, size(ζMs_tr, 1))) - θMs_tr, logjac_M = Bijectors.with_logabsdet_jacobian(transMs, ζMs_tr) + transMs::StackedArray=StackedArray(transM, size(ζMs_tr, 1)) + ) + # return all components, already reshaped by StackedArray + # need to sum across rows to summed get logdetjac for each site + θMs_tr, logjac_M_comps = with_logabsdet_jacobians(transMs, ζMs_tr) + logjac_Ms = sum(logjac_M_comps; dims = 2)[:,1] # if !all(isfinite.(θMs)) # @show θMs # @show ζMs @@ -797,14 +909,14 @@ function transform_and_logjac_ζ(ζP::AbstractVector, ζMs_tr::AbstractMatrix; θMs_tr = min.(sqrt(floatmax(eltype(θMs_tr))), θMs_tr) θMs_tr = max.(sqrt(floatmin(eltype(θMs_tr))), θMs_tr) θP, logjac_P = if isempty(ζP) - ζP, zero(logjac_M) + collect(ζP), zero(first(logjac_Ms)) # collect necessary for type stability else θP, logjac_P = Bijectors.with_logabsdet_jacobian(transP, ζP) θP = min.(sqrt(floatmax(eltype(θP))), θP) θP = max.(sqrt(floatmin(eltype(θP))), θP) θP, logjac_P end - θP, θMs_tr, logjac_P + logjac_M + θP, θMs_tr, logjac_P, logjac_Ms end """ diff --git a/src/elbo_scaling.jl b/src/elbo_scaling.jl new file mode 100644 index 0000000..6fd0f18 --- /dev/null +++ b/src/elbo_scaling.jl @@ -0,0 +1,29 @@ +# Similar to MeanHVIApproximationMat in elbo.jl +# but ML model predicts a scaling factor for a group of variance parameters +# ϕq element logσ2_ζM_offsets contains a vector of log-offsets, i.e. 
multipliers, +# for each block of ML scaled parameters +# the log-offset for the last entry in each block is stored in approx.logσ2_ζM_base + + +function get_marginal_std(approx::AbstractMeanScalingHVIApproximation, + ϕqc::CA.ComponentVector{T}, ϕm::AbstractMatrix=Matrix{eltype(ϕq)}[]) where T + # add 0 as last logσ2_par_offset-par in block + logσ2_par_offsets_before_end = OneBasedVectorWithZero(ϕqc[Val(:logσ2_ζM_offsets)]) + logσ2_par_offsets = logσ2_par_offsets_before_end[approx.idxs_par0] + n_scale_blocks = length(approx.scalingblocks_ends) + n_par = size(ϕm,1) - n_scale_blocks + ϕm_scalings = ϕm[(n_par+1):end,:] + logσ2_sites_offset_blocks = logit.(ϕm_scalings) # (0..1)->(-Inf, +Inf), 0.5->0 + logσ2_site_offsets = logσ2_sites_offset_blocks[approx.idxs_repblocks,:] + # + logσ2_ζMs = approx.logσ2_ζM_bases .+ logσ2_par_offsets .+ logσ2_site_offsets + logσ2_ζP = vec(CA.getdata(ϕqc[Val(:logσ2_ζP)])) + σMs = exp.(logσ2_ζMs ./ T(2)) + σP = exp.(logσ2_ζP ./ T(2)) + (;σP, σMs) +end + + + + + diff --git a/src/gencovar.jl b/src/gencovar.jl index 69206c8..637619f 100644 --- a/src/gencovar.jl +++ b/src/gencovar.jl @@ -3,27 +3,105 @@ Generate correlated covariates and synthetic true parameters that are a linear combination of the uncorrelated underlying principal factors and their binary combinations. 
-In addition provide a SimpleChains model of adequate complexity to -fit this relationship θMs_true = f(x_o) +In addition provide the true outputs θMs_true = f_true(x_o) """ function gen_cov_pred(rng::AbstractRNG, T::DataType, - n_covar_pc, n_covar, n_site, n_θM::Integer; - rhodec=8, is_using_dropout=false) - x_pc = rand(rng, T, n_covar_pc, n_site) - x_o = compute_correlated_covars(rng, x_pc; n_covar, rhodec) - # true model as a - # linear combination of uncorrelated base vectors and interactions - combs = Combinatorics.combinations(1:n_covar_pc, 2) - #comb = first(combs) - x_pc_comb = reduce(vcat, transpose.(map(combs) do comb - x_pc[comb[1], :] .* x_pc[comb[2], :] - end)) - x_pc_all = vcat(x_pc, x_pc_comb) - A = rand(rng, T, n_θM, size(x_pc_all, 1)) - θMs_true = A * x_pc_all - return (x_o, θMs_true) + n_covar_pc, n_covar, n_site, ζM; + scenario::Val{scen}, rhodec=8, is_using_dropout=false + ) where scen + n_ζM = length(ζM) + n_sites_cluster, clusters = get_clusters(n_site; scenario) + if any((:clustered_sites,:clustered_sites2) .∈ Ref(scen)) + # assuming all parameters log-transformed + # generate clusters of similar parameters and then back-compute the covariates + ζM_cl_center = map(x -> ζM .+ x, log.(T[0.8, 1.0, 1.2])) # cluster centers + # standard deviation of samples within cluster, 5% at original scale + σ = sqrt(log(T(1)+abs2(T(0.05)))) #ζM .* 0.02 + corrM = Matrix(T(1)*I, n_ζM, n_ζM) + corrM[1:(end-1), 2:end] .= corrM[2:end, 1:(end-1)] .= T(0.7) # correlated PBM parameters + # https://math.stackexchange.com/questions/3472602/how-to-create-covariance-matrix-when-correlation-matrix-and-stddevs-are-is-given + Sigma = PDMat(σ * σ' .* corrM) + dist = MultivariateNormal(Sigma) # problems with Zygote + #Udist = cholesky(Sigma) + #tmp = rand(rng, dist, 40) + #Udist.L * randn(n_ζM, 40) + # draw n_sites_cluster samples around those centers, with some noise + #Ainv = rand(rng, T, n_covar, n_ζM + sumn(n_ζM-1)) # original + combinations of 2 parameters + #Ainv = 
rand(rng, T, n_covar, 1 + sumn(n_ζM-1)) # first original + combinations of 2 parameters + hcat_ntuples = (t1, t2) -> Tuple(hcat(t1[i], t2[i]) for i in 1:length(t1)) + #i = 1 + xM, ζMs_true = mapreduce(hcat_ntuples, axes(n_sites_cluster,1)) do i + n_site_cl = n_sites_cluster[i] + ζMs_true_cl = ζM_cl_center[i] .+ rand(rng, dist, n_site_cl) + #ζMs_true_cl = ζM_cl_center[i] .+ Udist.L * randn(rng, n_ζM, n_site_cl) #rand(rng, dist, n_site_cl) + # generate a matrix that contains each combination of the scalar product of rows + ζMs_prod = reduce(vcat, transpose.(map(Combinatorics.combinations(1:n_ζM, 2)) do comb + ζMs_true_cl[comb[1], :] .* ζMs_true_cl[comb[2], :] + end)) + #xM_true = Ainv * vcat(ζMs_true_cl, ζMs_prod) + #xM_true = Ainv * vcat(ζMs_true_cl[1:1,:], ζMs_prod) # only provide the first parameter + xM_true = vcat(ζMs_true_cl[1:1,:], ζMs_prod) # only provide the first parameter + # need to add noise inputs to match covariate number + xM_noise = rand(rng, T, n_covar - size(xM_true, 1), n_site_cl) + if :exactML ∈ scen + xM = vcat(xM_true, xM_noise) + else + # add some noise to the covariates + xM = vcat( + xM_true .+ (T(0.05)*std(xM_true; dims=2)) .*randn(rng, T, size(xM_true)), + xM_noise) + end + # # generate correlated covariates by scaling the noise around the mean + # # does not work for clustered sites + # xM_mean = mean(xM_true_noise, dims=2) + # xM_resid_true = xM_true_noise .- xM_mean + # rhos=vcat(T(1.0), exp.(.-(1:(n_covar-1)) ./ T(rhodec))) + # #rhoM = repeat(rhos, 1, n_site_cl) + # noise = std(xM_resid_true, dims=2) .* randn(rng, T, size(xM_resid_true)) # noise to decorrelate + # xM_resid_cor = rhos .* xM_resid_true .+ (1 .- rhos) .* noise + # xM = xM_mean .+ xM_resid_cor + # cor(xM_true_noise[1, :], xM_true_noise[2, :]), cor(xM_true_noise[1, :], xM_true_noise[3, :]) , cor(xM_true_noise[1, :], xM_true_noise[4, :]) + # cor(xM[1, :], xM[2, :]), cor(xM[1, :], xM[3, :]) , cor(xM[1, :], xM[4, :]) + # cor(xM_resid_true[1, :], xM_resid_true[2, :]), 
cor(xM_resid_true[1, :], xM_resid_true[3, :]) , cor(xM_resid_true[1, :], xM_resid_true[4, :]) + # cor(xM_resid_cor[1, :], xM_resid_cor[2, :]), cor(xM_resid_cor[1, :], xM_resid_cor[3, :]) , cor(xM_resid_cor[1, :], xM_resid_cor[4, :]) , cor(xM_resid_cor[1, :], xM_resid_cor[5, :]) + # hcat_ntuples(t1,t2) + xM, ζMs_true_cl + end + else + x_pc = rand(rng, T, n_covar_pc, n_site) + xM = compute_correlated_covars(rng, x_pc; n_covar, rhodec) + # true model as a + # linear combination of uncorrelated base vectors and interactions + combs = Combinatorics.combinations(1:n_covar_pc, 2) + #comb = first(combs) + x_pc_comb = reduce(vcat, transpose.(map(combs) do comb + x_pc[comb[1], :] .* x_pc[comb[2], :] + end)) + x_pc_all = vcat(x_pc, x_pc_comb) + A = rand(rng, T, n_ζM, size(x_pc_all, 1)) + f_true = (x) -> A * x + ζMs_true0 = f_true(x_pc_all) + # center around mean with 10% relative error at original scale + σ = sqrt(log(T(1)+abs2(T(0.1)))) #ζM .* 0.1 + ζMs_true = scale_centered_at(ζMs_true0, ζM, fill(σ, size(ζMs_true0,1))) + end + return (; xM, ζMs_true, clusters) end +function get_clusters(n_site; scenario::Val{scen}) where scen + if any((:clustered_sites,:clustered_sites2) .∈ Ref(scen)) + n_sites_cluster = [30, n_site ÷ 4] + n_sites_cluster = vcat(n_sites_cluster, n_site .- sum(n_sites_cluster)) # ensure sum is n_site + clusters = vcat(fill.(1:length(n_sites_cluster), n_sites_cluster)...) 
+ else + # each site one cluster + n_sites_cluster = fill(1, n_site) + clusters = 1:n_site + end + return n_sites_cluster, clusters +end + + """ Create `n_covar` correlated covariates from uncorrelated row-wise vector `x_pc`, @@ -46,6 +124,18 @@ function compute_correlated_covars(rng::AbstractRNG, x_pc::AbstractMatrix{T}; return x_o end +# function compute_correlated_noise(rng::AbstractRNG, n_covar, n_site; +# rhodec=8, +# rhos=vcat(T(1.0), exp.(.-(1:(n_covar-1)) ./ T(rhodec))) +# ) where {T} +# # add noise to decorrelate +# rhoM = repeat(rhos, 1, n_site) +# noise = randn(rng, T, n_covar, n_site) .* T(0.2) +# x_o = rhoM .* x_oc .+ (1 .- rhoM) .* noise +# return x_o +# end + + """ scale_centered_at(x, m, σrel=1.0) scale_centered_at(x, m, σ) diff --git a/src/gf.jl b/src/gf.jl index a7f0b28..be20b37 100644 --- a/src/gf.jl +++ b/src/gf.jl @@ -85,11 +85,11 @@ function predict_point_hvi(rng, prob::AbstractHybridProblem; scenario=Val(()), xM = isnothing(xM) ? xM_dl : xM xP = isnothing(xP) ? xP_dl : xP end - y_pred, θMs_tr, θP = gf(prob, xM, xP; scenario, gdevs, is_testmode, kwargs...) + y_pred, addq_pred, θMs_tr, θP = gf(prob, xM, xP; scenario, gdevs, is_testmode, kwargs...) pt = get_hybridproblem_par_templates(prob) θPc = ComponentArrayInterpreter(pt.θP)(θP) θMsc = ComponentArrayInterpreter((size(θMs_tr,1),), pt.θM)(θMs_tr) - (;y_pred, θMs_tr=θMsc, θP=θPc) + (;y_pred, addq_pred, θMs_tr=θMsc, θP=θPc) end @@ -122,6 +122,7 @@ function gf(prob::AbstractHybridProblem, xM::AbstractMatrix, xP::AbstractMatrix; f_dev = f end pt = get_hybridproblem_par_templates(prob; scenario) + n_θM = length(pt.θM) (; transP, transM) = get_hybridproblem_transforms(prob; scenario) transMs = StackedArray(transM, n_site_pred) intP = ComponentArrayInterpreter(pt.θP) @@ -135,22 +136,22 @@ function gf(prob::AbstractHybridProblem, xM::AbstractMatrix, xP::AbstractMatrix; # hence result is not type-inferred, but may test at this context res = is_infer ? 
Test.@inferred( gf( - g_dev, transMs, transP, f_dev, xM_dev, xP, ϕg_dev, ζP_dev, pbm_covar_indices; + g_dev, transMs, transP, f_dev, xM_dev, xP, ϕg_dev, n_θM, ζP_dev, pbm_covar_indices; cdev, kwargs...)) : - gf(g_dev, transMs, transP, f_dev, xM_dev, xP, ϕg_dev, ζP_dev, pbm_covar_indices; + gf(g_dev, transMs, transP, f_dev, xM_dev, xP, ϕg_dev, n_θM, ζP_dev, pbm_covar_indices; cdev, kwargs...) end -function gf(g::AbstractModelApplicator, transMs, transP, f, xM, xP, ϕg, ζP; +function gf(g::AbstractModelApplicator, transMs, transP, f, xM, xP, ϕg, n_θM, ζP; cdev, pbm_covars, intP = ComponentArrayInterpreter(ζP), kwargs...) pbm_covar_indices = intP(1:length(intP))[pbm_covars] - gf(g, transM, transP, f, xM, xP, ϕg, ζP, pbm_covar_indices; kwargs...) + gf(g, transM, transP, f, xM, xP, ϕg, n_θM, ζP, pbm_covar_indices; kwargs...) end -function gf(g::AbstractModelApplicator, transMs, transP, f, xM, xP, ϕg, ζP, +function gf(g::AbstractModelApplicator, transMs, transP, f, xM, xP, ϕg, n_θM, ζP, pbm_covar_indices::AbstractVector{<:Integer}; cdev, is_testmode) # @show first(xM,5) @@ -162,28 +163,26 @@ function gf(g::AbstractModelApplicator, transMs, transP, f, xM, xP, ϕg, ζP, # end #xMP = _append_PBM_covars(xM, intP(ζP), pbm_covars) xMP = _append_each_covars(xM, CA.getdata(ζP), pbm_covar_indices) - θMs_tr = gtrans(g, transMs, xMP, ϕg; cdev, is_testmode) + θMs_tr = gtrans(g, transMs, xMP, ϕg, n_θM; cdev, is_testmode) # transPM = RRuleMonitor("transP", ζP -> transP(ζP)) # θP = transPM(CA.getdata(ζP)) θP = transP(CA.getdata(ζP)) θP_cpu = cdev(θP) - y_pred = f(θP_cpu, θMs_tr, xP) + y_pred, addq_pred = f(θP_cpu, θMs_tr, xP) # fM = RRuleMonitor("f in gf", (θP_cpu) -> f(θP_cpu, θMs_tr, xP), DI.AutoForwardDiff()) # y_pred = fM(θP_cpu) # fM = RRuleMonitor("f in gf", (θP_cpu, θMs_tr) -> f(θP_cpu, θMs_tr, xP)) # y_pred = fM(θP_cpu, θMs_tr) # very slow large JvP with θMs_tr - return y_pred, θMs_tr, θP_cpu + return y_pred, addq_pred, θMs_tr, θP_cpu end """ composition transM ∘ g: transformation 
after machine learning parameter prediction Provide a `transMs = StackedArray(transM, n_batch)` """ -function gtrans(g, transMs, xMP, ϕg; cdev, is_testmode) - # TODO remove after removing gf - # predict the log of the parameters +function gtrans(g, transMs, xMP, ϕg, n_θM; cdev, is_testmode) ϕg = g(xMP, ϕg; is_testmode) - ζMs_tr = ϕg' + ζMs_tr = ϕg[1:n_θM,:]' # ignore the uncertainty-related parameters ζMs_tr_cpu = cdev(ζMs_tr) θMs_tr = transMs(ζMs_tr_cpu) if !all(isfinite.(θMs_tr)) @@ -229,12 +228,18 @@ function get_loss_gf(g, transM, transP, f, py, intP::AbstractComponentArrayInterpreter = ComponentArrayInterpreter( intϕ(1:length(intϕ)).ϕP); cdev=cpu_device(), + par_templates::NamedTuple, pbm_covars, n_site_batch, - floss_penalty = zero_penalty_loss, + penalty_computer = ZeroPenaltyComputer(), priorsP, priorsM, - is_omit_priors::Val = Val(false), - zero_prior_logdensity, - kwargs...) + is_omit_priors::Val{omit_priors} = Val(false), + intθP, intθMs, + frac_cluster_all, + kwargs...) where omit_priors + + pt = par_templates + zero_prior_logdensity = omit_priors ? 
zero(eltype(pt.θP)) : get_zero_prior_logdensity( + priorsP, priorsM, pt.θP, pt.θM) let g = g, transM = transM, transP = transP, f = f, intϕ = get_concrete(intϕ), @@ -243,11 +248,14 @@ function get_loss_gf(g, transM, transP, f, py, pbm_covar_indices = CA.getdata(intP(1:length(intP))[pbm_covars]), zero_prior_logdensity = zero_prior_logdensity, is_omit_priors = is_omit_priors, priorsP = priorsP, priorsM = priorsM, - floss_penalty = floss_penalty, + penalty_computer = penalty_computer, + intθMs = get_concrete(intθMs), intθP = get_concrete(intθP), + frac_cluster_all = convert.(eltype(pt.θP),frac_cluster_all), + n_θM = length(priorsM), cpu_dev = cpu_device() # real cpu, different form infer_cdev(gdevs) that maybe idenetity #, intP = get_concrete(intP) #inv_transP = inverse(transP), kwargs = kwargs - function loss_gf(ϕ, xM, xP, y_o, y_unc, i_sites; is_testmode) + function loss_gf(ϕ::AbstractVector{T}, xM, xP, y_o, y_unc, i_sites; is_testmode) where T ϕc = intϕ(ϕ) # GPUArraysCore.allowscalar(() -> if !all(isfinite.(ϕ)) # @show ϕc.ϕP @@ -265,31 +273,40 @@ function get_loss_gf(g, transM, transP, f, py, @show ϕc.ϕP #Main.@infiltrate_main end - y_pred, θMs_tr_pred, θP_pred = gf( - g, transMs, transP, f, xM, xP, CA.getdata(ϕc.ϕg), CA.getdata(ϕc.ϕP), + y_pred, addq_pred, θMs_tr_pred, θP_pred = gf( + g, transMs, transP, f, xM, xP, CA.getdata(ϕc.ϕg), n_θM, + CA.getdata(ϕc.ϕP), pbm_covar_indices; cdev, is_testmode, kwargs...) 
+ # TODO check computation + frac_cluster = frac_cluster_all[i_sites] #σ = exp.(y_unc ./ 2) #nLy = sum(abs2, (y_pred .- y_o) ./ σ) nLy = py(y_o, y_pred, y_unc) # logpdf is not typestable for Distribution{Univariate, Continuous} - logpdf_t = (prior, θ) -> logpdf(prior, θ)::eltype(θP_pred) - logpdf_tv = (prior, θ::AbstractVector) -> begin - map(Base.Fix1(logpdf, prior), θ)::Vector{eltype(θP_pred)} - end - neg_log_prior = + # logpdf_t = (prior, θ) -> logpdf(prior, θ)::eltype(θP_pred) + # logpdf_tv = (prior, θ::AbstractVector) -> begin + # map(Base.Fix1(logpdf, prior), θ)::Vector{eltype(θP_pred)} + # end + nLprior_P, nLprior_Ms = # @descend_code_warntype ( compute_priors_logdensity(priorsP, priorsM, θP_pred, θMs_tr_pred, is_omit_priors, zero_prior_logdensity) - if !isfinite(neg_log_prior) + nLprior_M = sum(nLprior_Ms .* frac_cluster) + if !isfinite(nLprior_P) || !isfinite(nLprior_M) @info "loss_gf: encountered non-finite prior density" @show θP_pred, θMs_tr_pred, ϕc.ϕP error("debug get_loss_gf") end ϕq = eltype(θP_pred)[] # no uncertainty parameters optimized - loss_penalty = floss_penalty(y_pred, θMs_tr_pred, θP_pred, ϕc.ϕg, ϕq) + loss_penalties = first(compute_penalty(penalty_computer, + y_pred, addq_pred, intθMs(θMs_tr_pred), intθP(θP_pred), + i_sites, ϕc.ϕg, ϕq)) + #loss_penalty = sum(loss_penalties .* frac_cluster) + loss_penalty = sum(loss_penalties) #@show nLy, neg_log_prior, loss_penalty - nLjoint_pen = nLy + neg_log_prior + loss_penalty - return (;nLjoint_pen, y_pred, θMs_tr_pred, θP_pred, nLy, neg_log_prior, loss_penalty) + nLjoint_pen = nLy + nLprior_P + nLprior_M + loss_penalty + return (;nLjoint_pen, y_pred, θMs_tr_pred, θP_pred, nLy, nLprior_P, + nLprior_M, loss_penalty) end end end @@ -305,3 +322,4 @@ end # end # end # end + diff --git a/src/init_hybrid_params.jl b/src/init_hybrid_params.jl index c429e5a..d5108a1 100644 --- a/src/init_hybrid_params.jl +++ b/src/init_hybrid_params.jl @@ -94,7 +94,11 @@ Arguments: - `ρ0`: default entry for ρsP and ρsM, 
defaults = 0f0. - `coef_logσ2_logM`: default column for `coef_logσ2_ζMs`, defaults to `[-10.0, 0.0]` -Returns a `ComponentVector` of +Returns a Tuple of +- `ϕqc::ComponentVector`: parameters of the posterior approximation +- `approx`: possibly updated Approximation + +For MeanHVIApproximation, `ϕqc` contains components - `logσ2_ζP`: vector of log-variances of ζP (on log scale). defaults to -10 - `coef_logσ2_ζMs`: offset and slope for the log-variances of ζM scaling with @@ -121,7 +125,7 @@ function init_hybrid_ϕunc( coef_logσ2_ζMs, ρsP, ρsM) - ca = CA.ComponentVector(;nt...)::CA.ComponentVector + (; ϕqc = CA.ComponentVector(;nt...)::CA.ComponentVector, approx) end function init_hybrid_ϕunc( @@ -149,7 +153,7 @@ function init_hybrid_ϕunc( logσ2_ζMs, ρsP, ρsM) - ca = CA.ComponentVector(;nt...)::CA.ComponentVector + (; ϕqc = CA.ComponentVector(;nt...)::CA.ComponentVector, approx) end function compute_σ_unconstrained(transM::Stacked, θM, rel_err) @@ -175,3 +179,48 @@ end # end # end + +function init_hybrid_ϕunc( + approx::SApp, + cor_ends::NamedTuple, + ρ0::FT = 0.0f0, + logσ2_ζMs::AbstractMatrix{FT} = Array{FT}(undef, 0, 0), + logσ2_ζP::AbstractVector{FT} = fill(FT(-10.0), cor_ends.P[end]), + ρsP = fill(ρ0, get_cor_count(cor_ends.P)), + ρsM = fill(ρ0, get_cor_count(cor_ends.M)); + transM, + θM::CA.ComponentVector, + n_site::Integer, + relerr = 0.01, +) where {FT, SApp <: MeanScalingHVIApproximation} + logσ2 = if isempty(logσ2_ζMs) + # relative error of the template of θM + σ = compute_σ_unconstrained(transM, CA.getdata(θM), relerr) + logσ2 = FT(2) * log.(convert.(FT,σ)) + else + error("check and implement inferring median logσ2 from logσ2_ζMs") + median(logσ2_ζMs; dims=1) + end + is_end = approx.scalingblocks_ends # abbreviation + # update logσ2_ζM_base of last parameter in approx - its not calibrated + approx = SApp(approx; logσ2_ζM_base = logσ2[is_end]) + is_par_offset = range.(vcat(1,is_end[1:(end-1)]),(is_end .- 1)) # excluding last parameter + length_scale_blocks 
= vcat(first(is_end), diff(is_end)) + is_par_offset = range.((is_end .- length_scale_blocks .+ 1),(is_end .- 1)) # excluding last parameter + # need to provide plain vector and sort out positions in apply to satisfy Zygote + # logσ2_ζM_offsets = map(is_end, is_offset) do i_end, is_offset + # logσ2[is_offset] .- logσ2[i_end] + # end + logσ2_ζM_offsets_gen = (logσ2[is_offset] .- logσ2[i_end] for + (i_end, is_offset) in zip(is_end, is_par_offset)) + #collect(logσ2_ζM_offsets_gen) + logσ2_ζM_offsets = vcat(logσ2_ζM_offsets_gen...) + #tmp = CA.ComponentVector(;zip(Symbol.(axes(is_offset,1)), logσ2_ζM_offsets_gen)...) + nt = (; + logσ2_ζP, + logσ2_ζM_offsets, + ρsP, + ρsM) + (; ϕqc = CA.ComponentVector(;nt...)::CA.ComponentVector, approx) +end + diff --git a/src/logden_normal.jl b/src/logden_normal.jl index 1655383..1d2c0b3 100644 --- a/src/logden_normal.jl +++ b/src/logden_normal.jl @@ -71,7 +71,7 @@ end # end -entropy_MvNormal(K, logdetΣ) = (K * log(2 * π * ℯ) + logdetΣ) / 2 +entropy_MvNormal(K::Integer, logdetΣ::T) where T = (T(K) * log(T(2) * T(π) * T(ℯ)) + logdetΣ) / T(2) # compiler figures out log(2 * π * ℯ) already, no need to tinker #entropy_MvNormal(K, logdetΣ) = (K * (1 + log(2π)) + logdetΣ) / 2 entropy_MvNormal(Σ) = entropy_MvNormal(size(Σ, 1), logdet(Σ)) diff --git a/src/util.jl b/src/util.jl index ff898d5..1022de1 100644 --- a/src/util.jl +++ b/src/util.jl @@ -34,3 +34,87 @@ function vectuptotupvec_allowmissing( allowmissing(passmissing(getindex).(vectup, i))::Vector{Tim[i]} end, npar) end +function vectuptotupvec(vecntup::AbstractVector{<:NamedTuple{KEYS}}) where KEYS + #vectup = values.(vecntup) + Ti = eltype(vecntup).parameters[2].parameters + npar = length(Ti) + tupvec = ntuple(i -> + (getindex.(vecntup, i))::Vector{Ti[i]}, npar) + NamedTuple{KEYS}(tupvec) +end +# function vectuptotupvec_(vecntup::AbstractVector{<:NamedTuple}) +# vectup = values.(vecntup) +# tupvec = vectuptotupvec(vectup) +# NamedTuple{keys(first(vecntup))}(tupvec) +# end + + +""" + 
take_n!(itr, n) + +Peel off the first `n` elements of an drop-iterator `itr` and +return them as a vector, while mutating `itr` to now start after those `n` elements. + +# Examples +```jldoctest; output=false +it = HybridVariationalInference.drop_iterate(1:5) # initialize the iterator + +a1 = HybridVariationalInference.take_n!(it,3) +collect(a1) == [1,2,3] + +a2 = HybridVariationalInference.take_n!(it,3) +collect(a2) == [4,5] # only two element left, so return those + +a3 = HybridVariationalInference.take_n!(it,3) +collect(a3) == [] # no elements left, so return empty vector +# output +true +``` +""" +function take_n!(itr::Base.RefValue{<:Base.Iterators.Drop},n) + ans = Iterators.take(itr[], n) + itr[] = Iterators.drop(itr[], n) + ans +end +drop_iterate(x) = Ref(Iterators.drop(x,0)) + +""" + insert_zeros(v, positions) + +Return a new vector with `zero(eltype(v))` inserted at each position in `positions`. +Positions are applied in order against the growing vector (as if sequential inserts), +so later indices are interpreted on the updated result. +Only one output vector is allocated. +""" +function insert_zeros(v::AbstractVector, positions::AbstractVector{<:Integer}) + # does not work with Zygote, but its only used to create the indexing vector + # v = [10,20,30];positions = [2, 5] # means insert zeros before original v[2] and v[4], so final output has zeros at those positions. + @assert length(v) + length(positions) == positions[end] "The last position in `positions` must be equal to the final length of the output vector after all insertions." + dpos1 = diff(positions) .- 1 + @assert all(dpos1 .>= 0) "Positions must be in strictly ascending order." + # length of blocks before insert is diff(positions) -1 + length_blocks_beforeinsert = Iterators.flatten((first(positions) .- 1, dpos1)) + #collect(length_blocks_beforeinsert) == [1,2] + it = drop_iterate(v) # to allow take_n! 
+ #collect(HVI.take_n!(it, 4)) == v + gen = (Iterators.flatten( + (take_n!(it, l), zero(eltype(v)))) for l in length_blocks_beforeinsert) + # collect(Iterators.flatten(gen)) == [10, 0, 20, 30, 0] + return collect(Iterators.flatten(gen)) +end + +function ChainRulesCore.rrule(::typeof(insert_zeros), v::AbstractVector, positions::AbstractVector{<:Integer}) + y = insert_zeros(v, positions) + # Reverse pass (pullback) for gradient of `insert_zeros`: + # - We only propagate gradients into `v`. + # - `positions` is treated as non-differentiable (NoTangent()). + # We ignore the gradients for the positions, where zero was inserted + # Otherwise, we just need to extract the corresponding positions in ȳ + function pullback(ȳ) + n = length(v) + m = length(positions) + grad_v = OneBasedVectorWithZero(ȳ[:])[1:(n+m) .∉ Ref(positions)] + return NoTangent(), grad_v, NoTangent() + end + return y, pullback +end diff --git a/src/util_ca.jl b/src/util_ca.jl index 6d4989b..f173638 100644 --- a/src/util_ca.jl +++ b/src/util_ca.jl @@ -63,4 +63,16 @@ axis_length(ax::CA.UnitRange) = length(ax) axis_length(ax::CA.ShapedAxis) = length(ax) axis_length(ax::CA.Shaped1DAxis) = length(ax) +""" + as_data_frame(cm::CA.ComponentMatrix) + as_data_frame(cm::CA.ComponentArray{T,3}) + as_data_frame(cm::CA.ComponentArray{T,4}) +Converts a ComponentMatrix with scalar keys in first or second dimension to a DataFrame. +If keys are in first column, the result corresponds to transposing the first +two dimensions. +With arrays of higher dimension, columns dim3 and dim4 are added that report +the index in this dimension. 
+""" +function as_data_frame end +# in ext/HybridVariationalInferenceDataFramesExt.jl to avoid DataFrames dependency diff --git a/test/Project.toml b/test/Project.toml index c78503d..a9f2606 100644 --- a/test/Project.toml +++ b/test/Project.toml @@ -4,6 +4,7 @@ Bijectors = "76274a88-744f-5084-9051-94815aaf08c4" BlockDiagonals = "0a1fb500-61f7-11e9-3c65-f5ef3456f9f0" CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba" ComponentArrays = "b0b7db55-cfe3-40fc-9ded-d10e2dbeff66" +DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0" DifferentiationInterface = "a0c0ee7d-e4b9-4e03-894e-1c5f64a51d63" DistributionFits = "45214091-1ed4-4409-9bcf-fdb48a05e921" Distributions = "31c24e10-a181-5473-b8eb-7969acd0382f" @@ -17,7 +18,9 @@ LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" Lux = "b2108857-7c20-44ae-9111-449ecde12c47" MLDataDevices = "7e8f7934-dd98-4c1a-8fe8-92b47a384d40" MLUtils = "f1d291b0-491e-4a28-83b9-f70985020b54" +Optimization = "7f7a1694-90dd-40f0-9382-eb1efda571ba" OptimizationOptimisers = "42dfb2eb-d2b4-4451-abcd-913932933ac1" +PDMats = "90014a1f-27ba-587c-ab20-58faa44d9150" Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" SafeTestsets = "1bc83da4-3b8d-516f-aca4-4fe02f6d838f" SimpleChains = "de6bee2f-e2f4-4ec7-b6ed-219cc6f6e9e5" diff --git a/test/runtests.jl b/test/runtests.jl index 41e0902..2495c47 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -3,12 +3,12 @@ const GROUP = get(ENV, "GROUP", "All") # defined in in CI.yml @time begin if GROUP == "All" || GROUP == "Basic" + #@safetestset "test" include("test/test_util.jl") + @time @safetestset "test_util" include("test_util.jl") #@safetestset "test" include("test/test_RRuleMonitor.jl") @time @safetestset "test_RRuleMonitor" include("test_RRuleMonitor.jl") #@safetestset "test" include("test/test_bijectors_utils.jl") @time @safetestset "test_bijectors_utils" include("test_bijectors_utils.jl") - #@safetestset "test" include("test/test_util.jl") - @time @safetestset "test_util" include("test_util.jl") 
#@safetestset "test" include("test/test_util_ca.jl") @time @safetestset "test_util_ca" include("test_util_ca.jl") #@safetestset "test" include("test/test_util_gpu.jl") diff --git a/test/test_HybridProblem.jl b/test/test_HybridProblem.jl index 7e55e63..637cea7 100644 --- a/test/test_HybridProblem.jl +++ b/test/test_HybridProblem.jl @@ -28,6 +28,7 @@ cdev = cpu_device() #scenario = Val((:MeanHVIApproxMat,)) #scenario = Val((:covarK2,)) #scen = CP._val_value(scenario) +#scenario = Val((:clustered_sites, )) function construct_problem(; scenario::Val{scen}) where scen FT = Float32 @@ -44,9 +45,8 @@ function construct_problem(; scenario::Val{scen}) where scen CA.getdata(θc[par])::ET end local y = r0 .+ r1 .* x.S1 ./ (K1 .+ x.S1) .* x.S2 ./ (K2 .+ x.S2) - return (y) + return (y, y[1:0]) end - n_out = length(θM) rng = StableRNG(111) # n_batch = 10 n_site, n_batch = get_hybridproblem_n_site_and_batch(CP.DoubleMM.DoubleMMCase(); scenario) @@ -57,8 +57,16 @@ function construct_problem(; scenario::Val{scen}) where scen i_test = n_site .+ (1:n_site_test) test_data = (; xM = xM[:, i_test], xP = xP[:, i_test], y_true = y_true[:, i_test], y_o = y_o[:, i_test], y_unc = y_unc[:, i_test]) - n_covar = size(xM,1) + approx = if (:scalingall ∈ scen) + MeanHVIApproximationMat([length(θM)]) + elseif (:MeanHVIApproxBlocks ∈ scen) + MeanHVIApproximationMat() + else + MeanHVIApproximationMat() + end + n_covar = size(xM,1) n_input = (:covarK2 ∈ scen) ? 
n_covar +1 : n_covar + n_out = get_numberof_MLinputs(approx, θM) g_chain = SimpleChain( static(n_input), # input dimension (optional) # dense layer with bias that maps to 8 outputs and applies `tanh` activation @@ -95,16 +103,11 @@ function construct_problem(; scenario::Val{scen}) where scen f_batch = PBMSiteApplicator( f_doubleMM; θP, θM, θFix=CA.ComponentVector{FT}(), xPvec=xP[:,1]) - ϕunc0 = init_hybrid_ϕunc(MeanHVIApproximation(), cor_ends, zero(FT); θM, transM, n_site) - ϕq = CP.update_μP_by_θP(ϕunc0, θP, transP) - approx = if (:MeanHVIApproxBlocks ∈ scen) - MeanHVIApproximation() - else - MeanHVIApproximationMat() - end + (; ϕqc, approx) = init_hybrid_ϕunc(approx, cor_ends, zero(FT); θM, transM, n_site) + ϕq = CP.update_μP_by_θP(ϕqc, θP, transP) HybridProblem(θM, ϕq, g_chain_scaled, ϕg0, f_batch, priors_dict, py, - transM, transP, train_dataloader, test_data, n_covar, n_site, n_batch; + transM, transP, train_dataloader, test_data, n_site, n_batch; cor_ends, pbm_covars, approx, #ϕunc0, ) @@ -151,7 +154,10 @@ test_without_flux = (scenario) -> begin #----------- fit g and θP to y_o rng = StableRNG(111) g, ϕg0 = get_hybridproblem_MLapplicator(prob; scenario) + pt = get_hybridproblem_par_templates(prob; scenario) n_site, n_batch = get_hybridproblem_n_site_and_batch(prob; scenario) + n_sites_cluster, clusters = CP.get_clusters(n_site; scenario) + frac_cluster_all = 1 ./ n_sites_cluster[clusters] train_loader = get_hybridproblem_train_dataloader(prob; scenario) (xM, xP, y_o, y_unc, i_sites) = first(train_loader) f = get_hybridproblem_PBmodel(prob; scenario) @@ -167,12 +173,14 @@ test_without_flux = (scenario) -> begin priorsM = Tuple(priors[k] for k in keys(par_templates.θM)) # slightly disturb θP_true p = p0 = vcat(ϕg0, par_templates.θP .* convert(eltype(ϕg0), 0.8)) + intθP = ComponentArrayInterpreter(pt.θP) + intθMs = ComponentArrayInterpreter((n_batch,), pt.θM) # Pass the site-data for the batches as separate vectors wrapped in a tuple - zero_prior_logdensity = 
CP.get_zero_prior_logdensity( - priorsP, priorsM, par_templates.θP, par_templates.θM) loss_gf = get_loss_gf(g, transM, transP, f, py, intϕ; - pbm_covars, n_site_batch = n_batch, priorsP, priorsM, zero_prior_logdensity, + par_templates = pt, + pbm_covars, n_site_batch = n_batch, priorsP, priorsM, + intθMs, intθP, frac_cluster_all, ) (_xM, _xP, _y_o, _y_unc, _i_sites) = first(train_loader) #l1 = loss_gf(p0, _xM, _xP, _y_o, _y_unc, _i_sites; is_testmode = false) @@ -183,6 +191,7 @@ test_without_flux = (scenario) -> begin # @descend_code_warntype ( loss_gf(p0, _xM, _xP, _y_o, _y_unc, _i_sites; is_testmode = true) ) + @test typeof(l1[1]) == eltype(p0) tld = first(train_loader) gr = Zygote.gradient(p -> loss_gf(p, tld...; is_testmode = false)[1], CA.getdata(p0)) @test gr[1] isa Vector @@ -198,6 +207,7 @@ test_without_flux = (scenario) -> begin callback = callback_loss(100), optprob, Adam(0.02), epochs = 150); loss_gf_sites = get_loss_gf(g, transM, transP, f, intϕ; + par_templates = pt, pbm_covars, n_site_batch = n_site) l1, y_pred, θMs_pred, θP, nLy, neg_log_prior = loss_gf_sites( res.u, train_loader.data...) 
@@ -210,6 +220,7 @@ end #scenario=Val((:default,)) test_without_flux(Val((:default,))) test_without_flux(Val((:covarK2,))) +test_without_flux(Val((:clustered_sites,))) import CUDA, cuDNN using GPUArraysCore @@ -263,7 +274,7 @@ test_with_flux = (scenario) -> begin @test θP.r0 < 1.5 * θPt.r0 @test exp(ϕ.ϕq.μP.K2) == θP.K2 < 1.5 * θP.K2 n_sample_pred = 12 - (; y, θsP, θsMs_tr, entropy_ζ) = predict_hvi(rng, probo; scenario, n_sample_pred); + (; y, addq, θsP, θsMs_tr, entropy_ζ) = predict_hvi(rng, probo; scenario, n_sample_pred); _,_,y_obs,_ = get_hybridproblem_train_dataloader(prob; scenario).data @test size(y) == (size(y_obs)..., n_sample_pred) yc = cdev(y) @@ -329,7 +340,7 @@ test_with_flux_gpu = (scenario) -> begin @test probo.ϕq == cdev(ϕ.ϕq) # predict using problem and its associated dataloader n_sample_pred = 201 - (; y, θsP, θsMs_tr) = predict_hvi(rng, probo; scenario = scenf, n_sample_pred); + (; y, addq, θsP, θsMs_tr) = predict_hvi(rng, probo; scenario = scenf, n_sample_pred); # to inspect correlations among θP and θMs_tr construct ComponentVector # TODO redo get_int_PMst_site # get_ca_int_PMs = let @@ -383,7 +394,7 @@ test_with_flux_gpu = (scenario) -> begin ); @test resopt.u isa GPUArraysCore.AbstractGPUVector n_sample_pred = 11 - (; y, θsP, θsMs_tr) = predict_hvi( + (; y, addq, θsP, θsMs_tr) = predict_hvi( rng, probo; scenario = scenf, n_sample_pred,is_inferred = Val(true)); # @test cdev(ϕ.ϕq.ρsM)[1] > 0 # too few iterations end; diff --git a/test/test_ModelApplicator.jl b/test/test_ModelApplicator.jl index 70521e6..55d86ac 100644 --- a/test/test_ModelApplicator.jl +++ b/test/test_ModelApplicator.jl @@ -25,12 +25,26 @@ end; @test y == c1 .* m end; +@testset "MagnitudeModelApplicator subset" begin + app = NullModelApplicator() + c1 = CA.ComponentVector(a = (a1 = 1, a2 = 2:3), b = 3:4) + range_scaled = 2:3 + m = 2 + g = MagnitudeModelApplicator(app, m; range_scaled) + y = g(c1, eltype(m)[]) + @test y[range_scaled] == c1[range_scaled] .* m + @test y[1:end .∉ 
Ref(range_scaled)] == c1[1:end .∉ Ref(range_scaled)] + ym = g(hcat(c1,c1 .* 2),eltype(m)[]) # transforming matrix + @test ym[:,1] == y + @test ym[:,2] == y .* 2 +end; + @testset "NormalScalingModelApplicator" begin app = NullModelApplicator() r = logistic.(randn(5)) # 0..1 σ = fill(2.0, 5) μ = collect(exp.(1.0:5.0)) # different magnitudes - g = NormalScalingModelApplicator(app, μ, σ) + g = NormalScalingModelApplicator(app, μ, σ, 1:0) y = g(r, eltype(μ)[]) p = normcdf.(μ, σ, y) #hcat(r, p) @@ -45,6 +59,38 @@ end; end end; +@testset "NormalScalingModelApplicator subset" begin + app = NullModelApplicator() + r = logistic.(randn(10)) # 0..1 + r2 = logistic.(randn(10)) # 0..1 + σ = fill(2.0, 5) + μ = collect(exp.(1.0:5.0)) # different magnitudes + range_scaled = 2 .+ (1:length(σ)) + g = NormalScalingModelApplicator(app, μ, σ, range_scaled) + y = g(r, eltype(μ)[]) + p = normcdf.(μ, σ, y[range_scaled]) + #hcat(r, p) + @test p ≈ r[range_scaled] + @test y[1:end .∉ Ref(range_scaled)] == r[1:end .∉ Ref(range_scaled)] + rm = hcat(r, r2) + ym = g(rm, eltype(μ)[]) + @test ym[:,1] == y + p2 = normcdf.(μ, σ, ym[range_scaled,2]) + @test p2 ≈ r2[range_scaled] + @test ym[1:end .∉ Ref(range_scaled),2] == r2[1:end .∉ Ref(range_scaled)] + #cdev = cpu_device() + if gdev isa MLDataDevices.AbstractGPUDevice + g_gpu = g |> gdev + @test g_gpu.μ isa GPUArraysCore.AbstractGPUArray + r_gpu = r |> gdev + rm_gpu = rm |> gdev + y = g_gpu(r_gpu, eltype(g_gpu.μ)[]) + @test y isa GPUArraysCore.AbstractGPUArray + ym = g_gpu(rm_gpu, eltype(g_gpu.μ)[]) + @test ym isa GPUArraysCore.AbstractGPUArray + end +end; + @testset "RangeScalingModelApplicator" begin app = NullModelApplicator() r = logistic.(randn(Float32, 5)) # 0..1 @@ -56,6 +102,10 @@ end; @test y ≈(r .* width .+ lowers) @test eltype(y) == eltype(r) #cdev = cpu_device() + rm = hcat(r, r .* 2) + ym = g(rm, []) + @test ym[:,1] == y + @test ym[:,2] ≈(rm[:,2] .* width .+ lowers) if gdev isa MLDataDevices.AbstractGPUDevice g_gpu = g |> gdev @test 
g_gpu.offset isa GPUArraysCore.AbstractGPUArray @@ -66,3 +116,37 @@ end; @test cdev(y_dev) ≈ y end end; + +@testset "RangeScalingModelApplicator subset" begin + app = NullModelApplicator() + r = logistic.(randn(Float32, 10)) # 0..1 + lowers = collect(exp.(1.0:5.0)) # different magnitudes + uppers = lowers .* 2 + range_scaled = 2 .+ (1:length(lowers)) + g = RangeScalingModelApplicator(app, lowers, uppers, eltype(r); range_scaled) + y = @inferred g(r, []) + width = uppers .- lowers + @test y[range_scaled] ≈ (r[range_scaled] .* width .+ lowers) + @test eltype(y) == eltype(r) + @test y[1:end .∉ Ref(range_scaled)] == r[1:end .∉ Ref(range_scaled)] + #cdev = cpu_device() + rm = hcat(r, r .* 2) + ym = g(rm, []) + @test ym[:,1] == y + @test ym[range_scaled,2] ≈(rm[range_scaled,2] .* width .+ lowers) + @test ym[1:end .∉ Ref(range_scaled),2] == rm[1:end .∉ Ref(range_scaled),2] + if gdev isa MLDataDevices.AbstractGPUDevice + g_gpu = g |> gdev + @test g_gpu.offset isa GPUArraysCore.AbstractGPUArray + @test g_gpu.width isa GPUArraysCore.AbstractGPUArray + r_gpu = r |> gdev + y_dev = g_gpu(r_gpu, []) + @test y_dev isa GPUArraysCore.AbstractGPUArray + @test cdev(y_dev) ≈ y + rm_gpu = rm |> gdev + ym_dev = g_gpu(rm_gpu, []) + @test y_dev isa GPUArraysCore.AbstractGPUArray + @test cdev(ym_dev) ≈ ym + end +end; + diff --git a/test/test_cholesky_structure.jl b/test/test_cholesky_structure.jl index 1d98b49..c2767cf 100644 --- a/test/test_cholesky_structure.jl +++ b/test/test_cholesky_structure.jl @@ -2,6 +2,7 @@ using LinearAlgebra, Test using HybridVariationalInference using HybridVariationalInference: HybridVariationalInference as CP using Zygote +import Optimization using OptimizationOptimisers using ComponentArrays: ComponentArrays as CA #using SymmetricFormats diff --git a/test/test_doubleMM.jl b/test/test_doubleMM.jl index 2684f05..6c147b1 100644 --- a/test/test_doubleMM.jl +++ b/test/test_doubleMM.jl @@ -11,6 +11,7 @@ using SimpleChains using MLUtils import Zygote +import 
Optimization using OptimizationOptimisers using MLDataDevices @@ -23,10 +24,13 @@ cdev = cpu_device() prob = DoubleMM.DoubleMMCase() scenario = Val((:default,)) + + #using Flux #scenario = Val((:use_Flux,)) #scenario = Val((:use_Flux,:f_on_gpu)) +n_site, n_batch = get_hybridproblem_n_site_and_batch(prob; scenario) par_templates = get_hybridproblem_par_templates(prob; scenario) @testset "get_hybridproblem_priors" begin @@ -36,18 +40,85 @@ par_templates = get_hybridproblem_par_templates(prob; scenario) @test quantile(priors[:K2], 0.95) ≈ θall.K2 * 3 # fitted in f_doubleMM end +@testset "gen_hybridproblem_synthetic clustered_sites" begin + scenario = Val((:clustered_sites,:exactML)) + scenario = Val((:clustered_sites,)) + par_templates = get_hybridproblem_par_templates(prob; scenario) + rng = StableRNG(111) # make sure to be the same as when constructing train_dataloader + (; xM, θP_true, θMs_true, xP, y_true, y_o, y_unc, + ) = gen_hybridproblem_synthetic(rng, prob; scenario); + @test eltype(xM) == eltype(θP_true) == eltype(θMs_true) == Float32 + n_sites_cluster, clusters = CP.get_clusters(n_site; scenario) + θM_cl_center = map(x -> CA.getdata(par_templates.θM) .* x, [0.8, 1.0, 1.2]) + #(; transP, transM) = get_hybridproblem_transforms(prob; scenario) + #ζM_cl_center = Ref(inverse(transM)(CA.getdata(par_templates.θM))) .* [0.8, 1.0, 1.2] # cluster centers + # i = 1 # i = 2 # i = 3 + for i in 1:length(n_sites_cluster) + #@show i + local i_sites_i = findall(clusters .== i) + @test all(isapprox.( + vec(mean(CA.getdata(θMs_true[:,i_sites_i]); dims = 2)), θM_cl_center[i], rtol = 0.05)) + @test all(isapprox.(vec(std(CA.getdata(θMs_true[:,i_sites_i]); dims = 2)) ./ θM_cl_center[i], + 0.05, rtol = 0.5)) + end + @test size(xP) == (16, n_site) + @test size(y_o) == (8, n_site) + # test same results for same rng + rng2 = StableRNG(111) + gen2 = gen_hybridproblem_synthetic(rng2, prob; scenario) + @test gen2.y_o == y_o + # @usingany UnicodePlots + # histogram(θMs_true[1,:], nbins = 
30) + # histogram(xM[1,:], nbins = 30) + # histogram(y_o[1,:], nbins = 30) + # histogram(y_o[4,:], nbins = 30) # only vague pattern of clustering in obs + # histogram(y_o[8,:], nbins = 30) +end + +() -> begin + # fit a neural network to predict the parameters of the clusters from the covariates + # and test, if covariates hold enough information to predict the parameters + g, ϕg0 = get_hybridproblem_MLapplicator(prob; scenario) + ϕg = ϕg0 + transMs = StackedArray(get_hybridproblem_transforms(prob; scenario).transM, n_site) + ζMs_true = inverse(transMs)(θMs_true) + flossg = (ϕg) -> begin + ζMs = g(xM, ϕg) # predict the parameters on unconstrained space + # θMs_tr = transMs(ζMs) + # loss = sum(abs2, θMs_tr' .- θMs_true) + loss = sum(abs2, ζMs .- ζMs_true) + return loss + end + optf = Optimization.OptimizationFunction((ϕg, p) -> flossg(ϕg), Optimization.AutoZygote()) + optprob = Optimization.OptimizationProblem(optf, ϕg0) + tmp = solve(optprob, Adam(0.02), maxiters = 1600) + ϕg = tmp.u + ζMs = g(xM, ϕg) # predict the parameters on unconstrained space + # @usingany UnicodePlots + scatterplot(vec(ζMs_true[1,:]), vec(ζMs[1,:])) + scatterplot(vec(ζMs_true[2,:]), vec(ζMs[2,:])) + i = 3 + i_sites_i = findall(clusters .== i) + scatterplot(vec(ζMs_true[1,i_sites_i]), vec(ζMs[1,i_sites_i])) + scatterplot(vec(ζMs_true[2,i_sites_i]), vec(ζMs[2,i_sites_i])) + # + θMs_tr = transMs(ζMs') + scatterplot(vec(θMs_true[1,:]), vec(θMs_tr'[1,:])) + scatterplot(vec(θMs_true[2,:]), vec(θMs_tr'[2,:])) +end + rng = StableRNG(111) # make sure to be the same as when constructing train_dataloader (; xM, θP_true, θMs_true, xP, y_true, y_o, y_unc, ) = gen_hybridproblem_synthetic(rng, prob; scenario); -n_site, n_batch = get_hybridproblem_n_site_and_batch(prob; scenario) i_sites = 1:n_site fneglogden = get_hybridproblem_neg_logden_obs(prob; scenario) @testset "gen_hybridproblem_synthetic" begin + @test eltype(xM) == eltype(θP_true) == eltype(θMs_true) == Float32 @test isapprox( 
vec(mean(CA.getdata(θMs_true); dims = 2)), CA.getdata(par_templates.θM), rtol = 0.02) @test isapprox(vec(std(CA.getdata(θMs_true); dims = 2)), - CA.getdata(par_templates.θM) .* 0.1, rtol = 0.02) + CA.getdata(par_templates.θM) .* 0.1, rtol = 0.1) @test size(xP) == (16, n_site) @test size(y_o) == (8, n_site) @@ -57,6 +128,7 @@ fneglogden = get_hybridproblem_neg_logden_obs(prob; scenario) @test gen2.y_o == y_o end + @testset "f_doubleMM_Matrix" begin is = repeat((1:length(θP_true))', n_site) θvec = CA.ComponentVector(P = θP_true, Ms = θMs_true) @@ -78,25 +150,25 @@ end #y = CP.DoubleMM.f_doubleMM(θ, xPM, θpos) end end - y = @inferred fy(θvec, xPM) + (y, _addq) = @inferred fy(θvec, xPM) f_batch = PBMSiteApplicator(CP.DoubleMM.f_doubleMM; θP = θP_true, θM = θMs_true[:,1], θFix=CA.ComponentVector(), xPvec=xP[:,1]) - y_exp = f_batch(θP_true, θMs_true', xP) + (y_exp, _addq_exp) = f_batch(θP_true, θMs_true', xP) @test y == y_exp - ygrad = Zygote.gradient(θv -> sum(fy(θv, xPM)), θvec)[1] + ygrad = Zygote.gradient(θv -> sum(fy(θv, xPM)[1]), θvec)[1] if gdev isa MLDataDevices.AbstractGPUDevice # θg = gdev(θ) # xPMg = gdev(xPM) # yg = CP.DoubleMM.f_doubleMM(θg, xPMg, intθ); θvecg = gdev(θvec); # errors without ";" xPMg = CP.apply_preserve_axes(gdev, xPM); - yg = fy(θvecg, xPMg) - yg = @inferred fy(θvecg, xPMg); + #yg, _addg = fy(θvecg, xPMg) + yg, -addg = @inferred fy(θvecg, xPMg); #@usingany Cthulhu #@descend_code_warntype fy(θvecg, xPMg) @test cdev(yg) == y_exp - ygradg = Zygote.gradient(θv -> sum(fy(θv, xPMg)), θvecg)[1]; + ygradg = Zygote.gradient(θv -> sum(fy(θv, xPMg)[1]), θvecg)[1]; @test ygradg isa CA.ComponentArray @test CA.getdata(ygradg) isa GPUArraysCore.AbstractGPUArray ygradgc = CP.apply_preserve_axes(cdev, ygradg) # can print the cpu version @@ -117,7 +189,7 @@ end (θvec, xPM, y_o, y_unc) -> begin θ = hcat(CA.getdata(θvec.P[is]), CA.getdata(θvec.Ms')) θc = intθ(θ) - y = CP.DoubleMM.f_doubleMM_sites(θc, xPM) + y = CP.DoubleMM.f_doubleMM_sites(θc, xPM)[1] #y = 
CP.DoubleMM.f_doubleMM(θ, xPM, θpos) res = fneglogden(y_o, y, y_unc) res @@ -198,8 +270,10 @@ end @testset "loss_gf" begin #----------- fit g and θP to y_o (without uncertainty, without transforming θP) g, ϕg0 = get_hybridproblem_MLapplicator(prob; scenario) + pt = get_hybridproblem_par_templates(prob; scenario) (; transP, transM) = get_hybridproblem_transforms(prob; scenario) n_site, n_site_batch = get_hybridproblem_n_site_and_batch(prob; scenario) + frac_cluster_all = fill(1, n_site) f = get_hybridproblem_PBmodel(prob; scenario) f2 = create_nsite_applicator(f, n_site) py = get_hybridproblem_neg_logden_obs(prob; scenario) @@ -220,14 +294,17 @@ end train_loader = get_hybridproblem_train_dataloader(prob; scenario) @assert train_loader.data == (xM, xP, y_o, y_unc, i_sites) pbm_covars = get_hybridproblem_pbmpar_covars(prob; scenario) + intθP = ComponentArrayInterpreter(pt.θP) + intθMs_batch = ComponentArrayInterpreter((n_batch,), pt.θM) + intθMs_site = ComponentArrayInterpreter((n_site,), pt.θM) #loss_gf = get_loss_gf(g, transM, f, intϕ; gdev = identity) - zero_prior_logdensity = CP.get_zero_prior_logdensity( - priorsP, priorsM, par_templates.θP, par_templates.θM) loss_gf = get_loss_gf(g, transM, transP, f, py, intϕ; - pbm_covars, n_site_batch = n_batch, priorsP, priorsM, zero_prior_logdensity) + pbm_covars, n_site_batch = n_batch, priorsP, priorsM, par_templates, + intθMs = intθMs_batch, intθP, frac_cluster_all) loss_gf_site = get_loss_gf(g, transM, transP, f2, py, intϕ; - pbm_covars, n_site_batch = n_site, priorsP, priorsM, zero_prior_logdensity) + pbm_covars, n_site_batch = n_site, priorsP, priorsM, par_templates, + intθMs = intθMs_site, intθP, frac_cluster_all) nLjoint = @inferred first(loss_gf(p0, first(train_loader)...; is_testmode=true)) (xM_batch, xP_batch, y_o_batch, y_unc_batch, i_sites_batch) = first(train_loader) # @usingany Cthulhu @@ -243,9 +320,9 @@ end #optprob, Adam(0.02), callback = callback_loss(100), maxiters = 5000); optprob, Adam(0.02), maxiters 
= 2000) - (;nLjoint_pen, y_pred, θMs_tr_pred, θP_pred, nLy, neg_log_prior, loss_penalty) = loss_gf_site( + (;nLjoint_pen, y_pred, θMs_tr_pred, θP_pred, nLy, nLprior_P, nLprior_M, loss_penalty) = loss_gf_site( res.u, train_loader.data...; is_testmode=true) - #(nLjoint, y_pred, θMs_tr_pred, θP, nLy, neg_log_prior, loss_penalty) = loss_gf(p0, xM, xP, y_o, y_unc); + #(nLjoint, y_pred, θMs_tr_pred, θP, nLy, nLprior_P, nLprior_M, loss_penalty) = loss_gf(p0, xM, xP, y_o, y_unc); θMs_tr_pred = CA.ComponentArray(θMs_tr_pred, CA.getaxes(θMs_true')) #TODO @test isapprox(par_templates.θP, intϕ(res.u).ϕP, rtol = 0.15) #@test cor(vec(θMs_true), vec(θMs_tr_pred)) > 0.8 @@ -254,10 +331,19 @@ end # started from low values -> increased but not too much above true values # logpdf.(priorsP, θP_pred) # logpdf.(priorsP, par_templates.θP) - @test all(transP(intϕ(p0).ϕP) .< θP_pred .< (1.2 .* par_templates.θP)) + @test all(transP(intϕ(p0).ϕP) .< θP_pred .< (1.2 .* θP_true)) + @test all(0.8 .* θP_true .< θP_pred .< (1.2 .* θP_true)) () -> begin #@usingany UnicodePlots + pdf(priorsP[1], θP_pred[1]) + pdf(priorsP[1], θP_true[1]) + pdf(priorsP[1], transP(intϕ(p0).ϕP)[1]) + #pdf(priorsM[1], transP(intϕ(p0).ϕP)[1]) + + quantile.(priorsM[2], [0.05, 0.5, 0.95]) + loss_gf(p0, xM, xP, y_o, y_unc, i_sites) + # scatterplot(θMs_true'[:,1], θMs_tr_pred[:,1]) scatterplot(θMs_true'[:,2], θMs_tr_pred[:,2]) scatterplot(log.(vec(θMs_true')), log.(vec(θMs_tr_pred))) diff --git a/test/test_elbo.jl b/test/test_elbo.jl index c92ea3d..a2e3818 100644 --- a/test/test_elbo.jl +++ b/test/test_elbo.jl @@ -24,15 +24,37 @@ ggdev = gpu_device() rng = StableRNG(111) +@testset "compute_invcov" begin + A = Hermitian(rand(3,3) + I) + covU = cholesky(A).U + σ = diag(covU) + corU = covU * inv(Diagonal(σ)) + @test isapprox(corU * Diagonal(σ), covU, rtol=0.01) + Apred = CP.compute_cov(corU, σ) + @test isapprox(Apred, A, rtol=0.01) + inv_pred = CP.compute_invcov(corU, σ) + @test isapprox(inv_pred, inv(A), rtol=0.1) +end + const 
prob = DoubleMM.DoubleMMCase() +scenario = Val((:covarK2,)) +scenario = Val((:scalingall,)) +scenario = Val((:sepvar,)) scenario = Val((:default,)) -#scenario = Val((:covarK2,)) +scenario = Val((:clustered_sites,)) +pt = get_hybridproblem_par_templates(prob; scenario) +FT = eltype(pt.θM) #approx = MeanHVIApproximationMat() #approx = MeanVarSepHVIApproximation() +#approx = MeanScalingHVIApproximation([length(pt.θM)], FT(2) .* log.([FT(0.1) * pt.θM[end]])) -test_scenario = (scenario, approx) -> begin - probc = HybridProblem(prob; scenario, approx); +test_scenario = (scenario) -> begin + #probc = HybridProblem(prob; scenario, approx); + probc = HybridProblem(prob; scenario); + # tmp = first(get_hybridproblem_train_dataloader(prob; scenario))[1] + # probc.g(tmp, probc.ϕg) + #@assert typeof(probc.approx) == typeof(approx) FT = get_hybridproblem_float_type(probc; scenario) par_templates = get_hybridproblem_par_templates(probc; scenario) int_P, int_M = map(ComponentArrayInterpreter, par_templates) @@ -60,7 +82,7 @@ test_scenario = (scenario, approx) -> begin f = get_hybridproblem_PBmodel(probc; scenario) f_pred = create_nsite_applicator(f, n_site) - n_θM, n_θP = values(map(length, par_templates)) + n_θP, n_θM = values(map(length, par_templates)) py = neg_logden_indep_normal @@ -74,10 +96,12 @@ test_scenario = (scenario, approx) -> begin # transP = elementwise(exp) # transM = Stacked(elementwise(identity), elementwise(exp)) #transM = Stacked(elementwise(identity), elementwise(exp), elementwise(exp)) # test mismatch - ϕq0 = init_hybrid_ϕq(approx, par_templates.θP, par_templates.θM, transP, cor_ends; transM, n_site) - # ϕunc0 = init_hybrid_ϕunc(cor_ends, zero(FT)) + (;ϕqc, approx) = tmp = init_hybrid_ϕq( + probc.approx, par_templates.θP, par_templates.θM, transP, cor_ends; transM, n_site) + probc = HybridProblem(probc; approx) # update approx in probc + # (ϕunc0, approx) = init_hybrid_ϕunc(cor_ends, zero(FT)) # ϕq0 = CP.update_μP_by_θP(ϕunc0, θP_true, transP) - (; ϕ, 
interpreters) = init_hybrid_params(ϕg0, ϕq0) + (; ϕ, interpreters) = init_hybrid_params(ϕg0, ϕqc) int_ϕq = interpreters.ϕq int_ϕg_ϕq = interpreters.ϕg_ϕq ϕ_ini = ϕ @@ -96,9 +120,10 @@ test_scenario = (scenario, approx) -> begin i_sites = 1:n_batch ζsP, ζsMs_tr, σ = @inferred ( + # @usingany Cthulhu # @descend_code_warntype ( CP.generate_ζ( - approx, rng, g, ϕ_ini, xM[:, i_sites]; + probc.approx, rng, g, ϕ_ini, xM[:, i_sites]; n_MC, cor_ends, pbm_covar_indices, i_sites, int_ϕq=interpreters.ϕq, int_ϕg_ϕq=interpreters.ϕg_ϕq, is_testmode = false) @@ -116,7 +141,7 @@ test_scenario = (scenario, approx) -> begin gr = Zygote.gradient( ϕ -> begin _ζsP, _ζsMs_tr, _σ = CP.generate_ζ( - approx, rng, g, ϕ, xM[:, i_sites]; + probc.approx, rng, g, ϕ, xM[:, i_sites]; i_sites, n_MC=8, cor_ends, pbm_covar_indices, int_ϕq=interpreters.ϕq, int_ϕg_ϕq=interpreters.ϕg_ϕq, @@ -126,7 +151,7 @@ test_scenario = (scenario, approx) -> begin @test gr[1] isa Vector end - if !(:covarK2 ∈ CP._val_value(scenario)) && (approx isa MeanHVIApproximation) + if !(:covarK2 ∈ CP._val_value(scenario)) && (probc.approx isa MeanHVIApproximation) # can only test distribution if g is not repeated @testset "generate_ζ check sd residuals $(last(CP._val_value(scenario)))" begin # prescribe very different uncertainties @@ -163,7 +188,7 @@ test_scenario = (scenario, approx) -> begin _ζsP, _ζsMs_tr, _σ = @inferred ( # @descend_code_warntype ( CP.generate_ζ( - approx, rng, g, _ϕ, xM_batch; + probc.approx, rng, g, _ϕ, xM_batch; i_sites, n_MC = n_predict, cor_ends, pbm_covar_indices, int_ϕq=interpreters.ϕq, int_ϕg_ϕq=interpreters.ϕg_ϕq, @@ -250,7 +275,7 @@ test_scenario = (scenario, approx) -> begin ζsP_d, ζsMs_tr_d, σ_d = @inferred ( # @descend_code_warntype ( CP.generate_ζ( - approx, rng, g_gpu, ϕ, xMg_batch; + probc.approx, rng, g_gpu, ϕ, xMg_batch; i_sites, n_MC, cor_ends, pbm_covar_indices, int_ϕq=interpreters.ϕq, int_ϕg_ϕq=interpreters.ϕg_ϕq, @@ -265,7 +290,7 @@ test_scenario = (scenario, approx) -> begin gr = 
Zygote.gradient( ϕ -> begin _ζsP, _ζsMs_tr, _σ = CP.generate_ζ( - approx, rng, g_gpu, ϕ, xMg_batch; + probc.approx, rng, g_gpu, ϕ, xMg_batch; i_sites, n_MC, cor_ends, pbm_covar_indices, int_ϕq=interpreters.ϕq, int_ϕg_ϕq=interpreters.ϕg_ϕq, @@ -348,8 +373,13 @@ test_scenario = (scenario, approx) -> begin end @testset "neg_elbo_gtf cpu $(last(CP._val_value(scenario)))" begin + scen = CP._val_value(scenario) i_sites = 1:n_batch transMs = StackedArray(transM, size(ζsMs_tr, 1)) + #intθMs = ComponentArrayInterpreter((n_batch,), int_M) + intθMs = get_concrete(ComponentArrayInterpreter((n_batch,), int_M)) + n_sites_cluster, clusters = CP.get_clusters(n_site; scenario) + frac_cluster_all = convert.(eltype(ϕ_ini), 1 ./ n_sites_cluster[clusters]) cost = @inferred ( #@descend_code_warntype ( neg_elbo_gtf(rng, ϕ_ini, g, f, py, @@ -358,10 +388,11 @@ test_scenario = (scenario, approx) -> begin cor_ends, pbm_covar_indices, transP, transMs, priorsP, priorsM, is_testmode = true, is_omit_priors = Val(false), zero_prior_logdensity=zero(eltype(ϕ_ini)), - approx, + probc.approx, intθMs, intθP = int_P, frac_cluster_all ) ) - @test cost isa Float64 + #@test cost isa Float64 + @test cost isa promote_type(eltype(xM), eltype(y_o), eltype(ϕ_ini)) gr = Zygote.gradient( ϕ -> neg_elbo_gtf(rng, ϕ, g, f, py, xM[:, i_sites], xP[:, i_sites], y_o[:, i_sites], y_unc[:, i_sites], i_sites; @@ -369,7 +400,7 @@ test_scenario = (scenario, approx) -> begin cor_ends, pbm_covar_indices, transP, transMs, priorsP, priorsM, is_testmode = false, is_omit_priors = Val(false), zero_prior_logdensity=zero(eltype(ϕ_ini)), - approx, + probc.approx, intθMs, intθP = int_P, frac_cluster_all ), CA.getdata(ϕ_ini)) @test gr[1] isa Vector @@ -390,7 +421,7 @@ test_scenario = (scenario, approx) -> begin n_MC=3, cor_ends, pbm_covar_indices, transP, transMs, priorsP, priorsM, is_testmode = true, is_omit_priors = Val(false), zero_prior_logdensity=zero(eltype(ϕ_ini)), - approx, + probc.approx, ) ) @test cost isa Float64 @@ -401,7 
+432,7 @@ test_scenario = (scenario, approx) -> begin n_MC=3, cor_ends, pbm_covar_indices, transP, transMs, priorsP, priorsM, is_testmode = false, is_omit_priors = Val(false), zero_prior_logdensity=zero(eltype(ϕ_ini)), - approx, + probc.approx, ), ϕ) @test gr[1] isa GPUArraysCore.AbstractGPUVector @@ -425,7 +456,7 @@ test_scenario = (scenario, approx) -> begin cdev = identity, n_sample_pred, cor_ends, pbm_covar_indices, is_testmode = true, - approx, + probc.approx, ) ) @test θsP isa AbstractMatrix @@ -434,7 +465,7 @@ test_scenario = (scenario, approx) -> begin θsPc = int_mP(θsP) @test all(θsPc[:r0, :] .> 0) # - y = @inferred f_pred(θsP, θsMs_tr, xP) + (y, addq) = @inferred f_pred(θsP, θsMs_tr, xP) @test y isa Array @test size(y) == (size(y_o)..., n_sample_pred) end @@ -454,7 +485,7 @@ test_scenario = (scenario, approx) -> begin cdev = identity, # do not transfer to CPU n_sample_pred, cor_ends, pbm_covar_indices, is_testmode = true, - approx, + probc.approx, ) ) # this variant without the problem, does not attach axes @@ -498,8 +529,11 @@ test_scenario = (scenario, approx) -> begin end # test_scenario -test_scenario(Val((:default,)), MeanHVIApproximationMat()) -test_scenario(Val((:default,)), MeanVarSepHVIApproximation()) +#test_scenario(Val((:scalingall,))) +test_scenario(Val((:clustered_sites,))) +test_scenario(Val((:default,))) +test_scenario(Val((:sepvar,))) # with providing process parameter as additional covariate -test_scenario(Val((:covarK2,)), MeanHVIApproximationMat()) +test_scenario(Val((:covarK2,))) + diff --git a/test/test_missingdriver.jl b/test/test_missingdriver.jl index b8247b3..94ebdce 100644 --- a/test/test_missingdriver.jl +++ b/test/test_missingdriver.jl @@ -82,6 +82,7 @@ end function test_driverNaN(scenario::Val{scen}) where scen + #scen = CP._val_value(scenario) prob = HybridProblem(DoubleMM.DoubleMMCase(); scenario); if (:use_rangescaling ∈ scen) @test prob.g isa RangeScalingModelApplicator diff --git a/test/test_util.jl b/test/test_util.jl 
index e273924..913bec6 100644 --- a/test/test_util.jl +++ b/test/test_util.jl @@ -1,7 +1,97 @@ using Test -using HybridVariationalInference: vectuptotupvec_allowmissing, vectuptotupvec +using HybridVariationalInference: vectuptotupvec_allowmissing, vectuptotupvec, insert_zeros +using HybridVariationalInference: HybridVariationalInference as HVI using Zygote + +@testset "OneBasedVectorWithZero" begin + # Standard Julia 1-based vector (no underlying shift) + v1 = HVI.OneBasedVectorWithZero([10,20,30]) + @test @inferred v1[1] == 10 + @test v1[2] == 20 + @test v1[3] == 30 + @test v1[[true,true,true]] == [10,20,30] + + v1[1] = 100 + @test v1[1] == 100 + @test v1.data[1] == 100 + + # Underlying 0-based vector should still present 1-based API. + struct ZeroBasedVector{T} <: AbstractVector{T} + data::Vector{T} + end + Base.size(v::ZeroBasedVector) = size(v.data) + Base.length(v::ZeroBasedVector) = length(v.data) + Base.axes(v::ZeroBasedVector) = (0:length(v)-1,) + Base.getindex(v::ZeroBasedVector, i::Integer) = v.data[i+1] + Base.setindex!(v::ZeroBasedVector, x, i::Integer) = (v.data[i+1] = x) + + data0 = ZeroBasedVector([10,20,30]) + v0 = HVI.OneBasedVectorWithZero(data0) + @test eltype(v0) == Int + @test @inferred v0[1] == 10 + @test v0[2] == 20 + @test v0[3] == 30 + + v0[1] = 100 + @test v0[1] == 100 + @test v0.data[0] == 100 + + # bounds for non-zero indices should be 1..length for wrapper independent of underlying axis + @test v0[0] == 0 + @test_throws BoundsError v0[-1] + @test_throws BoundsError v0[4] + @test_throws BoundsError v0[0] = 50 + + @test collect(v0) == [100,20,30] + @test length(v0) == 3 + @test axes(v0) == (Base.OneTo(3),) + + # gradient pass through values works + g = Zygote.gradient(x -> sum(HVI.OneBasedVectorWithZero(x)), [1.0, 2.0, 3.0]) + @test g == ([1.0,1.0,1.0],) + + v1 = HVI.OneBasedVectorWithZero([10,20,30]) + # @usingany Cthulhu + # @descend_code_warntype v1[0] + @inferred v1[0] + @test v1[0] == 0 # default value at index 0 is zero + v1 = 
HVI.OneBasedVectorWithZero([10,20,30]; val_at_zero=-5) + @test @inferred(v1[0]) == -5 # default value at index 0 is zero + @test v1[[1,1,2,3]] == [10,10,20,30] + @test v1[[1,0,0,3]] == [10,-5,-5,30] + g1 = Zygote.gradient(y -> sum(HVI.OneBasedVectorWithZero(y)[[1,1,0,0,2]]), [1.0,2.0,3.0]) + @test g1 == ([2.0,1.0,0.0],) +end; + +@testset "take_n!" begin + it = HVI.drop_iterate(1:5) # initialize the iterator + a1 = HVI.take_n!(it,3) + @test collect(a1) == [1,2,3] + a2 = HVI.take_n!(it,3) + @test collect(a2) == [4,5] # only two element left, so return those + a3 = HVI.take_n!(it,3) + @test collect(a3) == [] # no elements left, so return empty vector +end + +@testset "insert_zeros" begin + @test @inferred HVI.insert_zeros([1,2,3], [2,5]) == [1,0,2,3,0] + @test @inferred HVI.insert_zeros([1,2,3], [1,5]) == [0,1,2,3,0] + @test_throws AssertionError HVI.insert_zeros([1,2,3], [2,4]) + @test_throws AssertionError HVI.insert_zeros([1,2,3], [1,1,6]) + Zygote.gradient(x -> sum(HVI.insert_zeros(x, [2,5])), [1,2,3]) + + # + v = HVI.OneBasedVectorWithZero([10,20,30]) + idxs = HVI.insert_zeros(1:length(v), [2,5]) + res = @inferred v[idxs] + # @usingany Cthulhu + #@descend_code_warntype v[idxs] + @test res == [10, 0, 20, 30, 0] + @test Zygote.gradient(x -> sum(x[idxs]), v) == ([1.0, 1.0, 1.0],) + +end; + @testset "vectuptotupvec" begin vectup = [(1,1.01, "string 1"), (2,2.02, "string 2")] tupvec = @inferred vectuptotupvec(vectup) diff --git a/test/test_util_ca.jl b/test/test_util_ca.jl index a4871ad..c87f85b 100644 --- a/test/test_util_ca.jl +++ b/test/test_util_ca.jl @@ -2,6 +2,7 @@ using Test using HybridVariationalInference using HybridVariationalInference: HybridVariationalInference as CP using ComponentArrays: ComponentArrays as CA +using DataFrames @testset "compose_axes" begin @test (@inferred CP._add_interval(;ranges=(Val(1:3),), length = Val(2))) == (Val(1:3), Val(4:5)) @@ -17,3 +18,46 @@ using ComponentArrays: ComponentArrays as CA @test axc == CA.getaxes(vt)[1] end 
+@testset "as_data_frame" begin + v1 = CA.ComponentVector(a=1.1, b=2.1) + v2 = CA.ComponentVector(a=1.2, b=2.2) + cm = vcat(v1',v2') + df = as_data_frame(cm) + @test names(df) == ["a", "b"] + @test collect(df[1,:]) == CA.getdata(v1) + @test collect(df[2,:]) == CA.getdata(v2) + v2c = copy(v2) + # copy: + v2[1] = 1.3 + @test collect(df[2,:]) == CA.getdata(v2c) + # + # names in first dimension + cmt = hcat(v1,v2) + df = as_data_frame(cmt) + @test collect(df[1,:]) == CA.getdata(v1) + @test collect(df[2,:]) == CA.getdata(v2) + # + cm = cmt' + df = as_data_frame(cm) + @test collect(df[1,:]) == CA.getdata(v1) + @test collect(df[2,:]) == CA.getdata(v2) + # + cma = stack([cm,cm .* 10]) + df = as_data_frame(cma) + @test Array(df[1:2,1:2]) == CA.getdata(cm) + @test Array(df[3:4,1:2]) == CA.getdata(cm .* 10) + @test all(vec(df[1:2,:dim3]) .== 1) + @test all(vec(df[3:4,:dim3]) .== 2) + # + cma1 = stack([cm',cm' .* 10]) + df1 = as_data_frame(cma1) + @test df1 == df + # + cma4 = stack([cma, cma .* 10]) + df = as_data_frame(cma4) + # + cma41 = stack([cma1, cma1 .* 10]) + df2 = as_data_frame(cma41) + @test df2 == df +end +