diff --git a/Manifest.toml b/Manifest.toml index 11b41ad..bb442be 100644 --- a/Manifest.toml +++ b/Manifest.toml @@ -23,9 +23,9 @@ version = "0.3.0" [[deps.Adapt]] deps = ["LinearAlgebra"] -git-tree-sha1 = "af92965fb30777147966f58acb05da51c5616b5f" +git-tree-sha1 = "195c5505521008abea5aee4f96930717958eac6f" uuid = "79e6a3ab-5dfb-504d-930d-738a2a938a0e" -version = "3.3.3" +version = "3.4.0" [[deps.ArgTools]] uuid = "0dad84c5-d112-42e6-8d28-ef12dabb789f" @@ -38,15 +38,15 @@ version = "0.2.0" [[deps.ArrayInterface]] deps = ["ArrayInterfaceCore", "Compat", "IfElse", "LinearAlgebra", "Static"] -git-tree-sha1 = "6ccb71b40b04ad69152f1f83d5925de13911417e" +git-tree-sha1 = "d6173480145eb632d6571c148d94b9d3d773820e" uuid = "4fba245c-0d91-5ea0-9b3e-6abc04ee57a9" -version = "6.0.19" +version = "6.0.23" [[deps.ArrayInterfaceCore]] deps = ["LinearAlgebra", "SparseArrays", "SuiteSparse"] -git-tree-sha1 = "7d255eb1d2e409335835dc8624c35d97453011eb" +git-tree-sha1 = "5bb0f8292405a516880a3809954cb832ae7a31c5" uuid = "30b0a656-2188-435a-8636-2ec0e6a096e2" -version = "0.1.14" +version = "0.1.20" [[deps.ArrayInterfaceOffsetArrays]] deps = ["ArrayInterface", "OffsetArrays", "Static"] @@ -137,9 +137,9 @@ version = "1.2.0" [[deps.CPUSummary]] deps = ["CpuId", "IfElse", "Static"] -git-tree-sha1 = "b1a532a582dd18b34543366322d390e1560d40a9" +git-tree-sha1 = "9bdd5aceea9fa109073ace6b430a24839d79315e" uuid = "2a0fbf3d-bb9c-48f3-b0a9-814d99fd7ab9" -version = "0.1.23" +version = "0.1.27" [[deps.CUDA]] deps = ["AbstractFFTs", "Adapt", "BFloat16s", "CEnum", "CompilerSupportLibraries_jll", "ExprTools", "GPUArrays", "GPUCompiler", "LLVM", "LazyArtifacts", "Libdl", "LinearAlgebra", "Logging", "Printf", "Random", "Random123", "RandomNumbers", "Reexport", "Requires", "SparseArrays", "SpecialFunctions", "TimerOutputs"] @@ -160,22 +160,22 @@ uuid = "aafaddc9-749c-510e-ac4f-586e18779b91" version = "0.2.2" [[deps.ChainRules]] -deps = ["ChainRulesCore", "Compat", "Distributed", "IrrationalConstants", "LinearAlgebra", "Random", "RealDot", "SparseArrays", "Statistics"] -git-tree-sha1 = "cc81c5c6bab557f89e4b5951b252d7ab863639a4" +deps = ["Adapt", "ChainRulesCore", "Compat", "Distributed", "GPUArraysCore", "IrrationalConstants", "LinearAlgebra", "Random", "RealDot", "SparseArrays", "Statistics", "StructArrays"] +git-tree-sha1 = "a5fd229d3569a6600ae47abe8cd48cbeb972e173" uuid = "082447d4-558c-5d27-93f4-14fc19e9eca2" -version = "1.37.0" +version = "1.44.6" [[deps.ChainRulesCore]] deps = ["Compat", "LinearAlgebra", "SparseArrays"] -git-tree-sha1 = "2dd813e5f2f7eec2d1268c57cf2373d3ee91fcea" +git-tree-sha1 = "dc4405cee4b2fe9e1108caec2d760b7ea758eca2" uuid = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4" -version = "1.15.1" +version = "1.15.5" [[deps.ChangesOfVariables]] deps = ["ChainRulesCore", "LinearAlgebra", "Test"] -git-tree-sha1 = "1e315e3f4b0b7ce40feded39c73049692126cf53" +git-tree-sha1 = "38f7a08f19d8810338d4f5085211c7dfa5d5bdd8" uuid = "9e997f8a-9a97-42d5-a9f1-ce6bfc15e2c0" -version = "0.1.3" +version = "0.1.4" [[deps.CloseOpenIntervals]] deps = ["ArrayInterface", "Static"] @@ -227,9 +227,9 @@ version = "0.3.0" [[deps.Compat]] deps = ["Dates", "LinearAlgebra", "UUIDs"] -git-tree-sha1 = "924cdca592bc16f14d2f7006754a621735280b74" +git-tree-sha1 = "5856d3031cdb1f3b2b6340dfdc66b6d9a149a374" uuid = "34da2185-b29b-5c13-b0c7-acf172513d20" -version = "4.1.0" +version = "4.2.0" [[deps.CompilerSupportLibraries_jll]] deps = ["Artifacts", "Libdl"] @@ -246,6 +246,12 @@ git-tree-sha1 = "6e47d11ea2776bc5627421d59cdcc1296c058071" uuid = 
"8f4d0f93-b110-5947-807f-2305c1781a2d" version = "1.7.0" +[[deps.ConstructionBase]] +deps = ["LinearAlgebra"] +git-tree-sha1 = "fb21ddd70a051d882a1686a5a550990bbe371a95" +uuid = "187b0558-2788-49d3-abe0-74a17ed4e7c9" +version = "1.4.1" + [[deps.CoordinateTransformations]] deps = ["LinearAlgebra", "StaticArrays"] git-tree-sha1 = "681ea870b918e7cff7111da58791d7f718067a19" @@ -265,9 +271,9 @@ version = "1.0.2" [[deps.DSP]] deps = ["Compat", "FFTW", "IterTools", "LinearAlgebra", "Polynomials", "Random", "Reexport", "SpecialFunctions", "Statistics"] -git-tree-sha1 = "3fb5d9183b38fdee997151f723da42fb83d1c6f2" +git-tree-sha1 = "4ba2a190a9d05a36e8c26182eb1ba06cd12c1051" uuid = "717857b8-e6f2-59f4-9121-6e50c889abd2" -version = "0.7.6" +version = "0.7.7" [[deps.DataAPI]] git-tree-sha1 = "fb5f5316dd3fd4c5e7c30a24d50643b73e37cd40" @@ -280,6 +286,11 @@ git-tree-sha1 = "88d48e133e6d3dd68183309877eac74393daa7eb" uuid = "864edb3b-99cc-5e75-8d2d-829cb0a9cfe8" version = "0.17.20" +[[deps.DataValueInterfaces]] +git-tree-sha1 = "bfc1187b79289637fa0ef6d4436ebdfe6905cbd6" +uuid = "e2d170a0-9d28-54be-80f0-106bbe20a464" +version = "1.0.0" + [[deps.Dates]] deps = ["Printf"] uuid = "ade2ca70-3891-5945-98fb-dc099432e06a" @@ -308,9 +319,9 @@ version = "1.0.3" [[deps.DiffRules]] deps = ["IrrationalConstants", "LogExpFunctions", "NaNMath", "Random", "SpecialFunctions"] -git-tree-sha1 = "28d605d9a0ac17118fe2c5e9ce0fbb76c3ceb120" +git-tree-sha1 = "992a23afdb109d0d2f8802a30cf5ae4b1fe7ea68" uuid = "b552c78f-8df3-52c6-915a-8e097449b14b" -version = "1.11.0" +version = "1.11.1" [[deps.Distances]] deps = ["LinearAlgebra", "SparseArrays", "Statistics", "StatsAPI"] @@ -381,21 +392,21 @@ version = "3.3.10+0" [[deps.FileIO]] deps = ["Pkg", "Requires", "UUIDs"] -git-tree-sha1 = "9267e5f50b0e12fdfd5a2455534345c4cf2c7f7a" +git-tree-sha1 = "94f5101b96d2d968ace56f7f2db19d0a5f592e28" uuid = "5789e2e9-d7fb-5bc7-8068-2c6fae9b9549" -version = "1.14.0" +version = "1.15.0" [[deps.FillArrays]] deps = ["LinearAlgebra", "Random", "SparseArrays", "Statistics"] -git-tree-sha1 = "246621d23d1f43e3b9c368bf3b72b2331a27c286" +git-tree-sha1 = "87519eb762f85534445f5cda35be12e32759ee14" uuid = "1a297f60-69ca-5386-bcde-b61e274b549b" -version = "0.13.2" +version = "0.13.4" [[deps.FiniteDiff]] -deps = ["ArrayInterfaceCore", "LinearAlgebra", "Requires", "SparseArrays", "StaticArrays"] -git-tree-sha1 = "ee13c773ce60d9e95a6c6ea134f25605dce2eda3" +deps = ["ArrayInterfaceCore", "LinearAlgebra", "Requires", "Setfield", "SparseArrays", "StaticArrays"] +git-tree-sha1 = "5a2cff9b6b77b33b89f3d97a4d367747adce647e" uuid = "6a86dc24-6348-571c-b903-95158fe2bd41" -version = "2.13.0" +version = "2.15.0" [[deps.FixedPointNumbers]] deps = ["Statistics"] @@ -411,15 +422,19 @@ version = "0.13.4" [[deps.ForwardDiff]] deps = ["CommonSubexpressions", "DiffResults", "DiffRules", "LinearAlgebra", "LogExpFunctions", "NaNMath", "Preferences", "Printf", "Random", "SpecialFunctions", "StaticArrays"] -git-tree-sha1 = "2f18915445b248731ec5db4e4a17e451020bf21e" +git-tree-sha1 = "187198a4ed8ccd7b5d99c41b69c679269ea2b2d4" uuid = "f6369f11-7733-5829-9624-2563aa707210" -version = "0.10.30" +version = "0.10.32" [[deps.Functors]] git-tree-sha1 = "223fffa49ca0ff9ce4f875be001ffe173b2b7de4" uuid = "d9f16b24-f501-4c13-a1f2-28368ffc5196" version = "0.2.8" +[[deps.Future]] +deps = ["Random"] +uuid = "9fa8497b-333b-5362-9e8d-4d0656e87820" + [[deps.FwiFlow]] deps = ["ADCME", "Conda", "DataStructures", "Dierckx", "JSON", "LinearAlgebra", "MAT", "Parameters", "PyCall", "PyPlot", "Random", "Reexport"] 
git-tree-sha1 = "c7b5d312cc268e58482149f20a006e0d9c6ba57f" @@ -430,21 +445,21 @@ version = "0.3.1" [[deps.GPUArrays]] deps = ["Adapt", "GPUArraysCore", "LLVM", "LinearAlgebra", "Printf", "Random", "Reexport", "Serialization", "Statistics"] -git-tree-sha1 = "470dcaf29237a0818bc2cc97f0c408f0bc052653" +git-tree-sha1 = "45d7deaf05cbb44116ba785d147c518ab46352d7" uuid = "0c68f7d7-f131-5f86-a1c3-88cf8149b2d7" -version = "8.4.1" +version = "8.5.0" [[deps.GPUArraysCore]] deps = ["Adapt"] -git-tree-sha1 = "4078d3557ab15dd9fe6a0cf6f65e3d4937e98427" +git-tree-sha1 = "6872f5ec8fd1a38880f027a26739d42dcda6691f" uuid = "46192b85-c4d5-4398-a991-12ede77f4527" -version = "0.1.0" +version = "0.1.2" [[deps.GPUCompiler]] deps = ["ExprTools", "InteractiveUtils", "LLVM", "Libdl", "Logging", "TimerOutputs", "UUIDs"] -git-tree-sha1 = "47f63159f7cb5d0e5e0cfd2f20454adea429bec9" +git-tree-sha1 = "ebb892e1df16040a845e1d11087e4fbfe10323a8" uuid = "61eb1bfa-7361-4325-ad38-22787b887f55" -version = "0.16.1" +version = "0.16.4" [[deps.Ghostscript_jll]] deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] @@ -460,9 +475,9 @@ version = "1.1.2" [[deps.Graphs]] deps = ["ArnoldiMethod", "Compat", "DataStructures", "Distributed", "Inflate", "LinearAlgebra", "Random", "SharedArrays", "SimpleTraits", "SparseArrays", "Statistics"] -git-tree-sha1 = "db5c7e27c0d46fd824d470a3c32a4fc6c935fa96" +git-tree-sha1 = "a6d30bdc378d340912f48abf01281aab68c0dec8" uuid = "86223c79-3864-5bf0-83f7-82e725a168b6" -version = "1.7.1" +version = "1.7.2" [[deps.HDF5]] deps = ["Compat", "HDF5_jll", "Libdl", "Mmap", "Random", "Requires"] @@ -472,15 +487,15 @@ version = "0.16.10" [[deps.HDF5_jll]] deps = ["Artifacts", "JLLWrappers", "LibCURL_jll", "Libdl", "OpenSSL_jll", "Pkg", "Zlib_jll"] -git-tree-sha1 = "bab67c0d1c4662d2c4be8c6007751b0b6111de5c" +git-tree-sha1 = "4cc2bb72df6ff40b055295fdef6d92955f9dede8" uuid = "0234f1f7-429e-5d53-9886-15a909be8d59" -version = "1.12.1+0" +version = "1.12.2+2" [[deps.HypergeometricFunctions]] -deps = ["DualNumbers", "LinearAlgebra", "SpecialFunctions", "Test"] -git-tree-sha1 = "cb7099a0109939f16a4d3b572ba8396b1f6c7c31" +deps = ["DualNumbers", "LinearAlgebra", "OpenLibm_jll", "SpecialFunctions", "Test"] +git-tree-sha1 = "709d864e3ed6e3545230601f94e11ebc65994641" uuid = "34004b35-14d8-5ef3-9330-4cdb6864b03a" -version = "0.3.10" +version = "0.3.11" [[deps.IRTools]] deps = ["InteractiveUtils", "MacroTools", "Test"] @@ -525,9 +540,9 @@ version = "0.2.16" [[deps.ImageFiltering]] deps = ["CatIndices", "ComputationalResources", "DataStructures", "FFTViews", "FFTW", "ImageBase", "ImageCore", "LinearAlgebra", "OffsetArrays", "Reexport", "SparseArrays", "StaticArrays", "Statistics", "TiledIteration"] -git-tree-sha1 = "15bd05c1c0d5dbb32a9a3d7e0ad2d50dd6167189" +git-tree-sha1 = "8b251ec0582187eff1ee5c0220501ef30a59d2f7" uuid = "6a3955dd-da59-5b1f-98d4-e7296123deb5" -version = "0.7.1" +version = "0.7.2" [[deps.ImageIO]] deps = ["FileIO", "IndirectArrays", "JpegTurbo", "LazyModules", "Netpbm", "OpenEXR", "PNGFiles", "QOI", "Sixel", "TiffImages", "UUIDs"] @@ -543,9 +558,9 @@ version = "1.2.1" [[deps.ImageMagick_jll]] deps = ["Artifacts", "Ghostscript_jll", "JLLWrappers", "JpegTurbo_jll", "Libdl", "Libtiff_jll", "Pkg", "Zlib_jll", "libpng_jll"] -git-tree-sha1 = "f025b79883f361fa1bd80ad132773161d231fd9f" +git-tree-sha1 = "124626988534986113cfd876e3093e4a03890f58" uuid = "c73af94c-d91f-53ed-93a7-00f77d67a9d7" -version = "6.9.12+2" +version = "6.9.12+3" [[deps.ImageMetadata]] deps = ["AxisArrays", "ImageAxes", "ImageBase", "ImageCore"] @@ 
-561,9 +576,9 @@ version = "0.3.2" [[deps.ImageQualityIndexes]] deps = ["ImageContrastAdjustment", "ImageCore", "ImageDistances", "ImageFiltering", "LazyModules", "OffsetArrays", "Statistics"] -git-tree-sha1 = "40c9e991dbe0782a1422e6dca6c487158f3ca848" +git-tree-sha1 = "0c703732335a75e683aec7fdfc6d5d1ebd7c596f" uuid = "2996bd0c-7a13-11e9-2da2-2f5ce47296a9" -version = "0.3.2" +version = "0.3.3" [[deps.ImageSegmentation]] deps = ["Clustering", "DataStructures", "Distances", "Graphs", "ImageCore", "ImageFiltering", "ImageMorphology", "LinearAlgebra", "MetaGraphs", "RegionTrees", "SimpleWeightedGraphs", "StaticArrays", "Statistics"] @@ -579,9 +594,9 @@ version = "0.3.6" [[deps.ImageTransformations]] deps = ["AxisAlgorithms", "ColorVectorSpace", "CoordinateTransformations", "ImageBase", "ImageCore", "Interpolations", "OffsetArrays", "Rotations", "StaticArrays"] -git-tree-sha1 = "42fe8de1fe1f80dab37a39d391b6301f7aeaa7b8" +git-tree-sha1 = "8717482f4a2108c9358e5c3ca903d3a6113badc9" uuid = "02fcd773-0e25-5acc-982a-7f6622650795" -version = "0.9.4" +version = "0.9.5" [[deps.Images]] deps = ["Base64", "FileIO", "Graphics", "ImageAxes", "ImageBase", "ImageContrastAdjustment", "ImageCore", "ImageDistances", "ImageFiltering", "ImageIO", "ImageMagick", "ImageMetadata", "ImageMorphology", "ImageQualityIndexes", "ImageSegmentation", "ImageShow", "ImageTransformations", "IndirectArrays", "IntegralArrays", "Random", "Reexport", "SparseArrays", "StaticArrays", "Statistics", "StatsBase", "TiledIteration"] @@ -601,9 +616,9 @@ uuid = "9b13fd28-a010-5f03-acff-a1bbcff69959" version = "1.0.0" [[deps.Inflate]] -git-tree-sha1 = "f5fc07d4e706b84f72d54eedcc1c13d92fb0871c" +git-tree-sha1 = "5cd07aab533df5170988219191dfad0519391428" uuid = "d25df0c9-e2be-5dd7-82c8-3ad0b3e990b9" -version = "0.1.2" +version = "0.1.3" [[deps.InplaceOps]] deps = ["LinearAlgebra", "Test"] @@ -640,9 +655,9 @@ version = "0.13.6" [[deps.IntervalSets]] deps = ["Dates", "Random", "Statistics"] -git-tree-sha1 = "57af5939800bce15980bddd2426912c4f83012d8" +git-tree-sha1 = "076bb0da51a8c8d1229936a1af7bdfacd65037e1" uuid = "8197267c-284f-5f27-9208-e0e47529a953" -version = "0.7.1" +version = "0.7.2" [[deps.InverseFunctions]] deps = ["Test"] @@ -674,6 +689,11 @@ git-tree-sha1 = "1169632f425f79429f245113b775a0e3d121457c" uuid = "42fd0dbc-a981-5370-80f2-aaf504508153" version = "0.9.2" +[[deps.IteratorInterfaceExtensions]] +git-tree-sha1 = "a3f24677c21f5bbe9d2a714f95dcd58337fb2856" +uuid = "82899510-4779-5014-852e-03e436cf321d" +version = "1.0.0" + [[deps.JLD2]] deps = ["FileIO", "MacroTools", "Mmap", "OrderedCollections", "Pkg", "Printf", "Reexport", "TranscodingStreams", "UUIDs"] git-tree-sha1 = "81b9477b49402b47fbe7f7ae0b252077f53e4a08" @@ -688,9 +708,9 @@ version = "1.4.1" [[deps.JOLI]] deps = ["Distributed", "DistributedArrays", "FFTW", "InplaceOps", "InteractiveUtils", "IterativeSolvers", "Libdl", "LinearAlgebra", "NFFT", "Nullables", "Printf", "PyCall", "Random", "SharedArrays", "SparseArrays", "SpecialFunctions", "Wavelets"] -git-tree-sha1 = "6e6ada8b6ec1c37d8a4807e49c0d75615d9f0b6d" +git-tree-sha1 = "aa9069d95c8e061e86725614fb2211fa998bba7c" uuid = "bb331ad6-a1cf-11e9-23da-9bcb53c69f6f" -version = "0.8.0" +version = "0.8.1" [[deps.JSON]] deps = ["Dates", "Mmap", "Parsers", "Unicode"] @@ -791,9 +811,9 @@ uuid = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" [[deps.LogExpFunctions]] deps = ["ChainRulesCore", "ChangesOfVariables", "DocStringExtensions", "InverseFunctions", "IrrationalConstants", "LinearAlgebra"] -git-tree-sha1 = 
"09e4b894ce6a976c354a69041a04748180d43637" +git-tree-sha1 = "94d9c52ca447e23eac0c0f074effbcd38830deb5" uuid = "2ab3a3ac-af41-5b50-aa03-7779005ae688" -version = "0.3.15" +version = "0.3.18" [[deps.Logging]] uuid = "56ddb016-857b-54e1-b83d-db4d58db5568" @@ -806,9 +826,9 @@ version = "0.10.3" [[deps.MKL_jll]] deps = ["Artifacts", "IntelOpenMP_jll", "JLLWrappers", "LazyArtifacts", "Libdl", "Pkg"] -git-tree-sha1 = "e595b205efd49508358f7dc670a940c790204629" +git-tree-sha1 = "41d162ae9c868218b1f3fe78cba878aa348c2d26" uuid = "856f044c-d86e-5d09-b602-aeab76dc8ba7" -version = "2022.0.0+0" +version = "2022.1.0+0" [[deps.MLUtils]] deps = ["ChainRulesCore", "DelimitedFiles", "Random", "ShowCases", "Statistics", "StatsBase"] @@ -864,12 +884,6 @@ version = "0.3.3" [[deps.MozillaCACerts_jll]] uuid = "14a3606d-f60d-562e-9121-12d972cd8159" -[[deps.MutableArithmetics]] -deps = ["LinearAlgebra", "SparseArrays", "Test"] -git-tree-sha1 = "4e675d6e9ec02061800d6cfb695812becbd03cdf" -uuid = "d8a4904e-b15c-11e9-3269-09a3773c0cb0" -version = "1.0.4" - [[deps.NFFT]] deps = ["AbstractNFFTs", "Distributed", "FFTW", "Graphics", "LinearAlgebra", "Polyester", "Printf", "Random", "Reexport", "SparseArrays", "SpecialFunctions"] git-tree-sha1 = "b1a9c00edd709a8dddbcff78ef5466c74a781b0d" @@ -947,9 +961,9 @@ uuid = "05823500-19ac-5b8b-9628-191a04bc5112" [[deps.OpenSSL_jll]] deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] -git-tree-sha1 = "9a36165cf84cff35851809a40a928e1103702013" +git-tree-sha1 = "e60321e3f2616584ff98f0a4f18d98ae6f89bbb3" uuid = "458c3c95-2e84-50aa-8efc-19380b2a3a95" -version = "1.1.16+0" +version = "1.1.17+0" [[deps.OpenSpecFun_jll]] deps = ["Artifacts", "CompilerSupportLibraries_jll", "JLLWrappers", "Libdl", "Pkg"] @@ -959,9 +973,9 @@ version = "0.5.5+0" [[deps.Optimisers]] deps = ["ChainRulesCore", "Functors", "LinearAlgebra", "Random", "Statistics"] -git-tree-sha1 = "afb2b39a354025a6db6decd68f2ef5353e8ff1ae" +git-tree-sha1 = "1ef34738708e3f31994b52693286dabcb3d29f6b" uuid = "3bd65402-5787-11e9-1adc-39752487f4e2" -version = "0.2.7" +version = "0.2.9" [[deps.OrderedCollections]] git-tree-sha1 = "85f8e6578bf1f9ee0d11e7bb1b1456435479d47c" @@ -970,9 +984,9 @@ version = "1.4.1" [[deps.PDMats]] deps = ["LinearAlgebra", "SparseArrays", "SuiteSparse"] -git-tree-sha1 = "9351c1a6c0e922cc862bd96822a98c9029a6d142" +git-tree-sha1 = "cf494dca75a69712a72b80bc48f59dcf3dea63ec" uuid = "90014a1f-27ba-587c-ab20-58faa44d9150" -version = "0.11.15" +version = "0.11.16" [[deps.PNGFiles]] deps = ["Base64", "CEnum", "ImageCore", "IndirectArrays", "OffsetArrays", "libpng_jll"] @@ -994,9 +1008,9 @@ version = "0.12.3" [[deps.Parsers]] deps = ["Dates"] -git-tree-sha1 = "0044b23da09b5608b4ecacb4e5e6c6332f833a7e" +git-tree-sha1 = "3d5bf43e3e8b412656404ed9466f1dcbf7c50269" uuid = "69de0a69-1ddd-5017-9359-2bf0b02dc9f0" -version = "2.3.2" +version = "2.4.0" [[deps.Pkg]] deps = ["Artifacts", "Dates", "Downloads", "LibGit2", "Libdl", "Logging", "Markdown", "Printf", "REPL", "Random", "SHA", "Serialization", "TOML", "Tar", "UUIDs", "p7zip_jll"] @@ -1010,21 +1024,21 @@ version = "0.1.1" [[deps.Polyester]] deps = ["ArrayInterface", "BitTwiddlingConvenienceFunctions", "CPUSummary", "IfElse", "ManualMemory", "PolyesterWeave", "Requires", "Static", "StrideArraysCore", "ThreadingUtilities"] -git-tree-sha1 = "97bbf8dc886d67ff0dd1f56cfc0ee18b7bb7f8ce" +git-tree-sha1 = "6ee5518f7baa05e154757a003bfb6936a174dbad" uuid = "f517fe37-dbe3-4b94-8317-1923a5111588" -version = "0.6.13" +version = "0.6.15" [[deps.PolyesterWeave]] deps = 
["BitTwiddlingConvenienceFunctions", "CPUSummary", "IfElse", "Static", "ThreadingUtilities"] -git-tree-sha1 = "4cd738fca4d826bef1a87cbe43196b34fa205e6d" +git-tree-sha1 = "b42fb2292fbbaed36f25d33a15c8cc0b4f287fcf" uuid = "1d0040c9-8b98-4ee7-8388-3f51789ca0ad" -version = "0.1.6" +version = "0.1.10" [[deps.Polynomials]] -deps = ["LinearAlgebra", "MutableArithmetics", "RecipesBase"] -git-tree-sha1 = "5d389e6481b9d6c81d73ee9a74d1fd74f8b25abe" +deps = ["LinearAlgebra", "RecipesBase"] +git-tree-sha1 = "3010a6dd6ad4c7384d2f38c58fa8172797d879c1" uuid = "f27b6e38-b328-58d1-80ce-0feddd5e7a45" -version = "3.1.4" +version = "3.2.0" [[deps.Preferences]] deps = ["TOML"] @@ -1060,9 +1074,9 @@ version = "1.7.2" [[deps.PyCall]] deps = ["Conda", "Dates", "Libdl", "LinearAlgebra", "MacroTools", "Serialization", "VersionParsing"] -git-tree-sha1 = "1fc929f47d7c151c839c5fc1375929766fb8edcc" +git-tree-sha1 = "53b8b07b721b77144a0fbbbc2675222ebf40a02d" uuid = "438e738f-606a-5dbb-bf0a-cddfbfd45ab0" -version = "1.93.1" +version = "1.94.1" [[deps.PyPlot]] deps = ["Colors", "LaTeXStrings", "PyCall", "Sockets", "Test", "VersionParsing"] @@ -1078,9 +1092,9 @@ version = "1.0.0" [[deps.QuadGK]] deps = ["DataStructures", "LinearAlgebra"] -git-tree-sha1 = "78aadffb3efd2155af139781b8a8df1ef279ea39" +git-tree-sha1 = "3c009334f45dfd546a16a57960a821a1a023d241" uuid = "1fd47b50-473d-5c70-9696-f719f8f3bcdc" -version = "2.4.2" +version = "2.5.0" [[deps.Quaternions]] deps = ["DualNumbers", "LinearAlgebra", "Random"] @@ -1098,9 +1112,9 @@ uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" [[deps.Random123]] deps = ["Random", "RandomNumbers"] -git-tree-sha1 = "afeacaecf4ed1649555a19cb2cad3c141bbc9474" +git-tree-sha1 = "7a1a306b72cfa60634f03a911405f4e64d1b718b" uuid = "74087812-796a-5b5d-8853-05524746bad3" -version = "1.5.0" +version = "1.6.0" [[deps.RandomNumbers]] deps = ["Random", "Requires"] @@ -1161,9 +1175,9 @@ version = "0.3.0+0" [[deps.Rotations]] deps = ["LinearAlgebra", "Quaternions", "Random", "StaticArrays", "Statistics"] -git-tree-sha1 = "3177100077c68060d63dd71aec209373c3ec339b" +git-tree-sha1 = "3d52be96f2ff8a4591a9e2440036d4339ac9a2f7" uuid = "6038ab10-8711-5258-84ad-4b1120ba62dc" -version = "1.3.1" +version = "1.3.2" [[deps.SHA]] uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce" @@ -1175,9 +1189,9 @@ version = "0.1.0" [[deps.Scratch]] deps = ["Dates"] -git-tree-sha1 = "0b4b7f1393cff97c33891da2a0bf69c6ed241fda" +git-tree-sha1 = "f94f779c94e58bf9ea243e77a37e16d9de9126bd" uuid = "6c6a2e73-6563-6170-7368-637461726353" -version = "1.1.0" +version = "1.1.1" [[deps.SegyIO]] deps = ["Distributed", "Printf", "Test"] @@ -1202,6 +1216,12 @@ git-tree-sha1 = "e6ac290d9a448a9d309af4b02ecf8ba2e0dfc944" uuid = "335f7d24-6316-57dd-9c3a-df470f2b739e" version = "0.2.3" +[[deps.Setfield]] +deps = ["ConstructionBase", "Future", "MacroTools", "StaticArraysCore"] +git-tree-sha1 = "e2cc6d8c88613c05e1defb55170bf5ff211fbeac" +uuid = "efcf1570-3423-57d1-acb7-fd33fddbac46" +version = "1.1.1" + [[deps.SharedArrays]] deps = ["Distributed", "Mmap", "Random", "Serialization"] uuid = "1a1011a3-84de-559e-8e89-a11a2f7dc383" @@ -1264,20 +1284,20 @@ version = "0.1.1" [[deps.Static]] deps = ["IfElse"] -git-tree-sha1 = "46638763d3a25ad7818a15d441e0c3446a10742d" +git-tree-sha1 = "f94f9d627ba3f91e41a815b9f9f977d729e2e06f" uuid = "aedffcd0-7271-4cad-89d0-dc628f76c6d3" -version = "0.7.5" +version = "0.7.6" [[deps.StaticArrays]] deps = ["LinearAlgebra", "Random", "StaticArraysCore", "Statistics"] -git-tree-sha1 = "9f8a5dc5944dc7fbbe6eb4180660935653b0a9d9" +git-tree-sha1 
= "efa8acd030667776248eabb054b1836ac81d92f0" uuid = "90137ffa-7385-5640-81b9-e52037218182" -version = "1.5.0" +version = "1.5.7" [[deps.StaticArraysCore]] -git-tree-sha1 = "66fe9eb253f910fe8cf161953880cfdaef01cdf0" +git-tree-sha1 = "ec2bd695e905a3c755b33026954b119ea17f2d22" uuid = "1e83bf80-4336-4d27-bf5d-d5a4f845583c" -version = "1.0.1" +version = "1.3.0" [[deps.Statistics]] deps = ["LinearAlgebra", "SparseArrays"] @@ -1285,15 +1305,15 @@ uuid = "10745b16-79ce-11e8-11f9-7d13ad32a3b2" [[deps.StatsAPI]] deps = ["LinearAlgebra"] -git-tree-sha1 = "2c11d7290036fe7aac9038ff312d3b3a2a5bf89e" +git-tree-sha1 = "f9af7f195fb13589dd2e2d57fdb401717d2eb1f6" uuid = "82ae8749-77ed-4fe6-ae5f-f523153014b0" -version = "1.4.0" +version = "1.5.0" [[deps.StatsBase]] deps = ["DataAPI", "DataStructures", "LinearAlgebra", "LogExpFunctions", "Missings", "Printf", "Random", "SortingAlgorithms", "SparseArrays", "Statistics", "StatsAPI"] -git-tree-sha1 = "48598584bacbebf7d30e20880438ed1d24b7c7d6" +git-tree-sha1 = "d1bf48bfcc554a3761a133fe3a9bb01488e06916" uuid = "2913bbd2-ae8a-5f71-8c99-4fb6c76f3a91" -version = "0.33.18" +version = "0.33.21" [[deps.StatsFuns]] deps = ["ChainRulesCore", "HypergeometricFunctions", "InverseFunctions", "IrrationalConstants", "LogExpFunctions", "Reexport", "Rmath", "SpecialFunctions"] @@ -1307,6 +1327,12 @@ git-tree-sha1 = "ac730bd978bf35f9fe45daa0bd1f51e493e97eb4" uuid = "7792a7ef-975c-4747-a70f-980b88e8d1da" version = "0.3.15" +[[deps.StructArrays]] +deps = ["Adapt", "DataAPI", "StaticArraysCore", "Tables"] +git-tree-sha1 = "8c6ac65ec9ab781af05b08ff305ddc727c25f680" +uuid = "09ab397b-f2b6-538f-b94a-2f83cf4a842a" +version = "0.6.12" + [[deps.SuiteSparse]] deps = ["Libdl", "LinearAlgebra", "Serialization", "SparseArrays"] uuid = "4607b0f0-06f3-5cda-b6b1-a6196a1729e9" @@ -1315,6 +1341,18 @@ uuid = "4607b0f0-06f3-5cda-b6b1-a6196a1729e9" deps = ["Dates"] uuid = "fa267f1f-6049-4f14-aa54-33bafae1ed76" +[[deps.TableTraits]] +deps = ["IteratorInterfaceExtensions"] +git-tree-sha1 = "c06b2f539df1c6efa794486abfb6ed2022561a39" +uuid = "3783bdb8-4a98-5b6b-af9a-565f29a5fe9c" +version = "1.0.1" + +[[deps.Tables]] +deps = ["DataAPI", "DataValueInterfaces", "IteratorInterfaceExtensions", "LinearAlgebra", "OrderedCollections", "TableTraits", "Test"] +git-tree-sha1 = "5ce79ce186cc678bbb5c5681ca3379d1ddae11a1" +uuid = "bd369af6-aec1-5ad0-b16a-f7cc5008161c" +version = "1.7.0" + [[deps.Tar]] deps = ["ArgTools", "SHA"] uuid = "a4e569a6-e804-4fa4-b0f3-eef7a1d5b13e" @@ -1349,15 +1387,15 @@ version = "0.3.1" [[deps.TimerOutputs]] deps = ["ExprTools", "Printf"] -git-tree-sha1 = "464d64b2510a25e6efe410e7edab14fffdc333df" +git-tree-sha1 = "9dfcb767e17b0849d6aaf85997c98a5aea292513" uuid = "a759f4b9-e2f1-59dc-863e-4aeb61b1ea8f" -version = "0.5.20" +version = "0.5.21" [[deps.TranscodingStreams]] deps = ["Random", "Test"] -git-tree-sha1 = "216b95ea110b5972db65aa90f88d8d89dcb8851c" +git-tree-sha1 = "8a75929dcd3c38611db2f8d08546decb514fcadf" uuid = "3bb67fe8-82b1-5028-8e26-92a6c54297fa" -version = "0.9.6" +version = "0.9.9" [[deps.URIParser]] deps = ["Unicode"] @@ -1405,10 +1443,10 @@ uuid = "3161d3a3-bdf6-5164-811a-617609db77b4" version = "1.5.2+0" [[deps.Zygote]] -deps = ["AbstractFFTs", "ChainRules", "ChainRulesCore", "DiffRules", "Distributed", "FillArrays", "ForwardDiff", "IRTools", "InteractiveUtils", "LinearAlgebra", "LogExpFunctions", "MacroTools", "NaNMath", "Random", "Requires", "SparseArrays", "SpecialFunctions", "Statistics", "ZygoteRules"] -git-tree-sha1 = "3cfdb31b517eec4173584fba2b1aa65daad46e09" +deps = 
["AbstractFFTs", "ChainRules", "ChainRulesCore", "DiffRules", "Distributed", "FillArrays", "ForwardDiff", "GPUArrays", "GPUArraysCore", "IRTools", "InteractiveUtils", "LinearAlgebra", "LogExpFunctions", "MacroTools", "NaNMath", "Random", "Requires", "SparseArrays", "SpecialFunctions", "Statistics", "ZygoteRules"] +git-tree-sha1 = "c3f4af6b167f0c181148650221a4cbabf6bbb8a6" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.41" +version = "0.6.47" [[deps.ZygoteRules]] deps = ["MacroTools"] @@ -1427,10 +1465,10 @@ uuid = "b53b4c65-9356-5827-b1ea-8c7a1a84506f" version = "1.6.38+0" [[deps.libsixel_jll]] -deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] -git-tree-sha1 = "78736dab31ae7a53540a6b752efc61f77b304c5b" +deps = ["Artifacts", "JLLWrappers", "JpegTurbo_jll", "Libdl", "Pkg", "libpng_jll"] +git-tree-sha1 = "d4f63314c8aa1e48cd22aa0c17ed76cd1ae48c3c" uuid = "075b6546-f08a-558a-be8f-8157d0f608a5" -version = "1.8.6+1" +version = "1.10.3+0" [[deps.nghttp2_jll]] deps = ["Artifacts", "Libdl"] diff --git a/scripts/fourier_3d_compass.jl b/scripts/fourier_3d_compass.jl new file mode 100644 index 0000000..7d815c6 --- /dev/null +++ b/scripts/fourier_3d_compass.jl @@ -0,0 +1,224 @@ +# author: Ziyi Yin, ziyi.yin@gatech.edu +# This script trains a Fourier Neural Operator which maps 2D permeability distribution to time-varying CO2 concentration snapshots. +# The PDE is in 2D while FNO requires 3D FFT + +using DrWatson +@quickactivate "FNO4CO2" + +using FNO4CO2 +using PyPlot +using JLD2 +using Flux, Random, FFTW +using MAT, Statistics, LinearAlgebra +using CUDA +using ProgressMeter +using InvertibleNetworks:ActNorm +matplotlib.use("Agg") + +Random.seed!(1234) + +# Define raw data directory +mkpath(datadir("training-data")) +perm_path = datadir("training-data", "perm_compass.jld2") +conc_path = datadir("training-data", "conc_compass.jld2") +qgrid_path = datadir("training-data", "qgrid_compass.jld2") + +# Download the dataset into the data directory if it does not exist +if ~isfile(perm_path) + run(`wget https://www.dropbox.com/s/71r9oidb10georq/' + 'perm_compass.jld2 -q -O $perm_path`) +end +if ~isfile(conc_path) + run(`wget https://www.dropbox.com/s/kuuuyjhj439cynq/' + 'conc_compass.jld2 -q -O $conc_path`) +end +if ~isfile(qgrid_path) + run(`wget https://www.dropbox.com/s/9sekkojxt8fyslo/' + 'qgrid_compass.jld2 -q -O $qgrid_path`) +end + +JLD2.@load perm_path perm; +JLD2.@load conc_path conc; +JLD2.@load qgrid_path qgrid; + +nsamples = size(perm, 3) + +ntrain = 1000 +nvalid = 50 + +batch_size = 20 # effective one +computational_batch_size = 2 # of samples that still fit on GPU +grad_accum_iter = batch_size/computational_batch_size # accumulate these many gradients +learning_rate = 2f-4 + +epochs = 500 + +modes = [32, 16, 8] +width = 20 + +n = (size(perm, 1), size(perm, 2)) +#d = (12f0,12f0) # dx, dy in m +d = 1f0./n + +s = 1 + +nt = size(conc,1) +#dt = 20f0 # dt in day +dt = 1f0/(nt-1) + +AN = ActNorm(ntrain) +norm_perm = AN.forward(reshape(perm[1:s:end,1:s:end,1:ntrain], n[1], n[2], 1, ntrain)); + +y_train = permutedims(conc[1:nt,1:s:end,1:s:end,1:ntrain],[2,3,1,4]); +y_valid = permutedims(conc[1:nt,1:s:end,1:s:end,ntrain+1:ntrain+nvalid],[2,3,1,4]); + +grid = gen_grid(n, d, nt, dt) + +x_train = perm[1:s:end,1:s:end,1:ntrain]; +x_valid = perm[1:s:end,1:s:end,ntrain+1:ntrain+nvalid]; + +qgrid_train = qgrid[:,1:ntrain]; +qgrid_valid = qgrid[:,ntrain+1:ntrain+nvalid]; + +train_loader = Flux.Data.DataLoader((x_train, qgrid_train, y_train); batchsize = computational_batch_size, shuffle = true) 
+valid_loader = Flux.Data.DataLoader((x_valid, qgrid_valid, y_valid); batchsize = computational_batch_size, shuffle = true)
+
+NN = Net3d(modes, width; in_channels=5)
+gpu_flag && (global NN = NN |> gpu)
+
+Flux.trainmode!(NN, true)
+w = Flux.params(NN)
+
+opt = Flux.Optimise.ADAMW(learning_rate, (0.9f0, 0.999f0), 1f-4)
+nbatches = Int(floor(ntrain/computational_batch_size))
+
+Loss = zeros(Float32,epochs*nbatches)
+Loss_valid = zeros(Float32, epochs)
+prog = Progress(ntrain * epochs)
+
+# Define result directory
+
+sim_name = "3D_FNO"
+exp_name = "2phaseflow-compass"
+
+save_dict = @strdict exp_name
+plot_path = plotsdir(sim_name, savename(save_dict; digits=6))
+
+# plot figure
+x_plot = x_valid[:, :, 1:1]
+q_plot = qgrid_valid[:,1:1]
+y_plot = y_valid[:, :, :, 1]
+
+function q_tensorize(q::Matrix{Int64})
+    q_tensor = zeros(Float32, n[1], n[2], nt, 1, size(q,2));
+    for i = 1:size(q,2)
+        q_tensor[q[1,i],q[2,i],:,1,i] .= 3f-1    ## q location, injection rate = 3f-1
+    end
+    return q_tensor
+end
+
+## training
+iter = 0
+for ep = 1:epochs
+    Base.flush(Base.stdout)
+
+    ## update
+    Flux.trainmode!(NN, true);
+    for (x,q,y) in train_loader
+        global iter = iter + 1
+        x = cat(perm_to_tensor(x,grid,AN), q_tensorize(q), dims=4)
+        if gpu_flag
+            x = x |> gpu
+            y = y |> gpu
+        end
+        grads = gradient(w) do
+            global loss = norm(relu01(NN(x))-y)/norm(y)
+            return loss
+        end
+        (iter==1) && (global grads_sum = 0f0 .* grads)
+        global grads_sum = grads_sum .+ grads
+        Loss[iter] = loss
+        if mod(iter, grad_accum_iter) == 0
+            for p in w
+                Flux.Optimise.update!(opt, p, grads_sum[p])
+            end
+            grads_sum = 0f0 .* grads_sum
+        end
+        ProgressMeter.next!(prog; showvalues = [(:loss, loss), (:epoch, ep), (:iter, iter)])
+    end
+
+    Flux.testmode!(NN, true);
+    y_predict = relu01(NN(cat(perm_to_tensor(x_plot,grid,AN), q_tensorize(q_plot), dims=4)|>gpu))|>cpu
+
+    fig, ax = subplots(4,5,figsize=(20, 12))
+
+    for i = 1:5
+        ax[1, i][:axis]("off")
+        ax[1, i].imshow(x_plot[:,:,1]')
+        ax[1, i].set_title("x")
+
+        ax[2, i][:axis]("off")
+        ax[2, i].imshow(y_plot[:,:,4*i+1,1]', vmin=0, vmax=1)
+        ax[2, i].set_title("true y")
+
+        ax[3, i][:axis]("off")
+        ax[3, i].imshow(y_predict[:,:,4*i+1,1]', vmin=0, vmax=1)
+        ax[3, i].set_title("predict y")
+
+        ax[4, i][:axis]("off")
+        ax[4, i].imshow(5f0 .* abs.(y_plot[:,:,4*i+1,1]'-y_predict[:,:,4*i+1,1]'), vmin=0, vmax=1)
+        ax[4, i].set_title("5X abs difference")
+
+    end
+    tight_layout()
+    fig_name = @strdict ep batch_size Loss modes width learning_rate epochs s n d nt dt AN ntrain nvalid computational_batch_size
+    safesave(joinpath(plot_path, savename(fig_name; digits=6)*"_3Dfno_fitting.png"), fig);
+    close(fig)
+
+    NN_save = NN |> cpu
+    w_save = Flux.params(NN_save)
+
+    for (x,q,y) in valid_loader
+        Loss_valid[ep] = norm(relu01(NN_save(cat(perm_to_tensor(x,grid,AN), q_tensorize(q), dims=4)))-y)/norm(y)
+        break
+    end
+
+    loss_train = Loss[1:ep*nbatches]
+    loss_valid = Loss_valid[1:ep]
+    fig = figure(figsize=(20, 12))
+    subplot(1,3,1)
+    plot(loss_train)
+    title("training loss at epoch $ep")
+    subplot(1,3,2)
+    plot(1:nbatches:nbatches*ep, loss_valid);
+    title("validation loss at epoch $ep")
+    subplot(1,3,3)
+    plot(loss_train);
+    plot(1:nbatches:nbatches*ep, loss_valid);
+    xlabel("iterations")
+    ylabel("value")
+    title("Objective function at epoch $ep")
+    legend(["training", "validation"])
+    tight_layout();
+    safesave(joinpath(plot_path, savename(fig_name; digits=6)*"_3Dfno_loss.png"), fig);
+    close(fig);
+
+    param_dict = @strdict ep NN_save w_save batch_size Loss modes width learning_rate epochs s n d nt dt AN ntrain nvalid loss_train loss_valid
computational_batch_size + @tagsave( + datadir(sim_name, savename(param_dict, "jld2"; digits=6)), + param_dict; + safe=true + ) + +end + +NN_save = NN |> cpu; +w_save = params(NN_save); + +final_dict = @strdict Loss Loss_valid epochs NN_save w_save batch_size Loss modes width learning_rate s n d nt dt AN ntrain nvalid computational_batch_size; + +@tagsave( + datadir(sim_name, savename(final_dict, "jld2"; digits=6)), + final_dict; + safe=true +); diff --git a/scripts/fourier_3d_compass_grad.jl b/scripts/fourier_3d_compass_grad.jl new file mode 100644 index 0000000..5443021 --- /dev/null +++ b/scripts/fourier_3d_compass_grad.jl @@ -0,0 +1,280 @@ +# author: Ziyi Yin, ziyi.yin@gatech.edu +# This script trains a Fourier Neural Operator which maps 2D permeability distribution to time-varying CO2 concentration snapshots. +# The PDE is in 2D while FNO requires 3D FFT + +using DrWatson +@quickactivate "FNO4CO2" + +using FNO4CO2 +using PyPlot +using JLD2 +using Flux, Random, FFTW +using MAT, Statistics, LinearAlgebra +using CUDA +using ProgressMeter +using InvertibleNetworks:ActNorm +using LineSearches +using PyCall +@pyimport numpy.ma as ma +matplotlib.use("Agg") + +Random.seed!(1234) + +# Define raw data directory +mkpath(datadir("training-data")) +perm_path = datadir("training-data", "perm_compass.jld2") +conc_path = datadir("training-data", "conc_compass.jld2") +qgrid_path = datadir("training-data", "qgrid_compass.jld2") + +# Download the dataset into the data directory if it does not exist +if ~isfile(perm_path) + run(`wget https://www.dropbox.com/s/71r9oidb10georq/' + 'perm_compass.jld2 -q -O $perm_path`) +end +if ~isfile(conc_path) + run(`wget https://www.dropbox.com/s/kuuuyjhj439cynq/' + 'conc_compass.jld2 -q -O $conc_path`) +end +if ~isfile(qgrid_path) + run(`wget https://www.dropbox.com/s/9sekkojxt8fyslo/' + 'qgrid_compass.jld2 -q -O $qgrid_path`) +end + +JLD2.@load perm_path perm; +JLD2.@load conc_path conc; +JLD2.@load qgrid_path qgrid; + +nsamples = size(perm, 3) + +ntrain = 1000 +nvalid = 50 + +batch_size = 20 # effective one +computational_batch_size = 2 # of samples that still fit on GPU +grad_accum_iter = batch_size/computational_batch_size # accumulate these many gradients +learning_rate = 2f-4 + +epochs = 500 + +modes = [32, 16, 8] +width = 20 + +n = (size(perm, 1), size(perm, 2)) +#d = (12f0,12f0) # dx, dy in m +d = 1f0./n + +s = 1 + +nt = size(conc,1) +#dt = 20f0 # dt in day +dt = 1f0/(nt-1) + +JLD2.@load "../data/3D_FNO/batch_size=20_computational_batch_size=2_dt=0.05_ep=150_epochs=500_learning_rate=0.0002_nt=21_ntrain=1000_nvalid=50_s=1_width=20.jld2"; + +y_train = permutedims(conc[1:nt,1:s:end,1:s:end,1:ntrain],[2,3,1,4]); +y_valid = permutedims(conc[1:nt,1:s:end,1:s:end,ntrain+1:ntrain+nvalid],[2,3,1,4]); + +grid = gen_grid(n, d, nt, dt) + +x_train = perm[1:s:end,1:s:end,1:ntrain]; +x_valid = perm[1:s:end,1:s:end,ntrain+1:ntrain+nvalid]; + +qgrid_train = qgrid[:,1:ntrain]; +qgrid_valid = qgrid[:,ntrain+1:ntrain+nvalid]; + +train_loader = Flux.Data.DataLoader((x_train, qgrid_train, y_train); batchsize = computational_batch_size, shuffle = true); +valid_loader = Flux.Data.DataLoader((x_valid, qgrid_valid, y_valid); batchsize = computational_batch_size, shuffle = true); + +# load the network +JLD2.@load "../data/3D_FNO/batch_size=20_computational_batch_size=2_dt=0.05_ep=150_epochs=500_learning_rate=0.0002_nt=21_ntrain=1000_nvalid=50_s=1_width=20.jld2"; +NN = deepcopy(NN_save); +Flux.testmode!(NN, true); +gpu_flag && (global NN = NN |> gpu); + +opt = 
Flux.Optimise.ADAMW(learning_rate, (0.9f0, 0.999f0), 1f-4); +nbatches = Int(floor(ntrain/computational_batch_size)) + +Loss = zeros(Float32,epochs*nbatches) +Loss_valid = zeros(Float32, epochs) +prog = Progress(ntrain * epochs) + +# Define result directory + +sim_name = "FNOinversion" +exp_name = "2phaseflow-compass" + +save_dict = @strdict exp_name +plot_path = plotsdir(sim_name, savename(save_dict; digits=6)) + +# plot figure +x_plot = x_valid[:, :, 1:1]; +q_plot = qgrid_valid[:,1:1]; +y_plot = y_valid[:, :, :, 1]; + +function q_tensorize(q::Matrix{Int64}) + q_tensor = zeros(Float32, n[1], n[2], nt, 1, size(q,2)); + for i = 1:size(q,2) + q_tensor[q[1,i],q[2,i],:,1,i] .= 3f-1 ## q location, injection rate = 3f-1 + end + return q_tensor |> gpu +end +q_tensorize(q::Vector{Int64}) = q_tensorize(reshape(q, :, 1)) + +@time y_predict = relu01(NN(cat(perm_to_tensor(x_plot|>gpu,grid,AN|>gpu), q_tensorize(q_plot), dims=4)))|>cpu; + +function plotK(K, ax) + p1 = pycall(ma.masked_greater, Any, K, 50) + p2 = pycall(ma.masked_less, Any, K, 50) + p3 = pycall(ma.masked_greater, Any, K, 10) + + ax.imshow(p2,interpolation="None",cmap="Reds",vmin=240,vmax=280) + ax.imshow(p1,interpolation="None",cmap="winter",vmin=16,vmax=18); + ax.imshow(p3,interpolation="None",cmap="Greys",vmin=0f0,vmax=1f-8) +end + +fig, ax = subplots(4,5,figsize=(20, 12)) + +for i = 1:5 + ax[1, i][:axis]("off") + plotK(x_plot[:,:,1]', ax[1,i]) + ax[1, i].set_title("x") + + ax[2, i][:axis]("off") + ax[2, i].imshow(y_plot[:,:,4*i+1,1]', vmin=0, vmax=1) + ax[2, i].set_title("true y") + + ax[3, i][:axis]("off") + ax[3, i].imshow(y_predict[:,:,4*i+1,1]', vmin=0, vmax=1) + ax[3, i].set_title("predict y") + + ax[4, i][:axis]("off") + ax[4, i].imshow(5f0 .* abs.(y_plot[:,:,4*i+1,1]'-y_predict[:,:,4*i+1,1]'), vmin=0, vmax=1) + ax[4, i].set_title("5X abs difference") + +end + +tight_layout() + +# plot figure +x_true = perm[:, :, end]; +q_true = qgrid[:,end]; +y_true = permutedims(conc[:, :, :, end], [2,3,1]); +gpu_flag && (global y_true = y_true |> gpu); +x = mean(perm[:,:,1:ntrain], dims=3)[:,:,1] |> gpu; +gpu_flag && (global x = x |> gpu); +x_init = deepcopy(x) |> cpu; + +qtensor = q_tensorize(q_true); +function S(x) + return relu01(NN(cat(perm_to_tensor(x,grid,AN|>gpu), qtensor, dims=4))); +end + +# function value +function f(x) + println("evaluate f") + loss = 0.5f0 * norm(S(x)-y_true)^2f0 + return loss +end + +# set up plots +niterations = 100 + +hisloss = zeros(Float32, niterations+1) +ls = BackTracking(c_1=1f-4,iterations=10,maxstep=Inf32,order=3,ρ_hi=5f-1,ρ_lo=1f-1) +α = 1f1 +### backtracking line search +prog = Progress(niterations) +for j=1:niterations + + p = Flux.params(x) + + @time grads = gradient(p) do + global loss = f(x) + println("evaluate g") + return loss + end + (j==1) && (hisloss[1] = loss) + g = grads.grads[x] + gnorm = -g/norm(g, Inf) + + println("iteration no: ",j,"; function value: ",loss) + + # linesearch + function ϕ(α) + misfit = f(x .+ α .* gnorm) + @show α, misfit + return misfit + end + try + global step, fval = ls(ϕ, α, loss, dot(g, gnorm)) + catch e + println("linesearch failed at iteration: ",j) + global niterations = j + hisloss[j+1] = loss + break + end + global α = 1.2f0 * step + hisloss[j+1] = fval + + # Update model and bound projection + global x .= x .+ step .* gnorm + + ProgressMeter.next!(prog; showvalues = [(:loss, fval), (:iter, j), (:steplength, step)]) + +end + +y_predict = S(x); + +## compute true and plot +SNR = -2f1 * log10(norm(x_true-x)/norm(x_true)) +fig = figure(figsize=(20,12)); 
+subplot(2,2,1);
+imshow(x',vmin=20,vmax=120);title("inversion by NN, $(niterations) iter");colorbar();
+subplot(2,2,2);
+imshow(x_true',vmin=20,vmax=120);title("GT permeability");colorbar();
+subplot(2,2,3);
+imshow(x_init',vmin=20,vmax=120);title("initial permeability");colorbar();
+subplot(2,2,4);
+imshow(5*abs.(x_true'-x'),vmin=20,vmax=120);title("5X error, SNR=$SNR");colorbar();
+suptitle("MLE (no prior)")
+tight_layout()
+
+sim_name = "FNOinversion"
+exp_name = "2phaseflow"
+
+save_dict = @strdict exp_name
+plot_path = plotsdir(sim_name, savename(save_dict; digits=6))
+
+fig_name = @strdict niterations α
+safesave(joinpath(plot_path, savename(fig_name; digits=6)*"_3Dfno_inv.png"), fig);
+
+
+## loss
+fig = figure(figsize=(20,12));
+plot(hisloss[1:niterations+1]);title("loss");
+suptitle("MLE (no prior)")
+tight_layout()
+
+safesave(joinpath(plot_path, savename(fig_name; digits=6)*"_3Dfno_loss.png"), fig);
+
+## data fitting
+fig = figure(figsize=(20,12));
+for i = 1:5
+    subplot(4,5,i);
+    imshow(y_init[:,:,4*i+1]', vmin=0, vmax=1);
+    title("initial prediction at snapshot $(4*i+1)")
+    subplot(4,5,i+5);
+    imshow(y_true[:,:,4*i+1]', vmin=0, vmax=1);
+    title("true at snapshot $(4*i+1)")
+    subplot(4,5,i+10);
+    imshow(y_predict[:,:,4*i+1]', vmin=0, vmax=1);
+    title("predict at snapshot $(4*i+1)")
+    subplot(4,5,i+15);
+    imshow(5*abs.(y_true[:,:,4*i+1]'-y_predict[:,:,4*i+1]'), vmin=0, vmax=1);
+    title("5X diff at snapshot $(4*i+1)")
+end
+suptitle("MLE (no prior)")
+tight_layout()
+safesave(joinpath(plot_path, savename(fig_name; digits=6)*"_3Dfno_fit.png"), fig);
+
+
diff --git a/scripts/train_NF.jl b/scripts/train_NF.jl
index 8e32a15..05ad257 100644
--- a/scripts/train_NF.jl
+++ b/scripts/train_NF.jl
@@ -42,27 +42,28 @@ save_path = plotsdir(sim_name, savename(save_dict; digits=6))
 
 # Training hyperparameters
 nepochs = 500
-batch_size = 90
-lr = 1f-3
+batch_size = 50
+lr = 2f-3
 lr_step = 10
 gab_l2 = true
 λ = 1f-1
 
 #architecture parametrs
-L = 6
+L = 5
 K = 6
 n_hidden = 32
 low = 0.5f0
 max_recursion = 1
+clip_norm = 5
 
 #data augmentation
-noiseLev = 0.02f0
+noiseLev = 0.005f0
 
 # Random seed
 Random.seed!(2022)
 
 # load in training dataz
-ntrain = Int(round(0.9 * size(perm, 3)))
+ntrain = Int(round(0.5 * size(perm, 3)))
 nvalid = Int(round(0.05 * size(perm, 3)))
 
 train_x = perm[:,:,1:ntrain];
@@ -100,7 +101,7 @@ nbatches = cld(ntrain, batch_size)
 train_loader = DataLoader(train_idx, batchsize=batch_size, shuffle=false)
 
 # Optimizer
-opt = Optimiser(ExpDecay(lr, .99f0, nbatches*lr_step, 1f-6), ADAM(lr))
+opt = Optimiser(ExpDecay(lr, .99f0, nbatches*lr_step, 1f-6), ClipNorm(clip_norm),ADAM(lr))
 
 t = G.forward(X_train_latent); # to initialize actnorm
 θ = get_params(G);
@@ -137,7 +138,6 @@ for e=1:nepochs
         G.backward((Zx / batch_size)[:], (Zx)[:])
         GC.gc()
 
-        print("Iter: epoch=", e, "/", nepochs, ", batch=", b, "/", nbatches, "; f l2 = ", floss[b,e],
@@ -184,16 +184,16 @@
 
         fig = figure(figsize=(14, 12))
 
-        subplot(4,5,1); imshow(X_gen[:,:,1,1], aspect=1, vmin=20,vmax=120,resample=true, interpolation="none", filterrad=1, cmap="gray")
+        subplot(4,5,1); imshow(X_gen[:,:,1,1]', aspect=1, vmin=20,vmax=120,resample=true, interpolation="none", filterrad=1, cmap="gray")
         axis("off"); title(L"$x\sim p_{\theta}(x)$")
-        subplot(4,5,2); imshow(X_gen[:,:,1,2], aspect=1, vmin=20,vmax=120,resample=true, interpolation="none", filterrad=1, cmap="gray")
+        subplot(4,5,2); imshow(X_gen[:,:,1,2]', aspect=1, vmin=20,vmax=120,resample=true, interpolation="none", filterrad=1, cmap="gray")
         axis("off");
title(L"$x\sim p_{\theta}(x)$") - subplot(4,5,3); imshow(X_train_latent[:,:,1,1] |> cpu, aspect=1, vmin=20,vmax=120, resample=true, interpolation="none", filterrad=1, cmap="gray") + subplot(4,5,3); imshow(X_train_latent[:,:,1,1]' |> cpu, aspect=1, vmin=20,vmax=120, resample=true, interpolation="none", filterrad=1, cmap="gray") axis("off"); title(L"$x_{train1} \sim p(x)$") - subplot(4,5,4); imshow(ZX_train_sq[:,:,1,1], aspect=1, resample=true, interpolation="none", filterrad=1, + subplot(4,5,4); imshow(ZX_train_sq[:,:,1,1]', aspect=1, resample=true, interpolation="none", filterrad=1, vmin=-3, vmax=3, cmap="seismic"); axis("off"); title(L"$z_{train1} = G^{-1}(x_{train1})$ "*string("\n")*" mean "*string(mean_train_1)*" std "*string(std_train_1)); @@ -208,17 +208,17 @@ for e=1:nepochs title(L"qq plot with $z_{train1}$"); xlim(-5,5); ylim(-5,5); #xlabel("Theoretical Quantiles"); ylabel("Sample Quantiles") - subplot(4,5,6); imshow(X_gen[:,:,1,3], aspect=1, vmin=20,vmax=120,resample=true, interpolation="none", filterrad=1, cmap="gray") + subplot(4,5,6); imshow(X_gen[:,:,1,3]', aspect=1, vmin=20,vmax=120,resample=true, interpolation="none", filterrad=1, cmap="gray") axis("off"); title(L"$x\sim p_{\theta}(x)$") - subplot(4,5,7); imshow(X_gen[:,:,1,4], aspect=1, vmin=20,vmax=120,resample=true, interpolation="none", filterrad=1, cmap="gray") + subplot(4,5,7); imshow(X_gen[:,:,1,4]', aspect=1, vmin=20,vmax=120,resample=true, interpolation="none", filterrad=1, cmap="gray") axis("off"); title(L"$x\sim p_{\theta}(x)$") - subplot(4,5,8); imshow(X_train_latent[:,:,1,2]|> cpu, aspect=1, vmin=20,vmax=120, resample=true, interpolation="none", filterrad=1, cmap="gray") + subplot(4,5,8); imshow(X_train_latent[:,:,1,2]' |> cpu, aspect=1, vmin=20,vmax=120, resample=true, interpolation="none", filterrad=1, cmap="gray") axis("off"); title(L"$x_{train2} \sim p(x)$") - subplot(4,5,9) ;imshow(ZX_train_sq[:,:,1,2], aspect=1, resample=true, interpolation="none", filterrad=1, + subplot(4,5,9) ;imshow(ZX_train_sq[:,:,1,2]', aspect=1, resample=true, interpolation="none", filterrad=1, vmin=-3, vmax=3, cmap="seismic"); axis("off"); title(L"$z_{train2} = G^{-1}(x_{train2})$ "*string("\n")*" mean "*string(mean_train_2)*" std "*string(std_train_2)); @@ -232,17 +232,17 @@ for e=1:nepochs title(L"qq plot with $z_{train2}$"); xlim(-5,5);ylim(-5,5); #xlabel("Theoretical Quantiles"); ylabel("Sample Quantiles") - subplot(4,5,11); imshow(X_gen[:,:,1,5], aspect=1, vmin=20,vmax=120,resample=true, interpolation="none", filterrad=1, cmap="gray") + subplot(4,5,11); imshow(X_gen[:,:,1,5]', aspect=1, vmin=20,vmax=120,resample=true, interpolation="none", filterrad=1, cmap="gray") axis("off"); title(L"$x\sim p_{\theta}(x)$") - subplot(4,5,12); imshow(X_gen[:,:,1,6], aspect=1, vmin=20,vmax=120,resample=true, interpolation="none", filterrad=1, cmap="gray") + subplot(4,5,12); imshow(X_gen[:,:,1,6]', aspect=1, vmin=20,vmax=120,resample=true, interpolation="none", filterrad=1, cmap="gray") axis("off"); title(L"$x\sim p_{\theta}(x)$") - subplot(4,5,13); imshow(X_test_latent[:,:,1,1]|> cpu, aspect=1, vmin=20,vmax=120, resample=true, interpolation="none", filterrad=1, cmap="gray") + subplot(4,5,13); imshow(X_test_latent[:,:,1,1]' |> cpu, aspect=1, vmin=20,vmax=120, resample=true, interpolation="none", filterrad=1, cmap="gray") axis("off"); title(L"$x_{test1} \sim p(x)$") - subplot(4,5,14) ;imshow(ZX_test_sq[:,:,1,1], aspect=1, resample=true, interpolation="none", filterrad=1, + subplot(4,5,14) ;imshow(ZX_test_sq[:,:,1,1]', aspect=1, resample=true, 
interpolation="none", filterrad=1, vmin=-3, vmax=3, cmap="seismic"); axis("off"); title(L"$z_{test1} = G^{-1}(x_{test1})$ "*string("\n")*" mean "*string(mean_test_1)*" std "*string(std_test_1)); @@ -257,17 +257,16 @@ for e=1:nepochs #xlabel("Theoretical Quantiles"); ylabel("Sample Quantiles") - subplot(4,5,16); imshow(X_gen[:,:,1,7], aspect=1, vmin=20,vmax=120, + subplot(4,5,16); imshow(X_gen[:,:,1,7]', aspect=1, vmin=20,vmax=120, interpolation="none", filterrad=1, cmap="gray"); axis("off"); title(L"$x\sim p_{\theta}(x)$") - subplot(4,5,17); imshow(X_gen[:,:,1,8], aspect=1, vmin=20,vmax=120, interpolation="none", filterrad=1, cmap="gray") + subplot(4,5,17); imshow(X_gen[:,:,1,8]', aspect=1, vmin=20,vmax=120, interpolation="none", filterrad=1, cmap="gray") axis("off"); title(L"$x\sim p_{\theta}(x)$") - subplot(4,5,18); imshow(X_test_latent[:,:,1,2]|> cpu, aspect=1, vmin=20,vmax=120, interpolation="none", filterrad=1, cmap="gray") + subplot(4,5,18); imshow(X_test_latent[:,:,1,2]' |> cpu, aspect=1, vmin=20,vmax=120, interpolation="none", filterrad=1, cmap="gray") axis("off"); title(L"$x_{test2} \sim p(x)$") - - - subplot(4,5,19) ;imshow(ZX_test_sq[:,:,1,2], aspect=1, interpolation="none", filterrad=1, + + subplot(4,5,19) ;imshow(ZX_test_sq[:,:,1,2]', aspect=1, interpolation="none", filterrad=1, vmin=-3, vmax=3, cmap="seismic"); axis("off"); title(L"$z_{test2} = G^{-1}(x_{test2})$ "*string("\n")*" mean "*string(mean_test_2)*" std "*string(std_test_2)); @@ -283,7 +282,7 @@ for e=1:nepochs tight_layout() - fig_name = @strdict e gab_l2 λ lr lr_step noiseLev n_hidden L K max_recursion + fig_name = @strdict e gab_l2 λ lr lr_step noiseLev n_hidden L K max_recursion clip_norm safesave(joinpath(save_path, savename(fig_name; digits=6)*"_hint_latent.png"), fig); close(fig) close(fig) @@ -292,7 +291,7 @@ for e=1:nepochs if(mod(e,intermediate_save_params)==0) # Saving parameters and logs Params = get_params(G) |> cpu - save_dict = @strdict e nepochs lr lr_step gab_l2 λ noiseLev n_hidden L K max_recursion Params floss flogdet + save_dict = @strdict e nepochs lr lr_step gab_l2 λ noiseLev n_hidden L K max_recursion Params floss flogdet clip_norm @tagsave( datadir(sim_name, savename(save_dict, "jld2"; digits=6)), save_dict; @@ -337,7 +336,7 @@ for e=1:nepochs tight_layout() - fig_name = @strdict nepochs e lr lr_step gab_l2 λ noiseLev max_recursion n_hidden L K + fig_name = @strdict nepochs e lr lr_step gab_l2 λ noiseLev max_recursion n_hidden L K clip_norm safesave(joinpath(save_path, savename(fig_name; digits=6)*"mnist_hint_log.png"), fig); close(fig) close(fig) diff --git a/src/FNOstruct.jl b/src/FNOstruct.jl index e5cf4f2..5b7b8a8 100644 --- a/src/FNOstruct.jl +++ b/src/FNOstruct.jl @@ -96,13 +96,13 @@ end @Flux.functor Net2d ### 2D FNO constructor -function Net2d(modes::Int64, width::Int64; in_channels::Int64=3, out_channels::Int64=1, mid_channels::Int64=128) +function Net2d(modes::Vector{Int64}, width::Int64; in_channels::Int64=3, out_channels::Int64=1, mid_channels::Int64=128) return Net2d( Conv((1, 1), in_channels=>width), - SpectralConv(width, width, [modes, modes]), - SpectralConv(width, width, [modes, modes]), - SpectralConv(width, width, [modes, modes]), - SpectralConv(width, width, [modes, modes]), + SpectralConv(width, width, modes), + SpectralConv(width, width, modes), + SpectralConv(width, width, modes), + SpectralConv(width, width, modes), Conv((1, 1), width=>width), Conv((1, 1), width=>width), Conv((1, 1), width=>width), @@ -116,6 +116,8 @@ function Net2d(modes::Int64, width::Int64; 
in_channels::Int64=3, out_channels::I
     )
 end
 
+Net2d(modes::Int64, width::Int64; in_channels::Int64=3, out_channels::Int64=1, mid_channels::Int64=128) = Net2d([modes,modes], width; in_channels=in_channels, out_channels=out_channels, mid_channels=mid_channels)
+
 ### 2D FNO forward evaluation
 function (B::Net2d{T})(x::AbstractArray{T,4}) where T
     x = B.fc0(x)
@@ -163,13 +165,13 @@ end
 @Flux.functor Net3d
 
 ### 3D FNO Constructor
-function Net3d(modes::Int64, width::Int64; in_channels::Int64=4, out_channels::Int64=1, mid_channels::Int64=128)
+function Net3d(modes::Vector{Int64}, width::Int64; in_channels::Int64=4, out_channels::Int64=1, mid_channels::Int64=128)
     return Net3d(
         Conv((1, 1, 1), in_channels=>width),
-        SpectralConv(width, width, [modes, modes, modes]),
-        SpectralConv(width, width, [modes, modes, modes]),
-        SpectralConv(width, width, [modes, modes, modes]),
-        SpectralConv(width, width, [modes, modes, modes]),
+        SpectralConv(width, width, modes),
+        SpectralConv(width, width, modes),
+        SpectralConv(width, width, modes),
+        SpectralConv(width, width, modes),
         Conv((1, 1, 1), width=>width),
         Conv((1, 1, 1), width=>width),
         Conv((1, 1, 1), width=>width),
@@ -184,6 +186,8 @@ function Net3d(modes::Int64, width::Int64; in_channels::Int64=4, out_channels::I
     return block
 end
 
+Net3d(modes::Int64, width::Int64; in_channels::Int64=4, out_channels::Int64=1, mid_channels::Int64=128) = Net3d([modes,modes,modes], width; in_channels=in_channels, out_channels=out_channels, mid_channels=mid_channels)
+
 ### 3D FNO forward evaluation
 function (B::Net3d{T})(x::AbstractArray{T,5}) where T
     x = B.fc0(x)
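With `Net2d`/`Net3d` now accepting `modes::Vector{Int64}`, the number of retained Fourier modes can be chosen per dimension (two spatial dimensions plus time in the 3D case), which is what the new training script relies on via `modes = [32, 16, 8]`. A minimal usage sketch, with argument values taken from scripts/fourier_3d_compass.jl (illustrative only):

    using FNO4CO2
    NN  = Net3d([32, 16, 8], 20; in_channels=5)   # per-dimension modes, width 20, 5 input channels
    NN2 = Net3d(8, 20)                            # scalar shorthand, equivalent to Net3d([8, 8, 8], 20)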