diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 700707ce..d60f0707 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -4,4 +4,4 @@ updates: - package-ecosystem: "github-actions" directory: "/" # Location of package manifests schedule: - interval: "weekly" + interval: "monthly" diff --git a/.github/workflows/SpellCheck.yml b/.github/workflows/SpellCheck.yml new file mode 100644 index 00000000..2dfaaa86 --- /dev/null +++ b/.github/workflows/SpellCheck.yml @@ -0,0 +1,13 @@ +name: Spell Check + +on: [push, pull_request, workflow_dispatch] + +jobs: + typos-check: + name: Spell Check with Typos + runs-on: ubuntu-latest + steps: + - name: Checkout Actions Repository + uses: actions/checkout@v4 + - name: Check spelling + uses: crate-ci/typos@v1.18.0 diff --git a/.github/workflows/blank.yml b/.github/workflows/blank.yml index a7ad912f..8a48b4b3 100644 --- a/.github/workflows/blank.yml +++ b/.github/workflows/blank.yml @@ -14,8 +14,9 @@ jobs: fail-fast: false matrix: version: - - '1.8' - '1.9' + - '1.10' + - 'nightly' os: - ubuntu-latest - macOS-latest @@ -23,12 +24,12 @@ jobs: arch: - x64 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: julia-actions/setup-julia@v1 with: version: ${{ matrix.version }} arch: ${{ matrix.arch }} - - uses: actions/cache@v3 + - uses: actions/cache@v4 env: cache-name: cache-artifacts with: diff --git a/.github/workflows/documentation.yml b/.github/workflows/documentation.yml index 1c88a632..0312a351 100644 --- a/.github/workflows/documentation.yml +++ b/.github/workflows/documentation.yml @@ -11,7 +11,7 @@ jobs: build: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: julia-actions/setup-julia@latest with: version: '1.9' diff --git a/Project.toml b/Project.toml index 7b9b763a..ec30d546 100644 --- a/Project.toml +++ b/Project.toml @@ -1,7 +1,7 @@ name = "GeophysicalModelGenerator" uuid = "3700c31b-fa53-48a6-808a-ef22d5a84742" authors = 
["Boris Kaus", "Marcel Thielmann"] -version = "0.5.5" +version = "0.5.9" [deps] Colors = "5ae59095-9a9b-59fe-a467-6f913c188581" @@ -20,30 +20,38 @@ MeshIO = "7269a6da-0436-5bbc-96c2-40638cbb6118" NearestNeighbors = "b8a86587-4115-5ab1-83bc-aa920d37bbce" Parameters = "d96e819e-fc66-5662-9728-84c9c7592b0a" Printf = "de0858da-6303-5e67-8744-51eddeeeb8d7" -Requires = "ae029012-a4dd-5104-9daa-d747884805df" SpecialFunctions = "276daf66-3868-5448-9aa4-cd146d93841b" Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2" +Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" WriteVTK = "64499a7a-5c06-52f2-abe2-ccb03c286192" +[weakdeps] +GLMakie = "e9467ef8-e4e7-5192-8a1a-b1aee30e663a" +GMT = "5752ebe1-31b9-557e-87aa-f909b540aa54" + +[extensions] +GLMakie_Visualisation = "GLMakie" +GMT_utils = "GMT" + [compat] Colors = "0.9 - 0.12" Downloads = "1" FileIO = "1" +GLMakie = "0.8, 0.9" GMT = "1" GeoParams = "0.2 - 0.5" Geodesy = "1" GeometryBasics = "0.1 - 0.4" Glob = "1.2 - 1.3" ImageIO = "0.1 - 0.6" -Interpolations = "0.13, 0.14" +Interpolations = "0.14, 0.15" JLD2 = "0.4" MeshIO = "0.1 - 0.4" NearestNeighbors = "0.2 - 0.4" Parameters = "0.9 - 0.12" -Requires = "1.0, 2" SpecialFunctions = "1.0, 2" WriteVTK = "1" -julia = "1.8" +julia = "1.9" [extras] GLMakie = "e9467ef8-e4e7-5192-8a1a-b1aee30e663a" @@ -51,4 +59,4 @@ GMT = "5752ebe1-31b9-557e-87aa-f909b540aa54" Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" [targets] -test = ["Test"] +test = ["Test","GMT"] diff --git a/README.md b/README.md index 30964b7e..4777d951 100644 --- a/README.md +++ b/README.md @@ -3,6 +3,7 @@ [![Stable](https://img.shields.io/badge/docs-stable-blue.svg)](https://juliageodynamics.github.io/GeophysicalModelGenerator.jl/dev) [![Dev](https://img.shields.io/badge/docs-dev-blue.svg)](https://juliageodynamics.github.io/GeophysicalModelGenerator.jl/dev/) [![Build 
Status](https://github.com/JuliaGeodynamics/GeophysicalModelGenerator.jl/workflows/CI/badge.svg)](https://github.com/JuliaGeodynamics/GeophysicalModelGenerator.jl/actions) +[![DOI](https://zenodo.org/badge/366377223.svg)](https://zenodo.org/doi/10.5281/zenodo.8074345) ![GMG_LogoWide](https://github.com/JuliaGeodynamics/GeophysicalModelGenerator.jl/assets/1148509/0fa80120-0f99-4eb2-bb00-60551ea3b6c4) diff --git a/_typos.toml b/_typos.toml new file mode 100644 index 00000000..0d81f24f --- /dev/null +++ b/_typos.toml @@ -0,0 +1,3 @@ +[default.extend-words] +MOR = "MOR" +dum = "dum" diff --git a/docs/gmt.history b/docs/gmt.history new file mode 100644 index 00000000..b53e2c35 --- /dev/null +++ b/docs/gmt.history @@ -0,0 +1,8 @@ +# GMT 6 Session common arguments shelf +BEGIN GMT 6.5.0 +B afWSen +J X +JX X15c/0 +R 0/1/0.1/1 +@L 1 +END diff --git a/docs/paper/paper.md b/docs/paper/paper.md new file mode 100644 index 00000000..02aae798 --- /dev/null +++ b/docs/paper/paper.md @@ -0,0 +1,119 @@ +--- +title: 'GeophysicalModelGenerator.jl: A Julia package to visualize geoscientific data and generate model setups' +tags: + - Julia + - geosciences + - geodynamics + - tectonics + - geophysics +authors: + - name: Boris J.P. 
Kaus + orcid: 0000-0002-0247-8660 + affiliation: 1 # (Multiple affiliations must be quoted) + - name: Marcel Thielmann + orcid: 0000-0003-1185-3730 + affiliation: 2 + - name: Arne Spang + orcid: 0000-0002-6107-0403 + affiliation: 2 + - name: Luca de Siena + orcid: 0000-0002-3615-5923 + affiliation: 3 + - name: Pascal Aellig + #orcid: 0000-0003-1185-3730 + affiliation: 1 + - name: Jacob Frasukiewicz + #orcid: 0000-0003-1185-3730 + affiliation: 1 +affiliations: + - name: Johannes Gutenberg University Mainz, Germany + index: 1 + - name: University of Bayreuth, Germany + index: 2 + - name: Bologna University, Italy + index: 3 + +date: 27 February 2024 +bibliography: paper.bib +--- + +# Summary + +The forces on stars, galaxies, and dark matter under external gravitational +fields lead to the dynamical evolution of structures in the universe. The orbits +of these bodies are therefore key to understanding the formation, history, and +future state of galaxies. The field of "galactic dynamics," which aims to model +the gravitating components of galaxies to study their structure and evolution, +is now well-established, commonly taught, and frequently used in astronomy. +Aside from toy problems and demonstrations, the majority of problems require +efficient numerical tools, many of which require the same base code (e.g., for +performing numerical orbit integration). + +# Statement of need + +`Gala` is an Astropy-affiliated Python package for galactic dynamics. Python +enables wrapping low-level languages (e.g., C) for speed without losing +flexibility or ease-of-use in the user-interface. The API for `Gala` was +designed to provide a class-based and user-friendly interface to fast (C or +Cython-optimized) implementations of common operations such as gravitational +potential and force evaluation, orbit integration, dynamical transformations, +and chaos indicators for nonlinear dynamics. 
`Gala` also relies heavily on and +interfaces well with the implementations of physical units and astronomical +coordinate systems in the `Astropy` package [@astropy] (`astropy.units` and +`astropy.coordinates`). + +`Gala` was designed to be used by both astronomical researchers and by +students in courses on gravitational dynamics or astronomy. It has already been +used in a number of scientific publications [@Pearson:2017] and has also been +used in graduate courses on Galactic dynamics to, e.g., provide interactive +visualizations of textbook material [@Binney:2008]. The combination of speed, +design, and support for Astropy functionality in `Gala` will enable exciting +scientific explorations of forthcoming data releases from the *Gaia* mission +[@gaia] by students and experts alike. + +# Mathematics + +Single dollars ($) are required for inline mathematics e.g. $f(x) = e^{\pi/x}$ + +Double dollars make self-standing equations: + +$$\Theta(x) = \left\{\begin{array}{l} +0\textrm{ if } x < 0\cr +1\textrm{ else} +\end{array}\right.$$ + +You can also use plain \LaTeX for equations +\begin{equation}\label{eq:fourier} +\hat f(\omega) = \int_{-\infty}^{\infty} f(x) e^{i\omega x} dx +\end{equation} +and refer to \autoref{eq:fourier} from text. + +# Citations + +Citations to entries in paper.bib should be in +[rMarkdown](http://rmarkdown.rstudio.com/authoring_bibliographies_and_citations.html) +format. + +If you want to cite a software repository URL (e.g. something on GitHub without a preferred +citation) then you can do it with the example BibTeX entry below for @fidgit. + +For a quick reference, the following citation commands can be used: +- `@author:2001` -> "Author et al. 
(2001)" +- `[@author:2001]` -> "(Author et al., 2001)" +- `[@author1:2001; @author2:2001]` -> "(Author1 et al., 2001; Author2 et al., 2002)" + +# Figures + +Figures can be included like this: +![Caption for example figure.\label{fig:example}](figure.png) +and referenced from text using \autoref{fig:example}. + +Figure sizes can be customized by adding an optional second parameter: +![Caption for example figure.](figure.png){ width=20% } + +# Acknowledgements + +We acknowledge contributions from Brigitta Sipocz, Syrtis Major, and Semyeong +Oh, and support from Kathryn Johnston during the genesis of this project. + +# References \ No newline at end of file diff --git a/docs/src/man/LaPalma_example.md b/docs/src/man/LaPalma_example.md index 946d9441..aaf34c79 100644 --- a/docs/src/man/LaPalma_example.md +++ b/docs/src/man/LaPalma_example.md @@ -28,70 +28,54 @@ Topo = ImportTopo(lon = [-18.7, -17.1], lat=[28.0, 29.2], file="@earth_relief_03 ```` ```` -GeoData +GeoData size : (1921, 1441, 1) lon ϵ [ 341.3 : 342.9] lat ϵ [ 28.0 : 29.2] depth ϵ [ -4.38297802734375 km : 2.414 km] fields: (:Topography,) - -```` - -In case you did not manage that, we have prepared a JLD2 file [here](https://seafile.rlp.net/f/03286a46a9f040a69b0f/?dl=1). -Download it, and doublecheck that you are in the same directory as the data file with: - -````julia -pwd() -```` - -```` -"/Users/kausb/.julia/dev/GeophysicalModelGenerator/docs/src/scripts" ```` -Load the data: - -````julia -Topo=load("Topo_LaPalma.jld2","Topo") -```` - -```` -GeoData - size : (1921, 1441, 1) - lon ϵ [ 341.3 : 342.9] - lat ϵ [ 28.0 : 29.2] - depth ϵ [ -4.38297802734375 km : 2.414 km] - fields: (:Topography,) - -```` +!!! note + + In case you did not manage that, we have prepared a JLD2 file [here](https://seafile.rlp.net/f/64e0b0a6b1e941b7bba5/?dl=1). 
Download it, and doublecheck that you are in the same directory as the data file with: + ````julia + pwd() + "/Users/kausb/.julia/dev/GeophysicalModelGenerator/docs/src/scripts" + ```` + Load the data with: + ````julia + julia> Topo=load_GMG("Topo_LaPalma") + GeoData + size : (1921, 1441, 1) + lon ϵ [ 341.3 : 342.9] + lat ϵ [ 28.0 : 29.2] + depth ϵ [ -4.090296875 : 0.0] + fields : (:Topography,) + attributes: ["note"] + ```` Next, lets load the seismicity. We have prepared a file with earthquake locations up to early November (from [https://www.ign.es/web/ign/portal/vlc-catalogo](https://www.ign.es/web/ign/portal/vlc-catalogo)). -Download that [here](https://seafile.rlp.net/f/03286a46a9f040a69b0f/?dl=1) +Download that [here](https://seafile.rlp.net/f/64e0b0a6b1e941b7bba5/?dl=1) ````julia -data_all_EQ = load("EQ_Data.jld2","data_all_EQ") -```` - -```` -GeoData - size : (6045,) - lon ϵ [ -18.0341 : -17.6671] - lat ϵ [ 28.3102 : 28.8144] - depth ϵ [ NaN km : NaN km] - fields: (:Magnitude, :TimeSinceJan1_2021) - +data_all_EQ = load_GMG("EQ_Data") +GeoData + size : (6045,) + lon ϵ [ -17.841 : -17.8268] + lat ϵ [ 28.5717 : 28.573] + depth ϵ [ -29.0 : -11.0] + fields : (:Magnitude, :TimeSinceJan1_2021) + attributes: ["note"] ```` Write the data to paraview with - ````julia -Write_Paraview(data_all_EQ,"data_all_EQ",PointsData=true) -Write_Paraview(Topo,"Topo") -```` - -```` +julia> Write_Paraview(data_all_EQ,"data_all_EQ",PointsData=true) Saved file: data_all_EQ.vtu -Saved file: Topo.vts +julia> Write_Paraview(Topo,"Topo") +Saved file: Topo.vts ```` As earthquakes are point-wise data, you have to specify this. @@ -103,28 +87,22 @@ In order to create model setups, it is helpful to first transfer the data to Car This requires us to first determine a *projection point*, that is fixed. Often, it is helpful to use the center of the topography for this. 
In the present example, we will center the model around La Palma itself: ````julia -proj = ProjectionPoint(Lon=-17.84, Lat=28.56) -```` - -```` +julia> proj = ProjectionPoint(Lon=-17.84, Lat=28.56) ProjectionPoint(28.56, -17.84, 222158.69478000276, 3.1625327286383654e6, 28, true) ```` Once this is done you can convert the topographic data to the cartesian reference frame ````julia -EQ_cart = Convert2CartData(data_all_EQ, proj); -Topo_cart = Convert2CartData(Topo, proj) -```` - -```` -CartData - size : (1921, 1441, 1) - x ϵ [ -86.09445705828863 km : 73.67229892155609 km] - y ϵ [ -63.5531883197492 km : 73.28446155584604 km] - z ϵ [ -4.38297802734375 km : 2.414 km] - fields : (:Topography,) - +julia> EQ_cart = Convert2CartData(data_all_EQ, proj); +julia> Topo_cart = Convert2CartData(Topo, proj) +CartData + size : (1921, 1441, 1) + x ϵ [ -86.09445705828863 : 73.67229892155609] + y ϵ [ -63.5531883197492 : 73.28446155584604] + z ϵ [ -4.38352294921875 : 2.414] + fields : (:Topography,) + attributes: ["note"] ```` It is important to realize that the cartesian coordinates of the topographic grid is no longer strictly orthogonal after this conversion. You don't notice that in the current example, as the model domain is rather small. 
@@ -132,37 +110,30 @@ In other cases, however, this is quite substantial (e.g., India-Asia collision z LaMEM needs an orthogonal grid of topography, which we can create with: ````julia -Topo_LaMEM = CartData(XYZGrid(-70:.2:70,-60:.2:70,0)); -nothing #hide +julia> Topo_LaMEM = CartData(XYZGrid(-70:.2:70,-60:.2:70,0)); ```` In a next step, the routine `ProjectCartData` projects a `GeoData` structure to a `CartData` struct ````julia -Topo_LaMEM = ProjectCartData(Topo_LaMEM, Topo, proj) -```` - -```` -CartData - size : (701, 651, 1) - x ϵ [ -70.0 km : 70.0 km] - y ϵ [ -60.0 km : 70.0 km] - z ϵ [ -4.29541702331831 km : 2.3840784607152257 km] - fields : (:Topography,) - +julia> Topo_LaMEM = ProjectCartData(Topo_LaMEM, Topo, proj) +CartData + size : (701, 651, 1) + x ϵ [ -70.0 : 70.0] + y ϵ [ -60.0 : 70.0] + z ϵ [ -4.29608214262314 : 2.3840784607152257] + fields : (:Topography,) + attributes: ["note"] ```` Let's have a look at the data: ````julia -Write_Paraview(EQ_cart,"EQ_cart",PointsData=true) -Write_Paraview(Topo_LaMEM,"Topo_LaMEM") -```` - -```` +julia> Write_Paraview(EQ_cart,"EQ_cart",PointsData=true) Saved file: EQ_cart.vtu -Saved file: Topo_LaMEM.vts +julia> Write_Paraview(Topo_LaMEM,"Topo_LaMEM") +Saved file: Topo_LaMEM.vts ```` ## 3. Create LaMEM setup @@ -173,70 +144,60 @@ The LaMEM input file can be downloaded [here](../scripts/LaPalma.dat). Make sure you are in the same directory as the *.dat file & execute the following command ````julia -Grid = ReadLaMEM_InputFile("LaPalma.dat") -```` - -```` -LaMEM Grid: +julia> Grid = ReadLaMEM_InputFile("LaPalma.dat") +LaMEM Grid: nel : (64, 64, 32) marker/cell : (3, 3, 3) - markers : (192, 192, 192) + markers : (192, 192, 96) x ϵ [-50.0 : 50.0] y ϵ [-40.0 : 40.0] z ϵ [-80.0 : 15.0] - ```` The `LaMEM_grid` structure contains the number of elements in every direction and the number of markers in every cell. It also contains `Grid.X`, `Grid.Y`, `Grid.Z`, which are the coordinates of each of the markers in the 3 directions. 
-In a next step we need to give each of these points a `Phase` number (which is an integer, that indicates the type of the rock that point has), as well as the temperature (in Celcius). +In a next step we need to give each of these points a `Phase` number (which is an integer, that indicates the type of the rock that point has), as well as the temperature (in Celsius). ````julia -Phases = ones(Int32,size(Grid.X))*2; -Temp = ones(Float64,size(Grid.X)); -nothing #hide +julia> Phases = ones(Int32,size(Grid.X))*2; +julia> Temp = ones(Float64,size(Grid.X)); ```` In this example we set the temperature based on the depth, assuming a constant geotherm. Depth is given in kilometers ````julia -Geotherm = 30; -Temp = -Grid.Z.*Geotherm; -nothing #hide +julia> Geotherm = 30; +julia> Temp = -Grid.Z.*Geotherm; ```` Since we are in La Palma, we assume that temperatures are not less than 20 degrees. Moreover, in the mantle, we have the mantle adiabat (1350 C) ````julia -Temp[Temp.<20] .= 20; -Temp[Temp.>1350] .= 1350; -nothing #hide +julia> Temp[Temp.<20] .= 20; +julia> Temp[Temp.>1350] .= 1350; ```` Because of lacking data, we have pretty much no idea where the Moho is in La Palma. Somewhat arbitrary, we assume it to be at 40 km depth, and set the rocks below that to "mantle": ````julia -ind = findall( Grid.Z .< -40); -Phases[ind] .= 3; -nothing #hide +julia> ind = findall( Grid.Z .< -40); +julia> Phases[ind] .= 3; ```` Everything above the free surface is assumed to be "air" ````julia -ind = AboveSurface(Grid, Topo_cart); -Phases[ind] .= 0; -nothing #hide +julia> ind = AboveSurface(Grid, Topo_cart); +julia> Phases[ind] .= 0; ```` And all "air" points that are below sea-level becomes "water" ````julia -ind = findall( (Phases.==0) .& (Grid.Z .< 0)); -Phases[ind] .= 1; -nothing #hide +julia> ind = findall( (Phases.==0) .& (Grid.Z .< 0)); +julia> Phases[ind] .= 1; ```` You can interpret the seismicity in different ways. 
If you assume that there are fully molten magma chambers, there should be no earthquakes within the magma chamber (as stresses are liklely to be very small). @@ -246,34 +207,27 @@ We will assume the second option in our model setup. Looking at the seismicity, there is a swarm at around 35 km depth ````julia -AddSphere!(Phases,Temp,Grid, cen=(0,0,-35), radius=5, phase=ConstantPhase(5), T=ConstantTemp(1200)); -nothing #hide +julia> AddSphere!(Phases,Temp,Grid, cen=(0,0,-35), radius=5,phase=ConstantPhase(5), T=ConstantTemp(1200)); ```` A shallower one exists as well ````julia -AddEllipsoid!(Phases,Temp,Grid, cen=(-1,0,-11), axes=(3,3,8), StrikeAngle=225, DipAngle=45, phase=ConstantPhase(5), T=ConstantTemp(1200)); -nothing #hide +julia> AddEllipsoid!(Phases,Temp,Grid, cen=(-1,0,-11), axes=(3,3,8), StrikeAngle=225, DipAngle=45, phase=ConstantPhase(5), T=ConstantTemp(1200)); ```` And there might be a mid-crustal one ````julia -AddEllipsoid!(Phases,Temp,Grid, cen=(-0,0,-23), axes=(8,8,2), StrikeAngle=0, DipAngle=0, phase=ConstantPhase(5), T=ConstantTemp(1200)); -nothing #hide +julia> AddEllipsoid!(Phases,Temp,Grid, cen=(-0,0,-23), axes=(8,8,2), StrikeAngle=0, DipAngle=0, phase=ConstantPhase(5), T=ConstantTemp(1200)); ```` We can generate a 3D model setup, which must include the `Phases` and `Temp` arrays ````julia -Model3D = CartData(Grid, (Phases=Phases,Temp=Temp)) -Write_Paraview(Model3D,"Model3D") -```` - -```` +julia> Model3D = CartData(Grid, (Phases=Phases,Temp=Temp)); +julia> Write_Paraview(Model3D,"Model3D") Saved file: Model3D.vts - ```` The model setup looks like this @@ -282,20 +236,16 @@ The model setup looks like this We can create a LaMEM marker file from the `Model3D` setup and the (cartesian) topography ````julia -Save_LaMEMTopography(Topo_cart, "Topography.txt") -Save_LaMEMMarkersParallel(Model3D) -```` - -```` +julia> Save_LaMEMTopography(Topo_cart, "Topography.txt") Written LaMEM topography file: Topography.txt +julia> Save_LaMEMMarkersParallel(Model3D) 
Writing LaMEM marker file -> ./markers/mdb.00000000.dat - ```` + Next, you can use this to run the LaMEM model and visualize the model results in Paraview as well. If you are interested in doing this, have a look at the LaMEM [wiki](https://bitbucket.org/bkaus/lamem/wiki/Home) pages. --- *This page was generated using [Literate.jl](https://github.com/fredrikekre/Literate.jl).* - diff --git a/docs/src/man/Tutorial_Votemaps.md b/docs/src/man/Tutorial_Votemaps.md index 444b6d31..2d7e4078 100644 --- a/docs/src/man/Tutorial_Votemaps.md +++ b/docs/src/man/Tutorial_Votemaps.md @@ -16,17 +16,17 @@ We assume that you have all tomographic models already available as *.JLD2 files Specifically, we will use the tomographic models of Paffrath, Zhao and Koulakov, as we have them all available in processed form Download the corresponding *.jld2 files to the same directory -````@example Tutorial_Votemaps +```julia using JLD2, GeophysicalModelGenerator Pwave_Zhao = load("Zhao_Pwave.jld2","Data_set_Zhao2016_Vp") PSwave_Koulakov = load("Koulakov_Europe.jld2","DataKoulakov_Alps") Pwave_Paffrath = load("Paffrath_Pwave.jld2","Data_set_Paffrath2021_Vp") -```` +``` The 3 data sets all contain tomographic models for roughly the alpine area, but as you can see, they all have a different resolution and the fields are sometimes called different as well: -````@example Tutorial_Votemaps +```julia Pwave_Paffrath GeoData size : (162, 130, 42) @@ -42,7 +42,7 @@ PSwave_Koulakov lat ϵ [ 37.035928143712574 : 49.01197604790419] depth ϵ [ -700.0 km : -10.0 km] fields: (:dVp_percentage, :dVs_percentage) -```` +``` #### 2. 
Creating a Votemap The idea of `Votemaps` is rather simple: @@ -66,20 +66,20 @@ Data_VoteMap = VoteMap( [Pwave_Paffrath, PSwave_Koulakov, Pwave_Zhao], This will look at the common `lon`,`lat`,`depth` ranges between all 3 models, interpret each of the models to a common grid of size `(100,100,100)` and apply each of the criteria specified The resulting `GeoData` struct looks like: -````@example Tutorial_Votemaps +```julia GeoData size : (100, 100, 100) lon ϵ [ 4.0 : 18.0] lat ϵ [ 38.0 : 49.01197604790419] depth ϵ [ -606.0385 km : -10.0 km] fields: (:VoteMap,) -```` +``` And from this, we can generate profiles, visualize 3D features in Paraview etc. etc. -````@example Tutorial_Votemaps +```julia Write_Paraview(Data_VoteMap, "VoteMap_Alps") -```` +``` In paraview, this gives ![Tutorial_VoteMap](../assets/img/Tutorial_VoteMap.png) @@ -87,11 +87,11 @@ In paraview, this gives You can ofcourse argue that newer tomographic models include more data & should therefore count more A simple way to take that into account is to list the model twice: -````@example Tutorial_Votemaps +```julia Data_VoteMap = VoteMap( [Pwave_Paffrath, Pwave_Paffrath, PSwave_Koulakov, Pwave_Zhao], ["dVp_Percentage>3.0","dVp_Percentage>3.0", "dVp_percentage>2.0","dVp_Percentage>2.0"], dims=(100,100,100)) -```` +``` Similarly, you could only look at a single model (even when that is perhaps easier done directly in paraview, where you can simply select a different isocontour value) diff --git a/docs/src/man/dataimport.md b/docs/src/man/dataimport.md index 35303e7f..f4e8a01e 100644 --- a/docs/src/man/dataimport.md +++ b/docs/src/man/dataimport.md @@ -6,4 +6,6 @@ We have a number of ways to import data, besides using any of the additional pac GeophysicalModelGenerator.Screenshot_To_GeoData GeophysicalModelGenerator.Screenshot_To_CartData GeophysicalModelGenerator.Screenshot_To_UTMData +GeophysicalModelGenerator.ImportTopo +GeophysicalModelGenerator.ImportGeoTIFF ``` diff --git a/docs/src/man/tools.md 
b/docs/src/man/tools.md index 46d4f1eb..2d17c007 100644 --- a/docs/src/man/tools.md +++ b/docs/src/man/tools.md @@ -15,7 +15,6 @@ GeophysicalModelGenerator.ParseColumns_CSV_File GeophysicalModelGenerator.RotateTranslateScale! GeophysicalModelGenerator.Convert2UTMzone GeophysicalModelGenerator.Convert2CartData -GeophysicalModelGenerator.ImportTopo GeophysicalModelGenerator.ProjectCartData GeophysicalModelGenerator.DrapeOnTopo GeophysicalModelGenerator.LithostaticPressure! diff --git a/docs/src/man/tutorial_ISC_data.md b/docs/src/man/tutorial_ISC_data.md index 25ee0c8c..8d43dad9 100644 --- a/docs/src/man/tutorial_ISC_data.md +++ b/docs/src/man/tutorial_ISC_data.md @@ -4,18 +4,18 @@ This explains how to load earthquake data obtained from the ISC catalogue. ## Steps -#### 1. Download data -You can get data from the ISC catalogue here: +#### 1. Download data +You can get data from the ISC catalogue here: [http://www.isc.ac.uk/iscbulletin/search/catalogue/](http://www.isc.ac.uk/iscbulletin/search/catalogue/) The catalogue will give you an on screen CSV output that will then have to be copied to a file of your choice (here we will call it *ISC1.dat*). Do that and start julia from the directory where it was downloaded. #### 2. Read data into Julia -The main data-file, `ISC1.dat`, has 23 lines of comments (indicated with `#`), after which the data starts. We can use the julia package [https://github.com/JuliaData/CSV.jl](CSV.jl) to read in the data, and tell it that the data is seperated by `,`. +The main data-file, `ISC1.dat`, has 23 lines of comments (indicated with `#`), after which the data starts. We can use the julia package [https://github.com/JuliaData/CSV.jl](CSV.jl) to read in the data, and tell it that the data is separated by `,`. 
```julia-repl julia> using CSV, GeophysicalModelGenerator julia> data_file = CSV.File("ISC1.dat",datarow=24,header=false,delim=',') ``` -As this data contains a lot of information that we are not interested in at the moment and which is given in non-numeric fomats (e.g. date, time etc.), we will use our helper function *ParseColumns_CSV_File* to only extract columns with numeric data. +As this data contains a lot of information that we are not interested in at the moment and which is given in non-numeric formats (e.g. date, time etc.), we will use our helper function *ParseColumns_CSV_File* to only extract columns with numeric data. ```julia-repl julia> data = ParseColumns_CSV_File(data_file, 14) julia> lon = data[:,2]; diff --git a/docs/src/man/tutorial_MohoTopo.md b/docs/src/man/tutorial_MohoTopo.md index 4912936d..64040ae6 100644 --- a/docs/src/man/tutorial_MohoTopo.md +++ b/docs/src/man/tutorial_MohoTopo.md @@ -1,20 +1,20 @@ # Moho topography ## Goal -This explains how to load the Moho topography for Italy and the Alps and create a paraview file +This explains how to load the Moho topography for Italy and the Alps and create a paraview file Spada, M., Bianchi, I., Kissling, E., Agostinetti, N.P., Wiemer, S., 2013. *Combining controlled-source seismology and receiver function information to derive 3-D Moho topography for Italy.* Geophysical Journal International 194, 1050–1068. [doi:10.1093/gji/ggt148](https://doi.org/10.1093/gji/ggt148) ## Steps -#### 1. Download data +#### 1. Download data The data is available as digital dataset on the researchgate page of Prof. 
Edi Kissling [https://www.researchgate.net/publication/322682919_Moho_Map_Data-WesternAlps-SpadaETAL2013](https://www.researchgate.net/publication/322682919_Moho_Map_Data-WesternAlps-SpadaETAL2013) We have also uploaded it here: [https://seafile.rlp.net/d/a50881f45aa34cdeb3c0/](https://seafile.rlp.net/d/a50881f45aa34cdeb3c0/) -The full data set actually includes 3 different Moho's (Europe, Adria, Tyrrhenia-Corsica). To simplify matters, we have split the full file into 3 seperate ascii files and uploaded it. +The full data set actually includes 3 different Moho's (Europe, Adria, Tyrrhenia-Corsica). To simplify matters, we have split the full file into 3 separate ascii files and uploaded it. Please download the files `Moho_Map_Data-WesternAlps-SpadaETAL2013_Moho1.txt`, `Moho_Map_Data-WesternAlps-SpadaETAL2013_Moho2.txt` and `Moho_Map_Data-WesternAlps-SpadaETAL2013_Moho3.txt` @@ -30,7 +30,7 @@ Note that depth is made negative. #### 3. Reformat the data -Next, let's check if the data is spaced in a regular manner in Lon/Lat direction. +Next, let's check if the data is spaced in a regular manner in Lon/Lat direction. For that, we plot it using the `Plots` package (you may have to install that first on your machine). 
```julia julia> using Plots @@ -45,13 +45,13 @@ The easiest way to transfer this to Paraview is to simply save this as 3D data p ```julia julia> using GeophysicalModelGenerator julia> data_Moho1 = GeoData(lon,lat,depth,(MohoDepth=depth*km,)) -GeoData +GeoData size : (12355,) lon ϵ [ 4.00026 - 11.99991] lat ϵ [ 42.51778 - 48.99544] depth ϵ [ -57.46 km - -21.34 km] fields: (:MohoDepth,) -julia> Write_Paraview(data_Moho1, "Spada_Moho_Europe", PointsData=true) +julia> Write_Paraview(data_Moho1, "Spada_Moho_Europe", PointsData=true) ``` And we can do the same with the other two Moho's: @@ -59,11 +59,11 @@ And we can do the same with the other two Moho's: julia> data =readdlm("Moho_Map_Data-WesternAlps-SpadaETAL2013_Moho2.txt",' ',Float64,'\n', skipstart=38,header=false); julia> lon, lat, depth = data[:,1], data[:,2], -data[:,3]; julia> data_Moho2 = GeoData(lon,lat,depth,(MohoDepth=depth*km,)) -julia> Write_Paraview(data_Moho2, "Spada_Moho_Adria", PointsData=true) +julia> Write_Paraview(data_Moho2, "Spada_Moho_Adria", PointsData=true) julia> data =readdlm("Moho_Map_Data-WesternAlps-SpadaETAL2013_Moho3.txt",' ',Float64,'\n', skipstart=38,header=false); julia> lon, lat, depth = data[:,1], data[:,2], -data[:,3]; julia> data_Moho3 = GeoData(lon,lat,depth,(MohoDepth=depth*km,)) -julia> Write_Paraview(data_Moho3, "Spada_Moho_Tyrrhenia", PointsData=true) +julia> Write_Paraview(data_Moho3, "Spada_Moho_Tyrrhenia", PointsData=true) ``` If we plot this in paraview, it looks like this: @@ -71,7 +71,7 @@ If we plot this in paraview, it looks like this: #### 3.1 Fitting a mesh through the data -So obviously, the Moho is discontinuous between these three Mohos. Often, it looks nicer if we fit a regular surface through these data points. To do this we first combine the data points of the 3 surfaces into one set of points +So obviously, the Moho is discontinuous between these three Mohos. Often, it looks nicer if we fit a regular surface through these data points. 
To do this we first combine the data points of the 3 surfaces into one set of points ``` julia> lon = [data_Moho1.lon.val; data_Moho2.lon.val; data_Moho3.lon.val]; @@ -80,18 +80,18 @@ julia> depth = [data_Moho1.depth.val; data_Moho2.depth.val; data_Moho3.depth.val julia> data_Moho_combined = GeoData(lon, lat, depth, (MohoDepth=depth*km,)) ``` -Next, we define a regular lon/lat grid +Next, we define a regular lon/lat grid ```julia julia> Lon,Lat,Depth = LonLatDepthGrid(4.1:0.1:11.9,42.5:.1:49,-30km); ``` -And we will use a nearest neighbor interpolation method to fit a surface through the data. This has the advantage that it will take the discontinuities into account. We will use the package [NearestNeighbors.jl](https://github.com/KristofferC/NearestNeighbors.jl) for this, which you may have to install first +And we will use a nearest neighbor interpolation method to fit a surface through the data. This has the advantage that it will take the discontinuities into account. We will use the package [NearestNeighbors.jl](https://github.com/KristofferC/NearestNeighbors.jl) for this, which you may have to install first ```julia julia> using NearestNeighbors julia> kdtree = KDTree([lon'; lat']) julia> idxs, dists = knn(kdtree, [Lon[:]'; Lat[:]'], 1, true) ``` -`idxs` contains the indices of the closest points to the grid in (Lon,Lat). Next +`idxs` contains the indices of the closest points to the grid in (Lon,Lat). Next ```julia julia> Depth = zeros(size(Lon))*km; julia> for i=1:length(idxs) @@ -102,7 +102,7 @@ Now, we can create a `GeoData` structure with the regular surface and save it to ```julia julia> data_Moho = GeoData(Lon, Lat, Depth, (MohoDepth=Depth,)) -julia> Write_Paraview(data_Moho, "Spada_Moho_combined") +julia> Write_Paraview(data_Moho, "Spada_Moho_combined") ``` The result is shown here, where the previous points are colored white and are a bit smaller. Obviously, the datasets coincide well. 
![DataPoints_Moho_surface](../assets/img/Tutorial_MohoSpada_Surface_Paraview.png) @@ -113,6 +113,3 @@ The full julia script that does it all is given [here](https://github.com/JuliaG ```julia julia> include("MohoTopo_Spada.jl") ``` - - - diff --git a/docs/src/man/tutorial_ProfileProcessing.md b/docs/src/man/tutorial_ProfileProcessing.md index 42129a93..490060cc 100644 --- a/docs/src/man/tutorial_ProfileProcessing.md +++ b/docs/src/man/tutorial_ProfileProcessing.md @@ -10,7 +10,7 @@ The function `save_GMG` is a convenient function to save `GeoData` structure (an Generally, in most applications we either have `Volume` (e.g., seismic tomography), `Surface` (Moho surface) or `Point` data (earthquake locations). Some special datasets in GMG are `Screenshot` data or `Topography` data, which we treat separately as graphical user interface often treat this in a different way. -A simple way to define all this info is by using the `GMG_Dataset` format. +A simple way to define all this info is by using the `GMG_Dataset` format. ```julia julia> data_EQ = GMG_Dataset("AlpArraySeis","Point","https://seafile.rlp.net/f/87d565882eda40689666/?dl=1", true) GMG Point Dataset (active) : AlpArraySeis @ https://seafile.rlp.net/f/87d565882eda40689666/?dl=1 @@ -49,27 +49,27 @@ julia> VolData_combined = combine_VolData(VolData, dataset_preferred = 1) ``` In this case, we take the dimensions of the first dataset and propject all other data onto that. You can customize that. 
-## Define profiles +## Define profiles Next, lets define profiles: ```julia julia> prof1 = ProfileData(start_lonlat=(5,45), end_lonlat=(15,49)); julia> prof2 = ProfileData(depth = -100) -Horizontal ProfileData - depth : -100.0 +Horizontal ProfileData + depth : -100.0 ``` -## Project data onto profiles +## Project data onto profiles You can project all data onto the profiles with: ```julia julia> ExtractProfileData!(prof1, VolData_combined, SurfData, PointData) julia> ExtractProfileData!(prof2, VolData_combined, SurfData, PointData) julia> prof1 Vertical ProfileData - lon/lat : (5.0, 45.0)-(15.0, 49.0) - # VolData : (:Hua2017_Vp, :Hua2017_dVp_perc, :Plomerova2022_Vp, :Plomerova2022_dVp, :x_profile) - # SurfData : (:Mrozek_Moho_Grid_EU,) - # PointData: (:AlpArraySeis,) + lon/lat : (5.0, 45.0)-(15.0, 49.0) + # VolData : (:Hua2017_Vp, :Hua2017_dVp_perc, :Plomerova2022_Vp, :Plomerova2022_dVp, :x_profile) + # SurfData : (:Mrozek_Moho_Grid_EU,) + # PointData: (:AlpArraySeis,) ``` By default we use 50 km wide band of the data to project pointdata. 
You can change that with: @@ -90,13 +90,13 @@ data_Surf = GMG_Dataset("Mrozek_Moho_Grid_EU","Surface","https://seafile.rlp.net data_EQ = GMG_Dataset("AlpArraySeis","Point","https://seafile.rlp.net/f/87d565882eda40689666/?dl=1", true) data_SS = GMG_Dataset("Handy_etal_SE_Profile1","Screenshot","https://seafile.rlp.net/f/5ffe580e765e4bd1bafe/?dl=1", true) -# Note: the volumetric datasets are choosen as they are smaller in size (less download) +# Note: the volumetric datasets are chosen as they are smaller in size (less download) data_Vol1 = GMG_Dataset("Hua2017","Volume","https://seafile.rlp.net/f/1fb68b74e5d742d39e62/?dl=1", true) data_Vol2 = GMG_Dataset("Plomerova2022","Volume","https://seafile.rlp.net/f/abccb8d3302b4ef5af17/?dl=1", true) #data_Vol1 = GMG_Dataset("Paffrath2021","Volume","https://seafile.rlp.net/f/5c8c851af6764b5db20d/?dl=1", true) #data_Vol2 = GMG_Dataset("Zhao2016","Volume","https://seafile.rlp.net/f/e81a6d075f6746609973/?dl=1", true) -# Now load these datasets into NamedTuples +# Now load these datasets into NamedTuples SurfData = load_GMG(data_Surf) PointData = load_GMG(data_EQ) ScreenshotData = load_GMG(data_SS) @@ -128,7 +128,7 @@ VolData_combined2 = combine_VolData(VolData, dims=(50,51,52)) VolData_combined3 = combine_VolData(VolData, lon=(1,22), lat=(40,52), dims=(50,51,52)) @test isnan(VolData_combined3.fields.Hua2017_Vp[1000]) -# Define horizonal & vertical profiles +# Define horizontal & vertical profiles prof1 = ProfileData(start_lonlat=(5,45), end_lonlat=(15,49)) prof2 = ProfileData(depth = -100) prof3 = ProfileData(start_lonlat=(5,45), end_lonlat=(5,49)) @@ -148,7 +148,7 @@ GeophysicalModelGenerator.CreateProfileVolume!(prof1, VolData_combined1, Depth_ GeophysicalModelGenerator.CreateProfileSurface!(prof1,SurfData) @test prof1.SurfData[1].fields.MohoDepth[80] ≈ -37.58791461075397km -# dito with EQ data: +# ditto with EQ data: GeophysicalModelGenerator.CreateProfilePoint!(prof1,PointData, section_width=5km) 
GeophysicalModelGenerator.CreateProfilePoint!(prof4,PointData, section_width=10km) @test length(prof1.PointData[1].lon) == 13 @@ -192,4 +192,3 @@ profile_backwards_compat = ExtractProfileData("test_files/PickedProfiles.txt",1, @test length(profile_backwards_compat.PointData[1].lon) == 440 --> - diff --git a/docs/src/man/tutorial_Screenshot_To_Paraview.md b/docs/src/man/tutorial_Screenshot_To_Paraview.md index ddc734cb..7278c2a6 100644 --- a/docs/src/man/tutorial_Screenshot_To_Paraview.md +++ b/docs/src/man/tutorial_Screenshot_To_Paraview.md @@ -1,12 +1,12 @@ -# Import profiles/maps from published papers +# Import profiles/maps from published papers ## Goal -Ideally, all data should be availabe in digital format, after which you could use the tools described in the other tutorial to transform them into `GeoData` and export them to VTK. +Ideally, all data should be available in digital format, after which you could use the tools described in the other tutorial to transform them into `GeoData` and export them to VTK. Yet, the reality is different and often data is not (yet) available, or papers are old and the authors can no longer be contacted. For that reason, `GeophysicalModelGenerator` has tools that allow you to transfer a screenshot from any published paper into `GeoData/Paraview` and see it in 3D at the correct geographic location. This can be done for vertical profiles and for mapviews, which gives you a quick and easy way to see those papers in a new (3D) light. -Here, we explain how. +Here, we explain how. 
- [Import profiles/maps from published papers](#import-profilesmaps-from-published-papers) - [Goal](#goal) - [General procedure](#general-procedure) @@ -50,7 +50,7 @@ Extracting GeoData from: Lippitsch_Fig13a.png └ lower right = (17.23 , 43.8 , -400.0 ) └ upper left = (4.65 , 45.73 , 0.0 ) └ upper right = (17.23 , 43.8 , 0.0 ) -GeoData +GeoData size : (325, 824, 1) lon ϵ [ 4.6499999999999995 : 17.230000000000004] lat ϵ [ 43.79999999999999 : 45.730000000000004] @@ -59,7 +59,7 @@ GeoData ``` Finally, you save it in Paraview format as always: ```julia -julia> Write_Paraview(data_profile1, "Lippitsch_Fig13a_profile") +julia> Write_Paraview(data_profile1, "Lippitsch_Fig13a_profile") ``` You can open this in paraview. Here, it is shown along with topographic data (made transparent): @@ -81,10 +81,10 @@ Corner_UpperRight = (15.5, 50.0 , -150.0) Corner_LowerRight = (15.5, 43.0 , -150.0) Corner_UpperLeft = (3.5 , 50.0 , -150.0) data_Fig13_map = Screenshot_To_GeoData("Fig13_mapview.png",Corner_LowerLeft, Corner_UpperRight, Corner_LowerRight=Corner_LowerRight,Corner_UpperLeft=Corner_UpperLeft) -Write_Paraview(data_Fig13_map, "Lippitsch_Fig13_mapview") +Write_Paraview(data_Fig13_map, "Lippitsch_Fig13_mapview") ``` -Once added to paraview (together with a few additional map views from the same paper): +Once added to paraview (together with a few additional map views from the same paper): ![Tutorial_ScreenShots_Lippitsch_4](../assets/img/Tutorial_ScreenShots_Lippitsch_4.png) #### 4. 
Using an automatic digitizer to pick points on map @@ -112,10 +112,10 @@ The general approach is simple: open a multiblock file, and pass the filename to An example showing you how this works is: ```julia julia> vtmfile = vtk_multiblock("Lippitsch_CrossSections") -julia> Write_Paraview(data_Fig12_90km, vtmfile) -julia> Write_Paraview(data_Fig12_180km, vtmfile) -julia> Write_Paraview(data_Fig12_300km, vtmfile) -julia> Write_Paraview(data_Fig12_400km, vtmfile) +julia> Write_Paraview(data_Fig12_90km, vtmfile) +julia> Write_Paraview(data_Fig12_180km, vtmfile) +julia> Write_Paraview(data_Fig12_300km, vtmfile) +julia> Write_Paraview(data_Fig12_400km, vtmfile) julia> vtk_save(vtmfile) ``` Note that you have to create the cross-sections first (see the julia script below). @@ -127,4 +127,3 @@ The full julia script that interprets all the figures is given [here](https://gi ```julia julia> include("Lippitsch_Screenshots.jl") ``` - diff --git a/docs/src/man/tutorial_loadirregular3DSeismicData.md b/docs/src/man/tutorial_loadirregular3DSeismicData.md index 2a9de39c..3a5575e1 100644 --- a/docs/src/man/tutorial_loadirregular3DSeismicData.md +++ b/docs/src/man/tutorial_loadirregular3DSeismicData.md @@ -9,11 +9,11 @@ As the data is not given in a regular lon/lat grid, we first interpolate it to a ## Steps -#### 1. Download data +#### 1. Download data The data is can be downloaded from [https://www.seismologie.ifg.uni-kiel.de/en/research/research-data/mere2020model](https://www.seismologie.ifg.uni-kiel.de/en/research/research-data/mere2020model). Do that and start julia from the directory where it was downloaded. #### 2. Read data into Julia -The main data-file, `El-Sharkawy-etal-G3.2020_MeRE2020_Mediterranean.csv`, has 23 lines of comments (indicated with `#`), after which the data starts. We can use the build-in package `DelimitedFiles` to read in the data, and tell it that the data is seperated by `|`. 
We also want the resulting data to be stored as double precision values (`Float64`), and the end of every line is a linebreak (`\n`). +The main data-file, `El-Sharkawy-etal-G3.2020_MeRE2020_Mediterranean.csv`, has 23 lines of comments (indicated with `#`), after which the data starts. We can use the build-in package `DelimitedFiles` to read in the data, and tell it that the data is separated by `|`. We also want the resulting data to be stored as double precision values (`Float64`), and the end of every line is a linebreak (`\n`). ```julia-repl julia> using DelimitedFiles julia> data=readdlm("El-Sharkawy-etal-G3.2020_MeRE2020_Mediterranean.csv",'|',Float64,'\n', skipstart=23,header=false) @@ -27,7 +27,7 @@ julia> data=readdlm("El-Sharkawy-etal-G3.2020_MeRE2020_Mediterranean.csv",'|',Fl 40.26 -10.98 50.0 4.36 42.19 -10.97 50.0 4.38 44.1 -10.97 50.0 4.38 - ⋮ + ⋮ ``` Next, extract vectors from it: ``` @@ -55,7 +55,7 @@ The result looks like this: So this is somewhat regular but not entirely and in some areas data points are missing. It is possible to create a VTK mesh that exactly respects this data, but for that we need knowledge on how the points are connected in 3D. The comments in the file do not provide this information, which is why we interpolate it on a regular lon/lat grid here which uses the same depth levels as in the data. -We extract the available depth levels with +We extract the available depth levels with ```julia julia> Depth_vec = unique(depth) 301-element Vector{Float64}: @@ -78,7 +78,7 @@ which shows that the data set goes from `[-350:1:-50]`. 
Let's create a regular grid, which describes a somewhat smaller area than the data-points to ensure that we can do an interpolation without having to extrapolate ```julia -julia> using GeophysicalModelGenerator +julia> using GeophysicalModelGenerator julia> Lon,Lat,Depth = LonLatDepthGrid(-10:0.5:40,32:0.25:50,Depth_vec); julia> size(Lon) (101, 73, 301) @@ -93,9 +93,9 @@ julia> scatter!(Lon[:,:,1],Lat[:,:,1],color=:white, markersize=1.5, markertype=" #### 3.2 Interpolate to a regular grid -Next, we need a method to interpolate the irregular datapoints @ a certain depth level to the white data points. +Next, we need a method to interpolate the irregular datapoints @ a certain depth level to the white data points. -There are a number of ways to do this, for example by employing [GMT.jl](https://github.com/GenericMappingTools/GMT.jl), or by using [GeoStats.jl](https://juliaearth.github.io/GeoStats.jl/stable/index.html). +There are a number of ways to do this, for example by employing [GMT.jl](https://github.com/GenericMappingTools/GMT.jl), or by using [GeoStats.jl](https://juliaearth.github.io/GeoStats.jl/stable/index.html). In this example, we will employ GeoStats. If you haven't installed it yet, do that with ```julia julia> ] @@ -132,10 +132,10 @@ julia> P = EstimationProblem(Geo, Cgrid, :Vs) data: 12278 MeshData{2,Float64} domain: 101×73 CartesianGrid{2,Float64} variables: Vs (Float64) -julia> S = IDW(:Vs => (distance=Euclidean(),neighbors=2)); +julia> S = IDW(:Vs => (distance=Euclidean(),neighbors=2)); julia> sol = solve(P, S) ``` -Here, we interpolated the data based on the Euclidean distance. Other methods, such as Kriging, can be used as well. +Here, we interpolated the data based on the Euclidean distance. Other methods, such as Kriging, can be used as well. 
Next, we can extract the data ```julia julia> sol_Vs = values(sol).Vs @@ -153,7 +153,7 @@ julia> for iz=1:size(Depth,3) coord = PointSet([lon[ind]'; lat[ind]']) Geo = georef((Vs=Vs[ind],), coord) P = EstimationProblem(Geo, Cgrid, :Vs) - S = IDW(:Vs => (distance=Euclidean(),neighbors=2)); + S = IDW(:Vs => (distance=Euclidean(),neighbors=2)); sol = solve(P, S) sol_Vs= values(sol).Vs Vs_2D = reshape(sol_Vs, size(domain(sol))) @@ -162,16 +162,16 @@ julia> for iz=1:size(Depth,3) ``` #### 4. Generate Paraview file -Once the 3D velocity matrix has been generated, producing a Paraview file is done with the following command +Once the 3D velocity matrix has been generated, producing a Paraview file is done with the following command ```julia julia> using GeophysicalModelGenerator -julia> Data_set = GeoData(Lon,Lat,Depth,(Vs_km_s=Vs_3D,)) -GeoData +julia> Data_set = GeoData(Lon,Lat,Depth,(Vs_km_s=Vs_3D,)) +GeoData size : (101, 73, 301) lon ϵ [ -10.0 - 40.0] lat ϵ [ 32.0 - 50.0] depth ϵ [ -350.0 km - -50.0 km] - fields: (:Vs_km_s,) + fields: (:Vs_km_s,) julia> Write_Paraview(Data_set, "MeRe_ElSharkawy") 1-element Vector{String}: "MeRe_ElSharkawy.vts" @@ -195,6 +195,3 @@ The full julia script that does it all is given [here](https://github.com/JuliaG ```julia julia> include("MeRe_ElSharkawy.jl") ``` - - - diff --git a/docs/src/man/tutorial_loadregular3DSeismicData_netCDF.md b/docs/src/man/tutorial_loadregular3DSeismicData_netCDF.md index 802f5bdc..fb68aeef 100644 --- a/docs/src/man/tutorial_loadregular3DSeismicData_netCDF.md +++ b/docs/src/man/tutorial_loadregular3DSeismicData_netCDF.md @@ -7,7 +7,7 @@ El-Sharkawy et al. (2020), *The Slab Puzzle of the Alpine‐Mediterranean Region ## Steps -#### 1. Download data +#### 1. Download data The data is can be downloaded from [https://ds.iris.edu/files/products/emc/emc-files/El-Sharkawy-etal-G3.2020-MeRE2020-Mediterranean-0.0.nc](https://ds.iris.edu/files/products/emc/emc-files/El-Sharkawy-etal-G3.2020-MeRE2020-Mediterranean-0.0.nc). 
Do that and start julia from the directory where it was downloaded. #### 2. Read data into Julia @@ -27,79 +27,79 @@ julia> ncinfo("El-Sharkawy-etal-G3.2020-MeRE2020-Mediterranean-0.0.nc") ##### Dimensions ##### -Name Length +Name Length -------------------------------------------------------------------------------------------------------------------------- -depth 301 -latitude 100 -longitude 100 +depth 301 +latitude 100 +longitude 100 ##### Variables ##### -Name Type Dimensions +Name Type Dimensions -------------------------------------------------------------------------------------------------------------------------- -depth FLOAT depth -latitude FLOAT latitude -longitude FLOAT longitude -Vs FLOAT longitude latitude depth +depth FLOAT depth +latitude FLOAT latitude +longitude FLOAT longitude +Vs FLOAT longitude latitude depth ##### Attributes ##### -Variable Name Value +Variable Name Value -------------------------------------------------------------------------------------------------------------------------- -global author_email amr.elsharkawy@ifg.uni-kiel.de -global data_revision r.0.0 +global author_email amr.elsharkawy@ifg.uni-kiel.de +global data_revision r.0.0 global author_institution Institute of Geosciences, University of Kiel, Otto-Hahn Pl.. global keywords seismic tomography, shear wave, Mediterranean, phase veloc.. global acknowledgment Model was provided by Dr. El-Sharkawy, Institute of Geosci.. global history 2020-09-29 14:37:43 UTC Converted to netCDF by GeoCSV_2_ne.. -global repository_pid doi:10.17611/dp/emc.2020.meresvelsh.1 -global id MeRE2020 -global author_name Amr EL-Sharkawy -global comment model converted to netCDF by IRIS EMC +global repository_pid doi:10.17611/dp/emc.2020.meresvelsh.1 +global id MeRE2020 +global author_name Amr EL-Sharkawy +global comment model converted to netCDF by IRIS EMC global NCO netCDF Operators version 4.7.5 (Homepage = http://nco.sf.n.. global summary MeRE2020 is a high-resolution Shear‐wave velocity mod.. 
-global repository_institution IRIS DMC -global netCDF_file El-Sharkawy-etal-G3.2020-MeRE2020-Mediterranean-1.0.nc -global author_url https://www.seismologie.ifg.uni-kiel.de -global reference El-Sharkawy, et al. (2020) -global repository_name EMC -global Conventions CF-1.0 -global Metadata_Conventions Unidata Dataset Discovery v1.0 +global repository_institution IRIS DMC +global netCDF_file El-Sharkawy-etal-G3.2020-MeRE2020-Mediterranean-1.0.nc +global author_url https://www.seismologie.ifg.uni-kiel.de +global reference El-Sharkawy, et al. (2020) +global repository_name EMC +global Conventions CF-1.0 +global Metadata_Conventions Unidata Dataset Discovery v1.0 global title The Slab Puzzle of the Alpine‐Mediterranean Region: I.. -depth units km -depth long_name depth in km -depth display_name depth in km -depth positive down -latitude units degrees_north -latitude long_name Latitude; positive north -latitude standard_name latitude -longitude units degrees_east -longitude long_name Longitude; positive east -longitude standard_name longitude -Vs units km.s-1 -Vs long_name Shear wave velocity -Vs display_name S Velocity (km/s) +depth units km +depth long_name depth in km +depth display_name depth in km +depth positive down +latitude units degrees_north +latitude long_name Latitude; positive north +latitude standard_name latitude +longitude units degrees_east +longitude long_name Longitude; positive east +longitude standard_name longitude +Vs units km.s-1 +Vs long_name Shear wave velocity +Vs display_name S Velocity (km/s) ``` As you can see, there is quite some information present in this file. 
The most important information here are the different variables stored in this file: ```julia-repl ##### Variables ##### -Name Type Dimensions +Name Type Dimensions -------------------------------------------------------------------------------------------------------------------------- -depth FLOAT depth -latitude FLOAT latitude -longitude FLOAT longitude -Vs FLOAT longitude latitude depth +depth FLOAT depth +latitude FLOAT latitude +longitude FLOAT longitude +Vs FLOAT longitude latitude depth ``` -Here we can see that there are four variables in this file, three of them (depth,latitude, longitude) having a single dimension and the fourth one (Vs) having dimensions of the three previous variables. The three one-dimensional vectors therefore denote a regualr grid of coordinates defining the locations where Vs is stored. -To load this data, we can now simply use the commmand *ncread*: +Here we can see that there are four variables in this file, three of them (depth,latitude, longitude) having a single dimension and the fourth one (Vs) having dimensions of the three previous variables. The three one-dimensional vectors therefore denote a regular grid of coordinates defining the locations where Vs is stored. +To load this data, we can now simply use the command *ncread*: ```julia-repl julia> lat = ncread(filename,"latitude") julia> lon = ncread(filename,"longitude") julia> depth = ncread(filename,"depth") julia> Vs_3D = ncread(filename,"Vs") -julia> depth = -1 .* depth +julia> depth = -1 .* depth ``` Note that we multiplied depth with -1. This is necessary to make depth to be negative, as that is what `GeophysicalModelGenerator.jl` expects. @@ -110,15 +110,15 @@ julia> using GeophysicalModelGenerator Lon3D,Lat3D,Depth3D = LonLatDepthGrid(lon, lat, depth); ``` #### 4. 
Generate Paraview file -Once the 3D coordinate matrix has been generated, producing a Paraview file is done with the following command +Once the 3D coordinate matrix has been generated, producing a Paraview file is done with the following command ```julia -julia> Data_set = GeoData(Lon3D,Lat3D,Depth3D,(Vs_km_s=Vs_3D,)) -GeoData +julia> Data_set = GeoData(Lon3D,Lat3D,Depth3D,(Vs_km_s=Vs_3D,)) +GeoData size : (100, 100, 301) lon ϵ [ 29.0 - 51.0] lat ϵ [ -11.0 - 45.9900016784668] depth ϵ [ -350.0 km - -50.0 km] - fields: (:Vs_km_s,) + fields: (:Vs_km_s,) julia> Write_Paraview(Data_set, "MeRe_ElSharkawy") 1-element Vector{String}: "MeRe_ElSharkawy.vts" @@ -141,6 +141,3 @@ The full julia script that does it all is given [here](https://github.com/JuliaG ```julia julia> include("MeRe_ElSharkawy.jl") ``` - - - diff --git a/docs/src/scripts/LaPalma.dat b/docs/src/scripts/LaPalma.dat index f64edccf..89ae7bf0 100644 --- a/docs/src/scripts/LaPalma.dat +++ b/docs/src/scripts/LaPalma.dat @@ -3,7 +3,7 @@ # Data is provided in the variables seg_x, seg_y, seg_z and includes: # * coordinates of the delimiters between the segments (n-1 points) # * number of cells (n points) -# * bias coefficients (n points) +# * bias coefficients (n points) #=============================================================================== @@ -40,11 +40,11 @@ unit_stress = 1e9 # relative geometry tolerance for grid manipulations (default 1e-9) gtol = 1e-9 - -# Number of cells for all segments + +# Number of cells for all segments nel_x = 64 - nel_y = 64 - nel_z = 32 + nel_y = 64 + nel_z = 32 # Coordinates of all segments (including start and end points) coord_x = -50 50 @@ -62,7 +62,7 @@ unit_stress = 1e9 surf_max_angle = 45.0 # maximum angle with horizon (smoothed if larger) surf_topo_file = LaPalma_Topo.topo # initial topography file (redundant) erosion_model = 0 # erosion model [0-none (default), 1-infinitely fast] - sediment_model = 0 # sedimentation model [0-none (dafault), 1-prescribed rate] + 
sediment_model = 0 # sedimentation model [0-none (default), 1-prescribed rate] #=============================================================================== # Boundary conditions @@ -79,7 +79,7 @@ unit_stress = 1e9 # Free surface top boundary flag open_top_bound = 1 - + #=============================================================================== # Jacobian & residual evaluation parameters #=============================================================================== @@ -154,7 +154,7 @@ unit_stress = 1e9 out_avd = 1 # activate AVD phase output out_avd_pvd = 1 # activate writing .pvd file out_avd_ref = 1 # AVD grid refinement factor - + # free surface output out_surf = 1 # activate surface output out_surf_pvd = 1 # activate writing .pvd file @@ -172,19 +172,19 @@ unit_stress = 1e9 Name = air ID = 0 rho = 100 - alpha = 3e-5 + alpha = 3e-5 - # Linear Viscosity + # Linear Viscosity eta = 1e17 # Elastic parameters G = 1e10 nu = 0.2 - - # Thermal parameters + + # Thermal parameters k = 30 Cp = 1000 - + # Plastic parameters ch = 10e6 fr = 30 @@ -195,19 +195,19 @@ unit_stress = 1e9 Name = water ID = 1 rho = 100 - alpha = 3e-5 + alpha = 3e-5 - # Linear Viscosity + # Linear Viscosity eta = 1e17 # Elastic parameters G = 1e10 nu = 0.2 - - # Thermal parameters + + # Thermal parameters k = 30 Cp = 1000 - + # Plastic parameters ch = 10e6 fr = 30 @@ -220,7 +220,7 @@ unit_stress = 1e9 ID = 2 rho = 2900 alpha = 3e-5 - + # dislocation viscosity #disl_prof = Plagioclase_An75-Ranalli_1995 #disl_prof = Wet_Quarzite-Ranalli_1995 @@ -230,18 +230,18 @@ unit_stress = 1e9 # Elastic parameters G = 3e10 nu = 0.2 - + # Thermal parameters k = 3 Cp = 1000 - + # Plastic parameters ch = 10e6 fr = 30 - - - + + + # ------------------- Mantle lithosphere------------------- Name = Mantle @@ -255,11 +255,11 @@ unit_stress = 1e9 # Elastic parameters G = 6.5e10 nu = 0.2 - + # Thermal parameters k = 3.3 Cp = 1000 - + # Plastic parameters ch = 10e6 fr = 30 @@ -318,9 +318,9 @@ unit_stress = 1e9 # 
PETSc options #=============================================================================== - + # SNES - + -snes_npicard 5 -snes_max_it 200 -snes_rtol 1e-6 @@ -328,11 +328,11 @@ unit_stress = 1e9 -snes_PicardSwitchToNewton_rtol 1e-7 #-7 #-snes_NewtonSwitchToPicard_it 1 # number of Newton iterations after which we switch back to Picard # -snes_monitor -# -snes_linesearch_monitor +# -snes_linesearch_monitor # Jacobian solver - -js_ksp_type fgmres - -js_ksp_monitor + -js_ksp_type fgmres + -js_ksp_monitor -js_ksp_max_it 30 -js_ksp_rtol 1e-5 -js_ksp_atol 1e-6 @@ -345,17 +345,13 @@ unit_stress = 1e9 -gmg_pc_mg_galerkin -gmg_pc_mg_type multiplicative -gmg_pc_mg_cycle_type v - + -gmg_mg_levels_ksp_type richardson -gmg_mg_levels_ksp_richardson_scale 0.5 -gmg_mg_levels_ksp_max_it 5 -gmg_mg_levels_pc_type jacobi -crs_pc_factor_mat_solver_package pastix - - - - - + diff --git a/docs/src/scripts/LaPalma_example.jl b/docs/src/scripts/LaPalma_example.jl index e1575367..4ce8b412 100644 --- a/docs/src/scripts/LaPalma_example.jl +++ b/docs/src/scripts/LaPalma_example.jl @@ -2,13 +2,13 @@ # # ## Aim # In this tutorial, your will learn how to use real data to create a geodynamic model setup with LaMEM. We will use the data of La Palma, which is a volcanic island that started erupting in mid september 2021. -# LaMEM is a cartesian geodynamic model, which implies that you will have to transfer the data from `GeoData` to `CartData`. +# LaMEM is a cartesian geodynamic model, which implies that you will have to transfer the data from `GeoData` to `CartData`. # # # ## 1. 
Load data # We will use two types of data to create the model -# 1) Topography -# 2) Earthquake locations +# 1) Topography +# 2) Earthquake locations # We start with loading the required packages using GeophysicalModelGenerator, JLD2 @@ -17,41 +17,41 @@ using GMT # In case you managed to install GMT on your machine, you can automatically download the topography with Topo = ImportTopo(lon = [-18.7, -17.1], lat=[28.0, 29.2], file="@earth_relief_03s.grd") -# In case you did not manage that, we have prepared a JLD2 file [here](https://seafile.rlp.net/f/03286a46a9f040a69b0f/?dl=1). +# In case you did not manage that, we have prepared a JLD2 file [here](https://seafile.rlp.net/f/03286a46a9f040a69b0f/?dl=1). # Download it, and doublecheck that you are in the same directory as the data file with: pwd() -# Load the data: +# Load the data: Topo=load("Topo_LaPalma.jld2","Topo") # Next, lets load the seismicity. We have prepared a file with earthquake locations up to early November (from [https://www.ign.es/web/ign/portal/vlc-catalogo](https://www.ign.es/web/ign/portal/vlc-catalogo)). # Download that [here](https://seafile.rlp.net/f/03286a46a9f040a69b0f/?dl=1) data_all_EQ = load("EQ_Data.jld2","data_all_EQ") -# Write the data to paraview with +# Write the data to paraview with Write_Paraview(data_all_EQ,"data_all_EQ",PointsData=true) Write_Paraview(Topo,"Topo") # As earthquakes are point-wise data, you have to specify this. # ![LaPalma_EQTopo_GeoData](../assets/img/TopoEQs_LaPalma_GeoData.png) -# Note that this data is not in "easy" coordinates (see coordinate axis in the plot, where `z` is *not* pointing upwards). +# Note that this data is not in "easy" coordinates (see coordinate axis in the plot, where `z` is *not* pointing upwards). -# ## 2. Convert data -# In order to create model setups, it is helpful to first transfer the data to Cartesian. +# ## 2. Convert data +# In order to create model setups, it is helpful to first transfer the data to Cartesian. 
# This requires us to first determine a *projection point*, that is fixed. Often, it is helpful to use the center of the topography for this. In the present example, we will center the model around La Palma itself: -proj = ProjectionPoint(Lon=-17.84, Lat=28.56) +proj = ProjectionPoint(Lon=-17.84, Lat=28.56) # Once this is done you can convert the topographic data to the cartesian reference frame EQ_cart = Convert2CartData(data_all_EQ, proj); Topo_cart = Convert2CartData(Topo, proj) -# It is important to realize that the cartesian coordinates of the topographic grid is no longer strictly orthogonal after this conversion. You don't notice that in the current example, as the model domain is rather small. -# In other cases, however, this is quite substantial (e.g., India-Asia collision zone). +# It is important to realize that the cartesian coordinates of the topographic grid is no longer strictly orthogonal after this conversion. You don't notice that in the current example, as the model domain is rather small. +# In other cases, however, this is quite substantial (e.g., India-Asia collision zone). # LaMEM needs an orthogonal grid of topography, which we can create with: Topo_LaMEM = CartData(XYZGrid(-70:.2:70,-60:.2:70,0)); # In a next step, the routine `ProjectCartData` projects a `GeoData` structure to a `CartData` struct -Topo_LaMEM = ProjectCartData(Topo_LaMEM, Topo, proj) +Topo_LaMEM = ProjectCartData(Topo_LaMEM, Topo, proj) # Let's have a look at the data: Write_Paraview(EQ_cart,"EQ_cart",PointsData=true) @@ -59,20 +59,20 @@ Write_Paraview(Topo_LaMEM,"Topo_LaMEM") # ## 3. Create LaMEM setup # -# In a next step, we need to read the LaMEM input file of the simulation. In particular, this will read the lateral dimensions of the grid and +# In a next step, we need to read the LaMEM input file of the simulation. In particular, this will read the lateral dimensions of the grid and # the number of control volumes (elements), you want to apply in every direction. 
-# The LaMEM input file can be downloaded [here](../scripts/LaPalma.dat). +# The LaMEM input file can be downloaded [here](../scripts/LaPalma.dat). # Make sure you are in the same directory as the *.dat file & execute the following command Grid = ReadLaMEM_InputFile("LaPalma.dat") -# The `LaMEM_grid` structure contains the number of elements in every direction and the number of markers in every cell. +# The `LaMEM_grid` structure contains the number of elements in every direction and the number of markers in every cell. # It also contains `Grid.X`, `Grid.Y`, `Grid.Z`, which are the coordinates of each of the markers in the 3 directions. -# -# In a next step we need to give each of these points a `Phase` number (which is an integer, that indicates the type of the rock that point has), as well as the temperature (in Celcius). +# +# In a next step we need to give each of these points a `Phase` number (which is an integer, that indicates the type of the rock that point has), as well as the temperature (in Celsius). Phases = ones(Int32,size(Grid.X))*2; Temp = ones(Float64,size(Grid.X)); -# In this example we set the temperature based on the depth, assuming a constant geotherm. Depth is given in kilometers +# In this example we set the temperature based on the depth, assuming a constant geotherm. Depth is given in kilometers Geotherm = 30; Temp = -Grid.Z.*Geotherm; @@ -86,7 +86,7 @@ ind = findall( Grid.Z .< -40); Phases[ind] .= 3; # Everything above the free surface is assumed to be "air" -ind = AboveSurface(Grid, Topo_cart); +ind = AboveSurface(Grid, Topo_cart); Phases[ind] .= 0; # And all "air" points that are below sea-level becomes "water" @@ -94,9 +94,9 @@ ind = findall( (Phases.==0) .& (Grid.Z .< 0)); Phases[ind] .= 1; # You can interpret the seismicity in different ways. If you assume that there are fully molten magma chambers, there should be no earthquakes within the magma chamber (as stresses are liklely to be very small). 
-# If, however, this is a partially molten mush, extraction of melt of that mush will change the fluid pressure and may thus release build-up stresses. +# If, however, this is a partially molten mush, extraction of melt of that mush will change the fluid pressure and may thus release build-up stresses. # We will assume the second option in our model setup. -# +# # Looking at the seismicity, there is a swarm at around 35 km depth AddSphere!(Phases,Temp,Grid, cen=(0,0,-35), radius=5, phase=ConstantPhase(5), T=ConstantTemp(1200)); @@ -122,4 +122,4 @@ Save_LaMEMMarkersParallel(Model3D) #src Note: The markdown page is generated using: -#src Literate.markdown("LaPalma_example.jl",outputdir="../man/", execute=true) \ No newline at end of file +#src Literate.markdown("LaPalma_example.jl",outputdir="../man/", execute=true) diff --git a/src/Visualisation.jl b/ext/GLMakie_Visualisation.jl similarity index 95% rename from src/Visualisation.jl rename to ext/GLMakie_Visualisation.jl index 162c9121..6184ce5c 100644 --- a/src/Visualisation.jl +++ b/ext/GLMakie_Visualisation.jl @@ -1,8 +1,22 @@ +module GLMakie_Visualisation # This contains visualisation widgets which are optionally made available when GLMakie is loaded along with GMG -using .GLMakie, Statistics + +using Statistics +using GeophysicalModelGenerator: LonLatDepthGrid, GeoData, CartData, km + +# We do not check `isdefined(Base, :get_extension)` as recommended since +# Julia v1.9.0 does not load package extensions when their dependency is +# loaded from the main environment. 
+if VERSION >= v"1.9.1" + using GLMakie +else + using ..GLMakie +end export Visualise +println("Loading GLMakie extensions for GMG") + """ Visualise(DataSet; Topography=Topo_Data, Topo_range=nothing) @@ -254,4 +268,7 @@ function Visualise(Data; Topography=nothing, Topo_range=nothing) display(fig) return nothing +end + + end \ No newline at end of file diff --git a/src/GMT_utils.jl b/ext/GMT_utils.jl similarity index 53% rename from src/GMT_utils.jl rename to ext/GMT_utils.jl index 890b72ac..65987fd7 100644 --- a/src/GMT_utils.jl +++ b/ext/GMT_utils.jl @@ -1,12 +1,26 @@ -# NOTE: these are useful routines that are only loaded when the GMT package is already loaded in the REPL -using .GMT +# NOTE: these are useful routines that are only made available when the GMT package is already loaded in the REPL +module GMT_utils + +import GeophysicalModelGenerator: ImportTopo, ImportGeoTIFF + +# We do not check `isdefined(Base, :get_extension)` as recommended since +# Julia v1.9.0 does not load package extensions when their dependency is +# loaded from the main environment. +if VERSION >= v"1.9.1" + using GMT +else + using ..GMT +end + +using GeophysicalModelGenerator: LonLatDepthGrid, GeoData, UTMData, km, remove_NaN_Surface! 
+ +println("Loading GMT routines within GMG") -export ImportTopo """ Topo = ImportTopo(limits; file::String="@earth_relief_01m.grd") -Uses GMT to download the topography of a certain region, specified with limits=[lon_min, lon_max, lat_min, lat_max] +Uses `GMT` to download the topography of a certain region, specified with limits=[lon_min, lon_max, lat_min, lat_max] Note: ==== @@ -31,7 +45,7 @@ Note: | "@earth\\_relief\\_30m" | 30 arc min | ETOPO1 after Gaussian spherical filtering (55 km fullwidth) | | "@earth\\_relief\\_60m" | 60 arc min | ETOPO1 after Gaussian spherical filtering (111 km fullwidth)| -*Note*: this routine is only available once the GMT.jl package is loaded on the REPL +*Note*: this routine is only available once the GMT.jl package is loaded in the REPL # Example ```julia-repl @@ -90,3 +104,80 @@ julia> Topo = ImportTopo(lon=(-50, -40), lat=(-10,-5), file="@earth_relief_30s.g """ ImportTopo(; lat=[37,49], lon=[4,20], file::String="@earth_relief_01m.grd") = ImportTopo([lon[1],lon[2], lat[1], lat[2]], file=file) + + +""" + data_GMT = ImportGeoTIFF(fname::String; fieldname=:layer1, negative=false, iskm=true, NorthernHemisphere=true, constantDepth=false, removeNaN_z=false, removeNaN_field=false) + +This imports a GeoTIFF dataset (usually containing a surface of some sort) using GMT. +The file should either have `UTM` coordinates of `longlat` coordinates. If it doesn't, you can +use QGIS to convert it to `longlat` coordinates. 
+ +Optional keywords: +- `fieldname` : name of the field (default=:layer1) +- `negative` : if true, the depth is multiplied by -1 (default=false) +- `iskm` : if true, the depth is multiplied by 1e-3 (default=true) +- `NorthernHemisphere`: if true, the UTM zone is set to be in the northern hemisphere (default=true); only relevant if the data uses UTM projection +- `constantDepth`: if true we will not warp the surface by z-values, but use a constant value instead +- `removeNaN_z` : if true, we will remove NaN values from the z-dataset +""" +function ImportGeoTIFF(fname::String; fieldname=:layer1, negative=false, iskm=true, NorthernHemisphere=true, constantDepth=false, removeNaN_z=false, removeNaN_field=false) + G = gmtread(fname); + + # Transfer to GeoData + nx,ny = length(G.x)-1, length(G.y)-1 + Lon,Lat,Depth = LonLatDepthGrid(G.x[1:nx],G.y[1:ny],0); + if hasfield(typeof(G),:z) + Depth[:,:,1] = G.z'; + if negative + Depth[:,:,1] = -G.z'; + end + if iskm + Depth *= 1e-3*km; + end + end + + # Create GeoData structure + data = zero(Lon) + if hasfield(typeof(G),:z) + data = Depth + + elseif hasfield(typeof(G),:image) + if length(size(G.image)) == 3 + data = permutedims(G.image,[2, 1, 3]); + elseif length(size(G.image)) == 2 + data[:,:,1] = G.image' + end + + end + + if removeNaN_z + remove_NaN_Surface!(Depth, Lon, Lat) + end + if removeNaN_field + remove_NaN_Surface!(data, Lon, Lat) + end + data_field = NamedTuple{(fieldname,)}((data,)); + + if constantDepth + Depth = zero(Lon) + end + + if contains(G.proj4,"utm") + zone = parse(Int64,split.(split(G.proj4,"zone=")[2]," ")[1]); # retrieve UTM zone + data_GMT = UTMData(Lon, Lat, Depth, zone, NorthernHemisphere, data_field) + + elseif contains(G.proj4,"longlat") + data_GMT = GeoData(Lon, Lat, Depth, data_field) + + else + error("I'm sorry, I don't know how to handle this projection yet: $(G.proj4)\n + We recommend that you transfer your GeoTIFF to longlat by using QGIS \n + Open the GeoTIFF there and Export -> Save As , 
while selecting \"EPSG:4326 - WGS 84\" projection.") + end + + return data_GMT +end + + +end diff --git a/markers/mdb.00000000.dat b/markers/mdb.00000000.dat new file mode 100644 index 00000000..21644636 Binary files /dev/null and b/markers/mdb.00000000.dat differ diff --git a/src/GeophysicalModelGenerator.jl b/src/GeophysicalModelGenerator.jl index 8cb0d613..96f7534a 100644 --- a/src/GeophysicalModelGenerator.jl +++ b/src/GeophysicalModelGenerator.jl @@ -1,28 +1,26 @@ module GeophysicalModelGenerator using Base: String, show_index, Tuple, FieldDescStorage -using Requires - # Load & export some useful commands/functions from GeoParams: import GeoParams using .GeoParams -export - @u_str, uconvert, upreffered, unit, ustrip, NoUnits, # Units - GeoUnit, GEO_units, SI_units, NO_units, AbstratGeoUnits, +export + @u_str, uconvert, upreffered, unit, ustrip, NoUnits, # Units + GeoUnit, GEO_units, SI_units, NO_units, AbstractGeoUnits, Nondimensionalize, Nondimensionalize!, Dimensionalize, Dimensionalize!, - superscript, upreferred, GEO, SI, NONE, isDimensional, + superscript, upreferred, GEO, SI, NONE, isDimensional, km, m, cm, mm, Myrs, yr, s, MPa, Pa, Pas, K, C, kg, mol, isDimensional, Value, NumValue, Unit, UnitValue export ReadCSV_LatLon, meshgrid, voxGrav -abstract type AbstractGeneralGrid end # general grid types +abstract type AbstractGeneralGrid end # general grid types export AbstractGeneralGrid # julia standard library packages -using DelimitedFiles, Statistics +using DelimitedFiles, Statistics # other packages using WriteVTK, Colors, MeshIO, FileIO, Interpolations, Geodesy @@ -46,17 +44,24 @@ include("ProfileProcessing.jl") include("IO.jl") # Add optional routines (only activated when the packages are loaded) -function __init__() - @require GMT = "5752ebe1-31b9-557e-87aa-f909b540aa54" begin - println("Loading GMT routines within GMG") - @eval include("./GMT_utils.jl") - end - @require GLMakie = "e9467ef8-e4e7-5192-8a1a-b1aee30e663a" begin - println("Loading 
GLMakie plotting routines within GMG") - @eval include("./Visualisation.jl") - end -end +# GMT routines + +""" +Optional routine that imports topography. It requires you to load `GMT` +""" +function ImportTopo end +function ImportGeoTIFF end +export ImportTopo, ImportGeoTIFF + +# GLMakie routines + +""" +Interactive widget that allows you to explore a 3D data set `DataSet` in an interactive manner. +It requires you to load `GLMakie` +""" +function Visualise end +export Visualise end diff --git a/src/LaMEM_io.jl b/src/LaMEM_io.jl index 8b92e3ed..bad3ec91 100644 --- a/src/LaMEM_io.jl +++ b/src/LaMEM_io.jl @@ -4,9 +4,9 @@ using Glob using Interpolations # LaMEM I/O -# +# # These are routines that help to create a LaMEM marker files from a ParaviewData structure, which can be used to perform geodynamic simulations -# We also include routines with which we can read LaMEM *.pvtr files into julia +# We also include routines with which we can read LaMEM *.pvtr files into julia export LaMEM_grid, ReadLaMEM_InputFile export Save_LaMEMMarkersParallel, Save_LaMEMTopography @@ -21,47 +21,47 @@ struct LaMEM_grid <: AbstractGeneralGrid nmark_y :: Int64 nmark_z :: Int64 # total number of markers - nump_x :: Int64 + nump_x :: Int64 nump_y :: Int64 nump_z :: Int64 # total number of elements in grid nel_x :: Int64 nel_y :: Int64 nel_z :: Int64 - # exent of the grid + # extent of the grid W :: Float64 L :: Float64 H :: Float64 # start and end coordinates of grid segments - coord_x - coord_y + coord_x + coord_y coord_z # 1D vectors with marker coordinates x_vec y_vec z_vec # grid with marker coordinates - X - Y + X + Y Z # 1D vectors with node coordinates xn_vec yn_vec zn_vec # grid with node coordinates - Xn - Yn - Zn + Xn + Yn + Zn end -""" +""" ParaviewData(Grid::LaMEM_grid, fields::NamedTuple) Creates a `ParaviewData` struct from a LaMEM grid and from fields stored on that grid. Note that one needs to have a field `Phases` and optionally a field `Temp` to create LaMEM marker files. 
""" ParaviewData(Grid::LaMEM_grid, fields::NamedTuple) = ParaviewData(Grid.X, Grid.Y, Grid.Z, fields) -""" +""" CartData(Grid::LaMEM_grid, fields::NamedTuple) Creates a `CartData` struct from a LaMEM grid and from fields stored on that grid. Note that one needs to have a field `Phases` and optionally a field `Temp` to create LaMEM marker files. @@ -142,14 +142,14 @@ end """ - This parses a LaMEM command line argument string and checks if tyhe keyword exists there + This parses a LaMEM command line argument string and checks if the keyword exists there """ function ParseValue_CommandLineArgs(args,keyword,type, value) args_vec = split(args,"-"*keyword) - - if length(args_vec)==2 + + if length(args_vec)==2 # we found the keyword - args_vec_keyword = split(args_vec[2]) + args_vec_keyword = split(args_vec[2]) str = args_vec_keyword[1] # first block after keyword is what we want str_strip = replace(str, "," => " ") # in case we have an array of values value = parse.(type, split(str_strip)) # puts an array of values in a vector @@ -164,15 +164,15 @@ end """ - Grid::LaMEM_grid = ReadLaMEM_InputFile(file, args::Union{String,Nothing}=nothing) + Grid::LaMEM_grid = ReadLaMEM_InputFile(file, args::Union{String,Nothing}=nothing) Parses a LaMEM input file and stores grid information in the `Grid` structure. -Optionally, you can pass LaMEM command-line arguments as well. +Optionally, you can pass LaMEM command-line arguments as well. 
# Example 1 ```julia -julia> Grid = ReadLaMEM_InputFile("SaltModels.dat") -LaMEM Grid: +julia> Grid = ReadLaMEM_InputFile("SaltModels.dat") +LaMEM Grid: nel : (32, 32, 32) marker/cell : (3, 3, 3) markers : (96, 96, 96) @@ -183,8 +183,8 @@ z ϵ [-2.0 : 0.0] # Example 2 (with command-line arguments) ```julia -julia> Grid = ReadLaMEM_InputFile("SaltModels.dat", args="-nel_x 64 -coord_x -4,4") -LaMEM Grid: +julia> Grid = ReadLaMEM_InputFile("SaltModels.dat", args="-nel_x 64 -coord_x -4,4") +LaMEM Grid: nel : (64, 32, 32) marker/cell : (3, 3, 3) markers : (192, 96, 96) @@ -217,7 +217,7 @@ function ReadLaMEM_InputFile(file; args::Union{String,Nothing}=nothing ) bias_y = ParseValue_LaMEM_InputFile(file,"bias_y",Float64, args=args); bias_z = ParseValue_LaMEM_InputFile(file,"bias_z",Float64, args=args); - # compute infromation from file + # compute information from file W = coord_x[end]-coord_x[1]; L = coord_y[end]-coord_y[1]; H = coord_z[end]-coord_z[1]; @@ -236,15 +236,15 @@ function ReadLaMEM_InputFile(file; args::Union{String,Nothing}=nothing ) zn, z = Create1D_grid_vector(coord_z, nel_z, nmark_z, nseg_z, bias_z) # node grid - Xn,Yn,Zn = XYZGrid(xn, yn, zn); + Xn,Yn,Zn = XYZGrid(xn, yn, zn); # marker grid X,Y,Z = XYZGrid(x, y, z); - + # finish Grid Grid = LaMEM_grid( nmark_x, nmark_y, nmark_z, nump_x, nump_y, nump_z, - nel_x_tot, nel_y_tot, nel_z_tot, + nel_x_tot, nel_y_tot, nel_z_tot, W, L, H, coord_x, coord_y, coord_z, x, y, z, @@ -259,13 +259,13 @@ end Returns 1D coordinate vectors of grid points and of marker locations for a regular spacing """ function Create1D_grid_vector(coord::Vector{Float64}, nel::Int64, nmark::Int64, nseg::Union{Nothing, Int64}, bias::Union{Nothing, Float64}) - W = coord[end] - coord[1] - Δ = W / nel; + W = coord[end] - coord[1] + Δ = W / nel; xn = range(coord[1], coord[end], length=nel+1); # coordinates of the normals to the cells - + nump = nmark*nel - Δ_m = W / nump; - x = range(coord[1]+ Δ_m/2, coord[end] - Δ_m/2, length=nump); + Δ_m = W / 
nump; + x = range(coord[1]+ Δ_m/2, coord[end] - Δ_m/2, length=nump); return xn, x end @@ -370,12 +370,12 @@ end """ Save_LaMEMMarkersParallel(Grid::CartData; PartitioningFile=empty, directory="./markers", verbose=true, is64bit=false) -Saves a LaMEM marker file from the `CartData` structure `Grid`. It must have a field called `Phases`, holding phase information (as integers) and optionally a field `Temp` with temperature info. +Saves a LaMEM marker file from the `CartData` structure `Grid`. It must have a field called `Phases`, holding phase information (as integers) and optionally a field `Temp` with temperature info. It is possible to provide a LaMEM partitioning file `PartitioningFile`. If not, output is assumed to be for one processor. By default it is assumed that the partitioning file was generated on a 32bit PETSc installation. If `Int64` was used instead, set the flag. The size of `Grid` should be consistent with what is provided in the LaMEM input file. In practice, the size of the mesh can be retrieved from a LaMEM input file using `ReadLaMEM_InputFile`. 
-# Example +# Example ``` julia> Grid = ReadLaMEM_InputFile("LaMEM_input_file.dat") @@ -384,7 +384,7 @@ julia> Temp = ones(Float64,size(Grid.X)); julia> Model3D = CartData(Grid, (Phases=Phases,Temp=Temp)) julia> Save_LaMEMMarkersParallel(Model3D) Writing LaMEM marker file -> ./markers/mdb.00000000.dat -``` +``` If you want to create a LaMEM input file for multiple processors: ``` julia> Save_LaMEMMarkersParallel(Model3D, PartitioningFile="ProcessorPartitioning_4cpu_1.2.2.bin") @@ -400,22 +400,22 @@ function Save_LaMEMMarkersParallel(Grid::CartData; PartitioningFile=empty, direc x = ustrip.(Grid.x.val[:,1,1]); y = ustrip.(Grid.y.val[1,:,1]); z = ustrip.(Grid.z.val[1,1,:]); - + if haskey(Grid.fields,:Phases) - Phases = Grid.fields[:Phases]; + Phases = Grid.fields[:Phases]; else error("You must provide the field :Phases in the structure") end - + if haskey(Grid.fields,:Temp) - Temp = Grid.fields[:Temp]; + Temp = Grid.fields[:Temp]; else if verbose println("Field :Temp is not provided; setting it to zero") end Temp = zeros(size(Phases)); end - + if PartitioningFile==empty # in case we run this on 1 processor only Nprocx = 1; @@ -423,8 +423,8 @@ function Save_LaMEMMarkersParallel(Grid::CartData; PartitioningFile=empty, direc Nprocz = 1; xc,yc,zc = x,y,z; else - Nprocx,Nprocy,Nprocz, - xc,yc,zc, + Nprocx,Nprocy,Nprocz, + xc,yc,zc, nNodeX,nNodeY,nNodeZ = GetProcessorPartitioning(PartitioningFile, is64bit=is64bit) if verbose @show Nprocx,Nprocy,Nprocz, xc,yc,zc, nNodeX,nNodeY,nNodeZ @@ -448,7 +448,7 @@ function Save_LaMEMMarkersParallel(Grid::CartData; PartitioningFile=empty, direc # Loop over all processors partition for n=1:Nproc # Extract coordinates for current processor - + part_x = ustrip.(Grid.x.val[x_start[n]:x_end[n],y_start[n]:y_end[n],z_start[n]:z_end[n]]); part_y = ustrip.(Grid.y.val[x_start[n]:x_end[n],y_start[n]:y_end[n],z_start[n]:z_end[n]]); part_z = ustrip.(Grid.z.val[x_start[n]:x_end[n],y_start[n]:y_end[n],z_start[n]:z_end[n]]); @@ -459,9 +459,9 @@ 
function Save_LaMEMMarkersParallel(Grid::CartData; PartitioningFile=empty, direc # Information vector per processor num_prop = 5; # number of properties we save [x/y/z/phase/T] lvec_info = num_particles; - + lvec_prtcls = zeros(Float64,num_prop*num_particles); - + lvec_prtcls[1:num_prop:end] = part_x[:]; lvec_prtcls[2:num_prop:end] = part_y[:]; lvec_prtcls[3:num_prop:end] = part_z[:]; @@ -512,7 +512,7 @@ function get_numscheme(Nprocx,Nprocy,Nprocz) nix = zeros(Int64, Nprocx*Nprocy*Nprocz) njy = zeros(Int64, Nprocx*Nprocy*Nprocz) nkz = zeros(Int64, Nprocx*Nprocy*Nprocz) - + num=0; for k=1:Nprocz for j=1:Nprocy @@ -525,7 +525,7 @@ function get_numscheme(Nprocx,Nprocy,Nprocz) end end end - + return n,nix,njy,nkz end @@ -546,9 +546,9 @@ function PetscBinaryWrite_Vec(filename, A) write(f,hton(Float64(1211214))); # header (not actually used) write(f,hton(Float64(nummark))); # info about # of markers written - + for i=2:n - write(f,hton(Float64(A[i]))); # Write data itself + write(f,hton(Float64(A[i]))); # Write data itself end end @@ -560,7 +560,7 @@ end nProcX,nProcY,nProcZ, xc,yc,zc, nNodeX,nNodeY,nNodeZ = GetProcessorPartitioning(filename; is64bit=false) Reads a LaMEM processor partitioning file, used to create marker files, and returns the parallel layout. -By default this is done for a 32bit PETSc installation, which will fail if you actually use a 64bit version. +By default this is done for a 32bit PETSc installation, which will fail if you actually use a 64bit version. 
""" function GetProcessorPartitioning(filename; is64bit=false) @@ -571,7 +571,7 @@ function GetProcessorPartitioning(filename; is64bit=false) typ=Int32 end io = open(filename, "r") - + nProcX = ntoh(read(io,typ)) nProcY = ntoh(read(io,typ)) @@ -589,17 +589,17 @@ function GetProcessorPartitioning(filename; is64bit=false) xcoor = [ntoh(read(io,Float64)) for i=1:nNodeX].*CharLength; ycoor = [ntoh(read(io,Float64)) for i=1:nNodeY].*CharLength; zcoor = [ntoh(read(io,Float64)) for i=1:nNodeZ].*CharLength; - + xc = xcoor[iX .+ 1] yc = ycoor[iY .+ 1] zc = zcoor[iZ .+ 1] close(io) - return nProcX,nProcY,nProcZ, - xc,yc,zc, + return nProcX,nProcY,nProcZ, + xc,yc,zc, nNodeX,nNodeY,nNodeZ - + end @@ -609,7 +609,7 @@ end coord, Data_3D_Arrays, Name_Vec = ReadData_VTR(fname) Reads a VTR (structured grid) VTK file `fname` and extracts the coordinates, data arrays and names of the data. -In general, this only contains a piece of the data, and one should open a `*.pvtr` file to retrieve the full data +In general, this only contains a piece of the data, and one should open a `*.pvtr` file to retrieve the full data """ function ReadData_VTR(fname, FullSize) file = open(fname, "r") @@ -623,7 +623,7 @@ function ReadData_VTR(fname, FullSize) while header==true line = readline(file) - line_strip = lstrip(line) + line_strip = lstrip(line) if startswith(line_strip, "") @@ -644,32 +644,32 @@ function ReadData_VTR(fname, FullSize) end if startswith(line_strip, "") - line_strip = lstrip(readline(file)) + line_strip = lstrip(readline(file)) while ~startswith(line_strip, "") Type, Name, NumberOfComponents, Offset = Parse_VTR_Line(line_strip); num += 1 Offset_Vec = [Offset_Vec; Offset]; - Name_Vec = [Name_Vec; Name]; - Type_Vec = [Type_Vec; Type]; + Name_Vec = [Name_Vec; Name]; + Type_Vec = [Type_Vec; Type]; NumComp_Vec = [NumComp_Vec; NumberOfComponents]; - line_strip = lstrip(readline(file)) - end + line_strip = lstrip(readline(file)) + end end if startswith(line_strip, "") - line_strip = 
lstrip(readline(file)) + line_strip = lstrip(readline(file)) while ~startswith(line_strip, "") Type, Name, NumberOfComponents, Offset = Parse_VTR_Line(line_strip); num += 1 - + Offset_Vec = [Offset_Vec; Offset]; - Name_Vec = [Name_Vec; Name]; - Type_Vec = [Type_Vec; Type]; + Name_Vec = [Name_Vec; Name]; + Type_Vec = [Type_Vec; Type]; NumComp_Vec = [NumComp_Vec; NumberOfComponents]; - line_strip = lstrip(readline(file)) + line_strip = lstrip(readline(file)) # if we have cell Data, for some reason we need to increment this by one. PieceExtent[1:2:end] .+= 1 - end - + end + end if startswith(line_strip, " Data = ReadData_PVTR("Haaksbergen.pvtr", "./Timestep_00000005_3.35780500e-01/") -ParaviewData +ParaviewData size : (33, 33, 33) x ϵ [ -3.0 : 3.0] y ϵ [ -2.0 : 2.0] @@ -841,13 +841,13 @@ function ReadData_PVTR(fname, dir) while header==true line = readline(file) - line_strip = lstrip(line) + line_strip = lstrip(line) if startswith(line_strip, " 1 || length(unique(trunc.(diff(Topo.y.val[1,:,1]), digits=8))) > 1 x1 = ustrip(Topo.x.val[end,1,1]); y1 = ustrip(Topo.y.val[1,end,1]); dx = (x1-x0) / (nx-1); dy = (y1-y0) / (ny-1); - + itp = LinearInterpolation((Topo.x.val[:,1,1], Topo.y.val[1,:,1]), ustrip.(Topo.fields.Topography[:,:,1])); Topo_itp = [itp(x,y) for x in x0:dx:x1, y in y0:dy:y1]; @@ -961,7 +961,7 @@ function Save_LaMEMTopography(Topo::CartData, filename::String) dy = ustrip(Topo.y.val[2,2,1]) - y0 # Code the topograhic data into a vector Topo_vec = [ nx;ny;x0;y0;dx;dy; ustrip.(Topo.fields.Topography[:])] - end + end # Write as PetscBinary file PetscBinaryWrite_Vec(filename, Topo_vec) @@ -982,16 +982,16 @@ Likewise for the `mpiexec` directory (if not specified it is assumed to be avail function CreatePartitioningFile(LaMEM_input::String,NumProc::Int64; LaMEM_dir::String=pwd(), LaMEM_options="", MPI_dir="", verbose=true) # Create string to execute LaMEM - mpi_str = MPI_dir*"mpiexec -n $(NumProc) " + mpi_str = MPI_dir*"mpiexec -n $(NumProc) " LaMEM_str = 
LaMEM_dir*"/"*"LaMEM -ParamFile "*LaMEM_input*" -mode save_grid " str = mpi_str*LaMEM_str - + if verbose==true println("Executing command: $str") end # Run exit=run(`sh -c $str`, wait=false); - + # Retrieve newest file if success(exit) files=readdir(glob"ProcessorPartitioning_*.bin") @@ -1002,7 +1002,7 @@ function CreatePartitioningFile(LaMEM_input::String,NumProc::Int64; LaMEM_dir::S id = findall(time_modified.==maximum(time_modified)) # last modified PartFile = files[id] if verbose==true - println("Successfuly generated PartitioningFile: $(PartFile[1])") + println("Successfully generated PartitioningFile: $(PartFile[1])") end else error("Something went wrong with executing command ") diff --git a/src/ProfileProcessing.jl b/src/ProfileProcessing.jl index 13b737d3..2fb09fe3 100644 --- a/src/ProfileProcessing.jl +++ b/src/ProfileProcessing.jl @@ -1,4 +1,4 @@ -# +# # this is ProfileProcessing.jl # It contains functions and type definitions to gather selected data for given profiles @@ -7,7 +7,7 @@ export ExtractProfileData!, ReadPickedProfiles import Base: show """ -Structure that holds profile data (interpolated/projected on the profile) +Structure that holds profile data (interpolated/projected on the profile) struct ProfileData vertical :: Bool # vertical:true, horizontal:false @@ -69,7 +69,7 @@ function show(io::IO, g::ProfileData) if !isnothing(g.PointData) println(io, " PointData: $(keys(g.PointData)) ") end - + return nothing end @@ -78,26 +78,26 @@ end Structure that stores info about a GMG Dataset, which is useful to collect a wide variety of datasets. - Name :: String # Name of the dataset -- Type :: String # Volumetric, Surface, Point, Screenshot -- DirName :: String # Directory name or url of dataset +- Type :: String # Volumetric, Surface, Point, Screenshot +- DirName :: String # Directory name or url of dataset - active :: Bool # should this data be loaded or not? 
""" mutable struct GMG_Dataset Name :: String # Name of the dataset - Type :: String # Volumetric, Surface, Point, Screenshot - DirName :: String # Directory name or url of dataset + Type :: String # Volumetric, Surface, Point, Screenshot + DirName :: String # Directory name or url of dataset active :: Bool # active in the GUI or not? - function GMG_Dataset(Name::String,Type::String,DirName::String,active::Bool=false) + function GMG_Dataset(Name::String,Type::String,DirName::String,active::Bool=false) Type = strip(Type) Name = strip(Name) DirName = strip(DirName) - + if !any(occursin.(Type,["Volume","Surface","Point","Screenshot","Topography"])) error("Type should be either: Volume,Surface,Point,Topography or Screenshot. Is: $Type.") end - + if DirName[end-4:end] == ".jld2" DirName = DirName[1:end-5] end @@ -107,15 +107,15 @@ mutable struct GMG_Dataset end -# Print info +# Print info function show(io::IO, g::GMG_Dataset) if g.active str_act = "(active) :" else str_act = "(inactive):" - end + end print(io, "GMG $(g.Type) Dataset $str_act $(g.Name) @ $(g.DirName)") - + return nothing end @@ -144,7 +144,7 @@ Note that the first line of the file is skipped. Here, the meaning of the variables is: - `Name`: The name of the dataset to be loaded -- `Location`: the location of the file (directory and filename) on your local machine, or an url where we can download the file from the web. The url is expected to start with "http". +- `Location`: the location of the file (directory and filename) on your local machine, or an url where we can download the file from the web. The url is expected to start with "http". - `Type`: type of the dataset (Volume, Surface, Point, Screenshot) - `Active`: Do we want this file to be loaded or not? 
Optional parameter that defaults to `true` @@ -153,7 +153,7 @@ function Load_Dataset_file(file_datasets::String) datasets = readdlm(file_datasets,',',skipstart =1); # read information on datasets to be used from text file n = size(datasets,1) - # Deal with last collumn (in case it is not specified or not specified everywhere) + # Deal with last column (in case it is not specified or not specified everywhere) if size(datasets,2)==4 active = datasets[:,4] active = replace(active,""=>true) @@ -170,7 +170,7 @@ function Load_Dataset_file(file_datasets::String) return Datasets end -""" +""" Data = load_GMG(Datasets::Vector{GMG_Dataset}) This loads all the active datasets in `Datasets`, and returns a NamedTuple with Volume, Surface, Point, Screenshot and Topography data @@ -178,7 +178,7 @@ This loads all the active datasets in `Datasets`, and returns a NamedTuple with """ function load_GMG(Datasets::Vector{GMG_Dataset}) - + DataPoint = NamedTuple(); DataSurf = NamedTuple(); DataScreenshot = NamedTuple(); @@ -187,7 +187,7 @@ function load_GMG(Datasets::Vector{GMG_Dataset}) for data in Datasets if data.active # load into NamedTuple (I'm sure this can be done more compact somehow..) - loaded_data = load_GMG(data) + loaded_data = load_GMG(data) if data.Type=="Volume" DataVol = merge(DataVol,loaded_data) elseif data.Type=="Surface" @@ -211,7 +211,7 @@ end VolData_combined = combine_VolData(VolData::NamedTuple; lat=nothing, lon=nothing, depth=nothing, dims=(100,100,100), dataset_preferred = 1) -This takes different volumetric datasets (specified in `VolData`) & merges them into a single one. +This takes different volumetric datasets (specified in `VolData`) & merges them into a single one. You need to either provide the "reference" dataset within the NamedTuple (`dataset_preferred`), or the lat/lon/depth and dimensions of the new dataset. 
""" @@ -238,7 +238,7 @@ function combine_VolData(VolData::NamedTuple; lat=nothing, lon=nothing, depth=no names_fields = String.(keys(DataSet_interp.fields)) for (j,name) in enumerate(names_fields) name_new_field = DataSet_Names[i]*"_"*name # name of new field includes name of dataset - # Note: we use ustrip here, and thereby remove the values, as the cross-section routine made problems + # Note: we use ustrip here, and thereby remove the values, as the cross-section routine made problems DataSetRef = AddField(DataSetRef,name_new_field, ustrip.(DataSet_interp.fields[j])) end end @@ -253,11 +253,11 @@ end """ CreateProfileVolume!(Profile::ProfileData, VolData::AbstractGeneralGrid; DimsVolCross::NTuple=(100,100), Depth_extent=nothing) -Creates a cross-section through a volumetric 3D dataset `VolData` with the data supplied in `Profile`. `Depth_extent` can be the minimum & maximum depth for vertical profiles +Creates a cross-section through a volumetric 3D dataset `VolData` with the data supplied in `Profile`. 
`Depth_extent` can be the minimum & maximum depth for vertical profiles """ function CreateProfileVolume!(Profile::ProfileData, VolData::AbstractGeneralGrid; DimsVolCross::NTuple=(100,100), Depth_extent=nothing) - if Profile.vertical + if Profile.vertical # take a vertical cross section cross_tmp = CrossSection(VolData,dims=DimsVolCross, Start=Profile.start_lonlat,End=Profile.end_lonlat,Depth_extent=Depth_extent) # create the cross section @@ -269,7 +269,7 @@ function CreateProfileVolume!(Profile::ProfileData, VolData::AbstractGeneralGrid # take a horizontal cross section cross_tmp = CrossSection(VolData, Depth_level=Profile.depth, Interpolate=true, dims=DimsVolCross) end - + Profile.VolData = cross_tmp # assign to Profile data structure return nothing end @@ -278,15 +278,15 @@ end ### internal function to process surface data - contrary to the volume data, we here have to save lon/lat/depth pairs for every surface data set, so we create a NamedTuple of GeoData data sets function CreateProfileSurface!(Profile::ProfileData, DataSet::NamedTuple; DimsSurfCross=(100,)) num_datasets = length(DataSet) - - tmp = NamedTuple() # initialize empty one + + tmp = NamedTuple() # initialize empty one DataSetName = keys(DataSet) # Names of the datasets for idata = 1:num_datasets # load data set --> each data set is a single GeoData structure, so we'll only have to get the respective key to load the correct type data_tmp = DataSet[idata] - if Profile.vertical + if Profile.vertical # take a vertical cross section data = CrossSectionSurface(data_tmp, dims=DimsSurfCross, Start=Profile.start_lonlat, End=Profile.end_lonlat) # create the cross section @@ -296,16 +296,16 @@ function CreateProfileSurface!(Profile::ProfileData, DataSet::NamedTuple; DimsSu # add the data set as a NamedTuple data_NT = NamedTuple{(DataSetName[idata],)}((data,)) - tmp = merge(tmp,data_NT) + tmp = merge(tmp,data_NT) else - # we do not have this implemented + # we do not have this implemented #error("horizontal 
profiles not yet implemented") end end Profile.SurfData = tmp # assign to profile data structure - return + return end @@ -313,15 +313,15 @@ end ### function to process point data - contrary to the volume data, we here have to save lon/lat/depth pairs for every point data set function CreateProfilePoint!(Profile::ProfileData, DataSet::NamedTuple; section_width=50km) num_datasets = length(DataSet) - - tmp = NamedTuple() # initialize empty one + + tmp = NamedTuple() # initialize empty one DataSetName = keys(DataSet) # Names of the datasets for idata = 1:num_datasets # load data set --> each data set is a single GeoData structure, so we'll only have to get the respective key to load the correct type data_tmp = DataSet[idata] - if Profile.vertical + if Profile.vertical # take a vertical cross section data = CrossSectionPoints(data_tmp, Start=Profile.start_lonlat, End=Profile.end_lonlat, section_width = section_width) # create the cross section @@ -331,7 +331,7 @@ function CreateProfilePoint!(Profile::ProfileData, DataSet::NamedTuple; section_ # add the data set as a NamedTuple data_NT = NamedTuple{(DataSetName[idata],)}((data,)) - tmp = merge(tmp,data_NT) + tmp = merge(tmp,data_NT) else # take a horizontal cross section @@ -339,12 +339,12 @@ function CreateProfilePoint!(Profile::ProfileData, DataSet::NamedTuple; section_ # add the data set as a NamedTuple data_NT = NamedTuple{(DataSetName[idata],)}((data,)) - tmp = merge(tmp,data_NT) + tmp = merge(tmp,data_NT) end end Profile.PointData = tmp # assign to profile data structure - return + return end @@ -404,7 +404,7 @@ function ExtractProfileData(ProfileCoordFile::String,ProfileNumber::Int64,DataSe # load all Data VolData, SurfData, PointData, ScreenshotData, TopoData = load_GMG(Datasets_all) - + # merge VolData: VolData_combined = combine_VolData(VolData) @@ -419,7 +419,7 @@ end #= -# Boris: I don't know exactly in which format you have your current files; +# Boris: I don't know exactly in which format you have your 
current files; ### wrapper function to extract data for a single profile function ExtractProfileData(ProfileCoordFile,ProfileNumber,DataSetName,DataSetFile,DataSetType,DimsVolCross,DepthVol,DimsSurfCross,WidthPointProfile) @@ -460,7 +460,7 @@ end ### wrapper function to read the profile numbers+coordinates from a text file, the dataset names+locations+types from another text file ### once this is done, the different datasets are projected onto the profiles -### currently, the function is quite slow, as the different data sets are reloaded for each profile. +### currently, the function is quite slow, as the different data sets are reloaded for each profile. ### a faster way would be to load one data set and create the profiles from it and then move on to the next one. However, this would require to hold all the profile data in memory, which may be a bit much... function CreateProfileData(file_profiles,file_datasets;Depth_extent=(-300,0),DimsVolCross=(500,300),DimsSurfCross = (100,),WidthPointProfile = 20km) @@ -481,14 +481,12 @@ function CreateProfileData(file_profiles,file_datasets;Depth_extent=(-300,0),Dim # 2. process the profiles ExtractedData = ExtractProfileData(file_profiles,ProfileNumber[iprofile],DataSetName,DataSetFile,DataSetType,DimsVolCross,Depth_extent,DimsSurfCross,WidthPointProfile) - + # 3. 
save data as JLD2 fn = "Profile"*string(ProfileNumber[iprofile]) jldsave(fn*".jld2";ExtractedData) - + end end =# - - diff --git a/src/Setup_geometry.jl b/src/Setup_geometry.jl index af2092e4..c65cbc81 100644 --- a/src/Setup_geometry.jl +++ b/src/Setup_geometry.jl @@ -1,17 +1,17 @@ using Base: Int64, Float64, NamedTuple using Printf using Parameters # helps setting default parameters in structures -using SpecialFunctions: erfc +using SpecialFunctions: erfc # Setup_geometry -# +# # These are routines that help to create input geometries, such as slabs with a given angle # -export AddBox!, AddSphere!, AddEllipsoid!, AddCylinder!, +export AddBox!, AddSphere!, AddEllipsoid!, AddCylinder!, AddLayer!, makeVolcTopo, ConstantTemp, LinearTemp, HalfspaceCoolingTemp, SpreadingRateTemp, - ConstantPhase, LithosphericPhases, + ConstantPhase, LithosphericPhases, Compute_ThermalStructure, Compute_Phase @@ -35,17 +35,17 @@ Parameters - Origin - the origin, used to rotate the box around. Default is the left-front-top corner - StrikeAngle - strike angle of slab - DipAngle - dip angle of slab -- phase - specifies the phase of the box. See `ConstantPhase()`,`LithosphericPhases()` -- T - specifies the temperature of the box. See `ConstantTemp()`,`LinearTemp()`,`HalfspaceCoolingTemp()`,`SpreadingRateTemp()` +- phase - specifies the phase of the box. See `ConstantPhase()`,`LithosphericPhases()` +- T - specifies the temperature of the box. 
See `ConstantTemp()`,`LinearTemp()`,`HalfspaceCoolingTemp()`,`SpreadingRateTemp()` Examples ======== Example 1) Box with constant phase and temperature & a dip angle of 10 degrees: -```julia +```julia julia> Grid = ReadLaMEM_InputFile("test_files/SaltModels.dat") -LaMEM Grid: +LaMEM Grid: nel : (32, 32, 32) marker/cell : (3, 3, 3) markers : (96, 96, 96) @@ -56,38 +56,38 @@ julia> Phases = zeros(Int32, size(Grid.X)); julia> Temp = zeros(Float64, size(Grid.X)); julia> AddBox!(Phases,Temp,Grid, xlim=(0,500), zlim=(-50,0), phase=ConstantPhase(3), DipAngle=10, T=ConstantTemp(1000)) julia> Model3D = ParaviewData(Grid, (Phases=Phases,Temp=Temp)); # Create Cartesian model -julia> Write_Paraview(Model3D,"LaMEM_ModelSetup") # Save model to paraview +julia> Write_Paraview(Model3D,"LaMEM_ModelSetup") # Save model to paraview 1-element Vector{String}: - "LaMEM_ModelSetup.vts" + "LaMEM_ModelSetup.vts" ``` Example 2) Box with halfspace cooling profile -```julia +```julia julia> Grid = ReadLaMEM_InputFile("test_files/SaltModels.dat") julia> Phases = zeros(Int32, size(Grid.X)); julia> Temp = zeros(Float64, size(Grid.X)); julia> AddBox!(Phases,Temp,Grid, xlim=(0,500), zlim=(-50,0), phase=ConstantPhase(3), DipAngle=10, T=ConstantTemp(1000)) julia> Model3D = ParaviewData(Grid, (Phases=Phases,Temp=Temp)); # Create Cartesian model -julia> Write_Paraview(Model3D,"LaMEM_ModelSetup") # Save model to paraview +julia> Write_Paraview(Model3D,"LaMEM_ModelSetup") # Save model to paraview 1-element Vector{String}: - "LaMEM_ModelSetup.vts" + "LaMEM_ModelSetup.vts" ``` """ function AddBox!(Phase, Temp, Grid::AbstractGeneralGrid; # required input xlim=Tuple{2}, ylim=nothing, zlim=Tuple{2}, # limits of the box Origin=nothing, StrikeAngle=0, DipAngle=0, # origin & dip/strike phase = ConstantPhase(1), # Sets the phase number(s) in the box - T=nothing ) # Sets the thermal structure (various fucntions are available) - + T=nothing ) # Sets the thermal structure (various functions are available) + # 
Retrieve 3D data arrays for the grid X,Y,Z = coordinate_grids(Grid) - # Limits of block - if ylim==nothing - ylim = (minimum(Y), maximum(Y)) + # Limits of block + if ylim==nothing + ylim = (minimum(Y), maximum(Y)) end - - if Origin==nothing + + if Origin==nothing Origin = (xlim[1], ylim[1], zlim[2]) # upper-left corner end @@ -102,10 +102,10 @@ function AddBox!(Phase, Temp, Grid::AbstractGeneralGrid; # requi # Set phase number & thermal structure in the full domain ztop = zlim[2] - Origin[3] zbot = zlim[1] - Origin[3] - ind = findall( (Xrot .>= (xlim[1] - Origin[1])) .& (Xrot .<= (xlim[2] - Origin[1])) .& - (Yrot .>= (ylim[1] - Origin[2])) .& (Yrot .<= (ylim[2] - Origin[2])) .& + ind = findall( (Xrot .>= (xlim[1] - Origin[1])) .& (Xrot .<= (xlim[2] - Origin[1])) .& + (Yrot .>= (ylim[1] - Origin[2])) .& (Yrot .<= (ylim[2] - Origin[2])) .& (Zrot .>= zbot) .& (Zrot .<= ztop) ) - + # Compute thermal structure accordingly. See routines below for different options if T != nothing @@ -114,10 +114,111 @@ function AddBox!(Phase, Temp, Grid::AbstractGeneralGrid; # requi # Set the phase. Different routines are available for that - see below. Phase[ind] = Compute_Phase(Phase[ind], Temp[ind], Xrot[ind], Yrot[ind], Zrot[ind], phase) - + + return nothing +end + + +""" + AddLayer!(Phase, Temp, Grid::AbstractGeneralGrid; xlim=Tuple{2}, [ylim=Tuple{2}], zlim=Tuple{2}, + phase = ConstantPhase(1), + T=nothing ) + +Adds a layer with phase & temperature structure to a 3D model setup. The most common use would be to add a lithospheric layer to a model setup. 
+This simplifies creating model geometries in geodynamic models + + +Parameters +==== +- Phase - Phase array (consistent with Grid) +- Temp - Temperature array (consistent with Grid) +- Grid - grid structure (usually obtained with ReadLaMEM_InputFile, but can also be other grid types) +- xlim - left/right coordinates of box +- ylim - front/back coordinates of box +- zlim - bottom/top coordinates of box +- phase - specifies the phase of the box. See `ConstantPhase()`,`LithosphericPhases()` +- T - specifies the temperature of the box. See `ConstantTemp()`,`LinearTemp()`,`HalfspaceCoolingTemp()`,`SpreadingRateTemp()` + + +Examples +======== + +Example 1) Layer with constant phase and temperature +```julia +julia> Grid = ReadLaMEM_InputFile("test_files/SaltModels.dat") +LaMEM Grid: + nel : (32, 32, 32) + marker/cell : (3, 3, 3) + markers : (96, 96, 96) + x ϵ [-3.0 : 3.0] + y ϵ [-2.0 : 2.0] + z ϵ [-2.0 : 0.0] +julia> Phases = zeros(Int32, size(Grid.X)); +julia> Temp = zeros(Float64, size(Grid.X)); +julia> AddLayer!(Phases,Temp,Grid, zlim=(-50,0), phase=ConstantPhase(3), T=ConstantTemp(1000)) +julia> Model3D = ParaviewData(Grid, (Phases=Phases,Temp=Temp)); # Create Cartesian model +julia> Write_Paraview(Model3D,"LaMEM_ModelSetup") # Save model to paraview +1-element Vector{String}: + "LaMEM_ModelSetup.vts" +``` + +Example 2) Box with halfspace cooling profile +```julia +julia> Grid = ReadLaMEM_InputFile("test_files/SaltModels.dat") +julia> Phases = zeros(Int32, size(Grid.X)); +julia> Temp = zeros(Float64, size(Grid.X)); +julia> AddLayer!(Phases,Temp,Grid, zlim=(-50,0), phase=ConstantPhase(3), T=HalfspaceCoolingTemp()) +julia> Model3D = ParaviewData(Grid, (Phases=Phases,Temp=Temp)); # Create Cartesian model +julia> Write_Paraview(Model3D,"LaMEM_ModelSetup") # Save model to paraview +1-element Vector{String}: + "LaMEM_ModelSetup.vts" +``` +""" +function AddLayer!(Phase, Temp, Grid::AbstractGeneralGrid; # required input + xlim=nothing, ylim=nothing, zlim=nothing, # limits 
of the layer + phase = ConstantPhase(1), # Sets the phase number(s) in the box + T=nothing ) # Sets the thermal structure (various functions are available) + + # Retrieve 3D data arrays for the grid + X,Y,Z = coordinate_grids(Grid) + + # Limits of block + if isnothing(xlim)==isnothing(ylim)==isnothing(zlim) + error("You need to specify at least one of the limits (xlim, ylim, zlim)") + end + + if isnothing(xlim) + xlim = (minimum(X), maximum(X)) + end + if isnothing(ylim) + ylim = (minimum(Y), maximum(Y)) + end + if isnothing(zlim) + zlim = (minimum(Z), maximum(Z)) + end + + # Set phase number & thermal structure in the full domain + ind = findall( (X .>= (xlim[1])) .& (X .<= (xlim[2])) .& + (Y .>= (ylim[1])) .& (Y .<= (ylim[2])) .& + (Z .>= (zlim[1])) .& (Z .<= (zlim[2])) + ) + + + # Compute thermal structure accordingly. See routines below for different options + if !isnothing(T) + Temp[ind] = Compute_ThermalStructure(Temp[ind], X[ind], Y[ind], Z[ind], T) + end + + # Set the phase. Different routines are available for that - see below. + Phase[ind] = Compute_Phase(Phase[ind], Temp[ind], X[ind], Y[ind], Z[ind], phase) + return nothing end + + + + """ AddSphere!(Phase, Temp, Grid::AbstractGeneralGrid; cen=Tuple{3}, radius=Tuple{1}, phase = ConstantPhase(1). @@ -133,17 +234,17 @@ Parameters - Grid - LaMEM grid structure (usually obtained with ReadLaMEM_InputFile) - cen - center coordinates of sphere - radius - radius of sphere -- phase - specifies the phase of the box. See `ConstantPhase()`,`LithosphericPhases()` -- T - specifies the temperature of the box. See `ConstantTemp()`,`LinearTemp()`,`HalfspaceCoolingTemp()`,`SpreadingRateTemp()` +- phase - specifies the phase of the box. See `ConstantPhase()`,`LithosphericPhases()` +- T - specifies the temperature of the box. 
See `ConstantTemp()`,`LinearTemp()`,`HalfspaceCoolingTemp()`,`SpreadingRateTemp()` Example ======== Sphere with constant phase and temperature: -```julia +```julia julia> Grid = ReadLaMEM_InputFile("test_files/SaltModels.dat") -LaMEM Grid: +LaMEM Grid: nel : (32, 32, 32) marker/cell : (3, 3, 3) markers : (96, 96, 96) @@ -154,16 +255,16 @@ julia> Phases = zeros(Int32, size(Grid.X)); julia> Temp = zeros(Float64, size(Grid.X)); julia> AddSphere!(Phases,Temp,Grid, cen=(0,0,-1), radius=0.5, phase=ConstantPhase(2), T=ConstantTemp(800)) julia> Model3D = ParaviewData(Grid, (Phases=Phases,Temp=Temp)); # Create Cartesian model -julia> Write_Paraview(Model3D,"LaMEM_ModelSetup") # Save model to paraview +julia> Write_Paraview(Model3D,"LaMEM_ModelSetup") # Save model to paraview 1-element Vector{String}: - "LaMEM_ModelSetup.vts" + "LaMEM_ModelSetup.vts" ``` """ function AddSphere!(Phase, Temp, Grid::AbstractGeneralGrid; # required input cen=Tuple{3}, radius=Tuple{1}, # center and radius of the sphere phase = ConstantPhase(1), # Sets the phase number(s) in the sphere - T=nothing ) # Sets the thermal structure (various fucntions are available) - + T=nothing ) # Sets the thermal structure (various functions are available) + # Retrieve 3D data arrays for the grid X,Y,Z = coordinate_grids(Grid) @@ -200,17 +301,17 @@ Parameters - Origin - the origin, used to rotate the box around. Default is the left-front-top corner - StrikeAngle - strike angle of slab - DipAngle - dip angle of slab -- phase - specifies the phase of the box. See `ConstantPhase()`,`LithosphericPhases()` -- T - specifies the temperature of the box. See `ConstantTemp()`,`LinearTemp()`,`HalfspaceCoolingTemp()`,`SpreadingRateTemp()` +- phase - specifies the phase of the box. See `ConstantPhase()`,`LithosphericPhases()` +- T - specifies the temperature of the box. 
See `ConstantTemp()`,`LinearTemp()`,`HalfspaceCoolingTemp()`,`SpreadingRateTemp()` Example ======== Ellipsoid with constant phase and temperature, rotated 90 degrees and tilted by 45 degrees: -```julia +```julia julia> Grid = ReadLaMEM_InputFile("test_files/SaltModels.dat") -LaMEM Grid: +LaMEM Grid: nel : (32, 32, 32) marker/cell : (3, 3, 3) markers : (96, 96, 96) @@ -221,18 +322,18 @@ julia> Phases = zeros(Int32, size(Grid.X)); julia> Temp = zeros(Float64, size(Grid.X)); julia> AddEllipsoid!(Phases,Temp,Grid, cen=(-1,-1,-1), axes=(0.2,0.1,0.5), StrikeAngle=90, DipAngle=45, phase=ConstantPhase(3), T=ConstantTemp(600)) julia> Model3D = ParaviewData(Grid, (Phases=Phases,Temp=Temp)); # Create Cartesian model -julia> Write_Paraview(Model3D,"LaMEM_ModelSetup") # Save model to paraview +julia> Write_Paraview(Model3D,"LaMEM_ModelSetup") # Save model to paraview 1-element Vector{String}: - "LaMEM_ModelSetup.vts" + "LaMEM_ModelSetup.vts" ``` """ function AddEllipsoid!(Phase, Temp, Grid::AbstractGeneralGrid; # required input cen=Tuple{3}, axes=Tuple{3}, # center and semi-axes of the ellpsoid Origin=nothing, StrikeAngle=0, DipAngle=0, # origin & dip/strike phase = ConstantPhase(1), # Sets the phase number(s) in the box - T=nothing ) # Sets the thermal structure (various fucntions are available) + T=nothing ) # Sets the thermal structure (various functions are available) - if Origin==nothing + if Origin==nothing Origin = cen # center end @@ -281,17 +382,17 @@ Parameters - base - center coordinate of bottom of cylinder - cap - center coordinate of top of cylinder - radius - radius of the cylinder -- phase - specifies the phase of the box. See `ConstantPhase()`,`LithosphericPhases()` -- T - specifies the temperature of the box. See `ConstantTemp()`,`LinearTemp()`,`HalfspaceCoolingTemp()`,`SpreadingRateTemp()` +- phase - specifies the phase of the box. See `ConstantPhase()`,`LithosphericPhases()` +- T - specifies the temperature of the box. 
See `ConstantTemp()`,`LinearTemp()`,`HalfspaceCoolingTemp()`,`SpreadingRateTemp()` Example ======== Cylinder with constant phase and temperature: -```julia +```julia julia> Grid = ReadLaMEM_InputFile("test_files/SaltModels.dat") -LaMEM Grid: +LaMEM Grid: nel : (32, 32, 32) marker/cell : (3, 3, 3) markers : (96, 96, 96) @@ -302,16 +403,16 @@ julia> Phases = zeros(Int32, size(Grid.X)); julia> Temp = zeros(Float64, size(Grid.X)); julia> AddCylinder!(Phases,Temp,Grid, base=(-1,-1,-1.5), cap=(1,1,-0.5), radius=0.25, phase=ConstantPhase(4), T=ConstantTemp(400)) julia> Model3D = ParaviewData(Grid, (Phases=Phases,Temp=Temp)); # Create Cartesian model -julia> Write_Paraview(Model3D,"LaMEM_ModelSetup") # Save model to paraview +julia> Write_Paraview(Model3D,"LaMEM_ModelSetup") # Save model to paraview 1-element Vector{String}: - "LaMEM_ModelSetup.vts" + "LaMEM_ModelSetup.vts" ``` """ function AddCylinder!(Phase, Temp, Grid::AbstractGeneralGrid; # required input base=Tuple{3}, cap=Tuple{3}, radius=Tuple{1}, # center and radius of the sphere phase = ConstantPhase(1), # Sets the phase number(s) in the sphere - T=nothing ) # Sets the thermal structure (various fucntions are available) - + T=nothing ) # Sets the thermal structure (various functions are available) + # axis vector of cylinder axVec = cap .- base ax2 = (axVec[1]^2 + axVec[2]^2 + axVec[3]^2) @@ -355,8 +456,8 @@ function Rot3D!(X,Y,Z, StrikeAngle, DipAngle) for i in eachindex(X) CoordVec = [X[i], Y[i], Z[i]] - CoordRot = rotz*CoordVec; - CoordRot = roty*CoordRot; + CoordRot = rotz*CoordVec; + CoordRot = roty*CoordRot; X[i] = CoordRot[1]; Y[i] = CoordRot[2]; Z[i] = CoordRot[3]; @@ -390,9 +491,9 @@ Example ======== Cylinder with constant phase and temperature: -```julia +```julia julia> Grid = ReadLaMEM_InputFile("test_files/SaltModels.dat") -LaMEM Grid: +LaMEM Grid: nel : (32, 32, 32) marker/cell : (3, 3, 3) markers : (96, 96, 96) @@ -400,7 +501,7 @@ LaMEM Grid: y ϵ [-2.0 : 2.0] z ϵ [-2.0 : 0.0] julia> Topo = 
makeVolcTopo(Grid, center=[0.0,0.0], height=0.4, radius=1.5, crater=0.5, base=0.1) -CartData +CartData size : (33, 33, 1) x ϵ [ -3.0 : 3.0] y ϵ [ -2.0 : 2.0] @@ -408,21 +509,21 @@ CartData fields : (:Topography,) attributes: ["note"] julia> Topo = makeVolcTopo(Grid, center=[0.0,0.0], height=0.8, radius=0.5, crater=0.0, base=0.4, background=Topo.fields.Topography) -CartData +CartData size : (33, 33, 1) x ϵ [ -3.0 : 3.0] y ϵ [ -2.0 : 2.0] z ϵ [ 0.1 : 0.8] fields : (:Topography,) attributes: ["note"] -julia> Write_Paraview(Topo,"VolcanoTopo") # Save topography to paraview -Saved file: VolcanoTopo.vts +julia> Write_Paraview(Topo,"VolcanoTopo") # Save topography to paraview +Saved file: VolcanoTopo.vts ``` """ -function makeVolcTopo(Grid::LaMEM_grid; - center::Array{Float64, 1}, - height::Float64, - radius::Float64, +function makeVolcTopo(Grid::LaMEM_grid; + center::Array{Float64, 1}, + height::Float64, + radius::Float64, crater=0.0, base=0.0, background=nothing) @@ -443,7 +544,7 @@ function makeVolcTopo(Grid::LaMEM_grid; # get radial distance from crater rim RD .-= crater - + # find position relative to crater rim dr = radius - crater pos = (-RD ./ dr .+ 1) @@ -456,7 +557,7 @@ function makeVolcTopo(Grid::LaMEM_grid; else background = nondimensionalize(background, CharUnits) if size(background) == size(X) - H .= background + H .= background elseif size(background) == size(reshape(X,nx,ny,1)) H .= background[:,:,1] else @@ -467,7 +568,7 @@ function makeVolcTopo(Grid::LaMEM_grid; H[ind] .= pos[ind] .* (height-base) .+ base ind = findall(x->x>= 1.0, pos) H[ind] .= height - + # dimensionalize Topo = dimensionalize(H, km, CharUnits) @@ -476,12 +577,12 @@ function makeVolcTopo(Grid::LaMEM_grid; end -abstract type AbstractThermalStructure end +abstract type AbstractThermalStructure end """ ConstantTemp(T=1000) - + Sets a constant temperature inside the box Parameters @@ -500,7 +601,7 @@ end """ LinearTemp(Ttop=0, Tbot=1000) - + Set a linear temperature structure from top to 
bottom Parameters @@ -526,7 +627,7 @@ end """ HalfspaceCoolingTemp(Tsurface=0, Tmantle=1350, Age=60, Adiabat=0) - + Sets a halfspace temperature structure in plate Parameters @@ -553,7 +654,7 @@ function Compute_ThermalStructure(Temp, X, Y, Z, s::HalfspaceCoolingTemp) ThermalAge = Age*1e6*SecYear; MantleAdiabaticT = Tmantle .+ Adiabat*abs.(Z); # Adiabatic temperature of mantle - + for i in eachindex(Temp) Temp[i] = (Tsurface .- Tmantle)*erfc((abs.(Z[i])*1e3)./(2*sqrt(kappa*ThermalAge))) + MantleAdiabaticT[i]; end @@ -563,7 +664,7 @@ end """ SpreadingRateTemp(Tsurface=0, Tmantle=1350, Adiabat=0, MORside="left",SpreadingVel=3, AgeRidge=0, maxAge=80) - + Sets a halfspace temperature structure within the box, combined with a spreading rate (which implies that the plate age varies) Parameters @@ -593,28 +694,28 @@ function Compute_ThermalStructure(Temp, X, Y, Z, s::SpreadingRateTemp) kappa = 1e-6; SecYear = 3600*24*365 dz = Z[end]-Z[1]; - + MantleAdiabaticT = Tmantle .+ Adiabat*abs.(Z); # Adiabatic temperature of mantle - + if MORside=="left" - Distance = X .- X[1,1,1]; + Distance = X .- X[1,1,1]; elseif MORside=="right" - Distance = X[end,1,1] .- X; + Distance = X[end,1,1] .- X; elseif MORside=="front" - Distance = Y .- Y[1,1,1]; + Distance = Y .- Y[1,1,1]; elseif MORside=="back" - Distance = Y[1,end,1] .- Y; + Distance = Y[1,end,1] .- Y; else error("unknown side") end - + for i in eachindex(Temp) ThermalAge = abs(Distance[i]*1e3*1e2)/SpreadingVel + AgeRidge*1e6; # Thermal age in years if ThermalAge>maxAge*1e6 ThermalAge = maxAge*1e6 end - + ThermalAge = ThermalAge*SecYear; Temp[i] = (Tsurface .- Tmantle)*erfc((abs.(Z[i])*1e3)./(2*sqrt(kappa*ThermalAge))) + MantleAdiabaticT[i]; @@ -624,12 +725,12 @@ end -abstract type AbstractPhaseNumber end +abstract type AbstractPhaseNumber end """ ConstantPhase(phase=1) - + Sets a constant phase inside the box Parameters @@ -649,7 +750,7 @@ end """ LithosphericPhases(Layers=[10 20 15], Phases=[1 2 3 4], Tlab=nothing ) - + This 
allows defining a layered lithosphere. Layering is defined from the top downwards. Parameters @@ -709,4 +810,4 @@ function Compute_Phase(Phase, Temp, X, Y, Z, s::LithosphericPhases; Ztop=0) end # allow AbstractGeneralGrid instead of Z and Ztop -Compute_Phase(Phase, Temp, Grid::LaMEM_grid, s::LithosphericPhases) = Compute_Phase(Phase, Temp, Grid.X, Grid.Y, Grid.Z, s::LithosphericPhases, Ztop=maximum(Grid.coord_z)) \ No newline at end of file +Compute_Phase(Phase, Temp, Grid::LaMEM_grid, s::LithosphericPhases) = Compute_Phase(Phase, Temp, Grid.X, Grid.Y, Grid.Z, s::LithosphericPhases, Ztop=maximum(Grid.coord_z)) diff --git a/src/data_conversion.jl b/src/data_conversion.jl index 30acafc8..7bb5d261 100644 --- a/src/data_conversion.jl +++ b/src/data_conversion.jl @@ -1,20 +1,20 @@ """ -**data_conversion.jl** contains functions to convert imported data from e.g. CSV or netCDF files to the GeoData format -that is used internally to store all data related to a certain data set. For importing files, use standard methods, such -as CSV import using *readdlm* (see the [DelimitedFiles.jl](https://docs.julialang.org/en/v1/stdlib/DelimitedFiles/) package) -or netCDF import (see the [NetCDF.jl](https://github.com/JuliaGeo/NetCDF.jl) package). +**data_conversion.jl** contains functions to convert imported data from e.g. CSV or netCDF files to the GeoData format +that is used internally to store all data related to a certain data set. For importing files, use standard methods, such +as CSV import using *readdlm* (see the [DelimitedFiles.jl](https://docs.julialang.org/en/v1/stdlib/DelimitedFiles/) package) +or netCDF import (see the [NetCDF.jl](https://github.com/JuliaGeo/NetCDF.jl) package). 
""" """ -Using *readdlm* on CSV files will provide two output arguments, one containing the header as a Array{AbstractString, 2} and +Using *readdlm* on CSV files will provide two output arguments, one containing the header as a Array{AbstractString, 2} and the other one containing the data as Array{Float64, 2}. """ ############ CONVERT DLM DATA TO GEO DATA function DLM2Geo(hdr::Array{AbstractString, 2},data::Array{Float64, 2},DepthCon::AbstractString) - + # initialize array of structures to store the data - # while doing so, separate the unit from the variable name + # while doing so, separate the unit from the variable name ndata = size(data,1) # number of entries nfields = size(data,2) # number of fields @@ -39,7 +39,7 @@ function DLM2Geo(hdr::Array{AbstractString, 2},data::Array{Float64, 2},DepthCon: LatData = data[1:end,ifield] elseif occursin("depth",hdr[ifield]) # ISSUE: WE DEFINE DEPTH AS NEGATIVE, BUT HOW DO WE SET THAT? - # WE COULD ADD A FLAG THAT INDICATES THE DEPTH CONVENTION AND + # WE COULD ADD A FLAG THAT INDICATES THE DEPTH CONVENTION AND # TREAT IT ACCORDINGLY depth_ind = ifield; varname = GetVariableName(hdr[ifield])# get variable name @@ -86,7 +86,7 @@ function DLM2Geo(hdr::Array{AbstractString, 2},data::Array{Float64, 2},DepthCon: # take care of the header strings varname = GetVariableName(tmp_hdr[ihdr])# get variable name - varunit = GetVariableUnit(tmp_hdr[ihdr])# get variable unit + varunit = GetVariableUnit(tmp_hdr[ihdr])# get variable unit if cmp(varunit,"%")==0 tmp_hdr[ihdr] = string(varname,"_percentage") else @@ -100,26 +100,26 @@ function DLM2Geo(hdr::Array{AbstractString, 2},data::Array{Float64, 2},DepthCon: hdr_tpl = Tuple(Symbol(x) for x in tmp_hdr) # convert header to tuple data_tpl = Tuple.(tmp_vec for i in size(tmp_vec,1)) # convert data to tuple tmp = NamedTuple{hdr_tpl}(data_tpl) - + println(typeof(tmp)) # initialize data structure importdata = GeoData(LonData,LatData,DepthData,tmp) # assign data to output - return importdata + 
return importdata end -########### REARRANGE DATA TO OBTAIN A 3D MATIX IF NECESSARY ########## +########### REARRANGE DATA TO OBTAIN A 3D MATRIX IF NECESSARY ########## function RearrangeToMatrix() end ########### CONVERT NETCDF DATA TO GEO DATA ######## """ -Converting netCDF data to GeoData is fairly easy. One can read in Data from a netCDF file using ncread("filename","variable") -(the contents of the netcdf file can be queried beforehand using ncinfo("filename")). Via *ncread*, one then reads in all the +Converting netCDF data to GeoData is fairly easy. One can read in Data from a netCDF file using ncread("filename","variable") +(the contents of the netcdf file can be queried beforehand using ncinfo("filename")). Via *ncread*, one then reads in all the desired variables. NetCDF2Geo then takes care of converting this data to GeoData. """ function NetCDF2Geo() @@ -136,7 +136,7 @@ function GetVariableName(inputstring::SubString{String}) indfirst = nothing iloop = 1 str2find = ["(","[","{"] - # find first occurence of one of the brackets + # find first occurrence of one of the brackets while isnothing(indfirst) indfirst = findfirst(str2find[iloop],inputstring) iloop = iloop + 1 @@ -144,7 +144,7 @@ function GetVariableName(inputstring::SubString{String}) break end end - + # either return the whole inputstring or only the part before the unit if isnothing(indfirst) return inputstring @@ -180,9 +180,3 @@ function GetVariableUnit(inputstring::SubString{String}) end end - - - - - - diff --git a/src/data_import.jl b/src/data_import.jl index 1167f729..247ac27f 100644 --- a/src/data_import.jl +++ b/src/data_import.jl @@ -1,6 +1,6 @@ # This is data_import.jl # -# This file contains functions to import different data types. +# This file contains functions to import different data types. 
# # Author: Marcel Thielmann, 05/2021 @@ -13,9 +13,9 @@ function ReadCSV_LatLon(filename::AbstractString,DepthCon::AbstractString) # import data from file with coordinates given in lat/lon/depth format and additional data given in additional columns # the idea here is to assign the data to a structure of the type GeoData which will then be used for further processing data,hdr = readdlm(filename,',', Float64,'\n'; header=true, skipblanks=true, comments=true, comment_char='#') - + # initialize array of structures to store the data - # while doing so, separate the unit from the variable name + # while doing so, separate the unit from the variable name ndata = size(data,1) # number of entries nfields = size(data,2) # number of fields @@ -40,7 +40,7 @@ function ReadCSV_LatLon(filename::AbstractString,DepthCon::AbstractString) LatData = data[1:end,ifield] elseif occursin("depth",hdr[ifield]) # ISSUE: WE DEFINE DEPTH AS NEGATIVE, BUT HOW DO WE SET THAT? - # WE COULD ADD A FLAG THAT INDICATES THE DEPTH CONVENTION AND + # WE COULD ADD A FLAG THAT INDICATES THE DEPTH CONVENTION AND # TREAT IT ACCORDINGLY depth_ind = ifield; varname = GetVariableName(hdr[ifield])# get variable name @@ -78,7 +78,7 @@ function ReadCSV_LatLon(filename::AbstractString,DepthCon::AbstractString) # take care of the header strings varname = GetVariableName(tmp_hdr[ihdr])# get variable name - varunit = GetVariableUnit(tmp_hdr[ihdr])# get variable unit + varunit = GetVariableUnit(tmp_hdr[ihdr])# get variable unit if cmp(varunit,"%")==0 tmp_hdr[ihdr] = string(varname,"_percentage") else @@ -92,14 +92,14 @@ function ReadCSV_LatLon(filename::AbstractString,DepthCon::AbstractString) hdr_tpl = Tuple(Symbol(x) for x in tmp_hdr) # convert header to tuple data_tpl = Tuple.(tmp_vec for i in size(tmp_vec,1)) # convert data to tuple tmp = NamedTuple{hdr_tpl}(data_tpl) - + println(typeof(tmp)) # initialize data structure importdata = GeoData(LonData,LatData,DepthData,tmp) # assign data to output - return 
importdata + return importdata end @@ -110,7 +110,7 @@ function GetVariableName(inputstring::SubString{String}) indfirst = nothing iloop = 1 str2find = ["(","[","{"] - # find first occurence of one of the brackets + # find first occurrence of one of the brackets while isnothing(indfirst) indfirst = findfirst(str2find[iloop],inputstring) iloop = iloop + 1 @@ -118,7 +118,7 @@ function GetVariableName(inputstring::SubString{String}) break end end - + # either return the whole inputstring or only the part before the unit if isnothing(indfirst) return inputstring @@ -158,22 +158,22 @@ end """ - Screenshot_To_GeoData(filename::String, Corner_LowerLeft, Corner_UpperRight; Corner_LowerRight=nothing, Corner_UpperLeft=nothing, Cartesian=false, UTM=false, UTMzone, isnorth=true) + Screenshot_To_GeoData(filename::String, Corner_LowerLeft, Corner_UpperRight; Corner_LowerRight=nothing, Corner_UpperLeft=nothing, Cartesian=false, UTM=false, UTMzone, isnorth=true, fieldname::Symbol=:colors) Take a screenshot of Georeferenced image either a `lat/lon`, `x,y` (if `Cartesian=true`) or in UTM coordinates (if `UTM=true`) at a given depth or along profile and converts it to a `GeoData`, `CartData` or `UTMData` struct, which can be saved to Paraview -The lower left and upper right coordinates of the image need to be specified in tuples of `(lon,lat,depth)` or `(UTM_ew, UTM_ns, depth)`, where `depth` is negative in the Earth (and in km). +The lower left and upper right coordinates of the image need to be specified in tuples of `(lon,lat,depth)` or `(UTM_ew, UTM_ns, depth)`, where `depth` is negative inside the Earth (and in km). The lower right and upper left corners can be specified optionally (to take non-orthogonal images into account). If they are not specified, the image is considered orthogonal and the corners are computed from the other two. 
*Note*: if your data is in `UTM` coordinates you also need to provide the `UTMzone` and whether we are on the northern hemisphere or not (`isnorth`). """ -function Screenshot_To_GeoData(filename::String, Corner_LowerLeft, Corner_UpperRight; Corner_LowerRight=nothing, Corner_UpperLeft=nothing, Cartesian=false, UTM=false, UTMzone=nothing, isnorth=true) +function Screenshot_To_GeoData(filename::String, Corner_LowerLeft, Corner_UpperRight; Corner_LowerRight=nothing, Corner_UpperLeft=nothing, Cartesian=false, UTM=false, UTMzone=nothing, isnorth::Bool=true, fieldname::Symbol=:colors) img = load(filename) # load image # Define lon/lat/depth of lower left corner - + # try to determine if this is a horizontal profile or not if abs(Corner_UpperRight[3]-Corner_LowerLeft[3])>0.0 DepthProfile = true @@ -181,7 +181,7 @@ function Screenshot_To_GeoData(filename::String, Corner_LowerLeft, Corner_UpperR DepthProfile = false end - # We should be able to either define 4 corners or only 2 and reconstruct the other two from the + # We should be able to either define 4 corners or only 2 and reconstruct the other two from the if isnothing(Corner_LowerRight) || isnothing(Corner_UpperLeft) if DepthProfile Corner_LowerRight = (Corner_UpperRight[1], Corner_UpperRight[2], Corner_LowerLeft[3]) @@ -235,7 +235,7 @@ function Screenshot_To_GeoData(filename::String, Corner_LowerLeft, Corner_UpperR # i = 2; Corners_lat = [Corner_LowerLeft[i] Corner_LowerRight[i]; Corner_UpperLeft[i] Corner_UpperRight[i];] # i = 3; Corners_depth = [Corner_LowerLeft[i] Corner_LowerRight[i]; Corner_UpperLeft[i] Corner_UpperRight[i];] - + # Extract the colors from the grid img_RGB = convert.(RGB, img) # convert to RGB data @@ -263,20 +263,22 @@ function Screenshot_To_GeoData(filename::String, Corner_LowerLeft, Corner_UpperR Y = interp_linear_lat.(X_int, Y_int); Depth = interp_linear_depth.(X_int, Y_int); - # Transfer to 3D arrays (check if needed or not; if yes, redo error message in struct routin) + # Transfer to 3D 
arrays (check if needed or not; if yes, redo error message in struct routine) red = zeros(size(Depth)); red[:,:,1] = r; green = zeros(size(Depth)); green[:,:,1] = g; blue = zeros(size(Depth)); blue[:,:,1] = b; # Create GeoData structure - NOTE: RGB data must be 2D matrixes, not 3D! + color_data = NamedTuple{(fieldname,)}(((red,green,blue),)); + if Cartesian - data_Image = CartData(X, Y, Depth,(colors=(red,green,blue),)) + data_Image = CartData(X, Y, Depth, color_data) end if UTM - data_Image = UTMData(X, Y, Depth, UTMzone, isnorth, (colors=(red,green,blue),)) + data_Image = UTMData(X, Y, Depth, UTMzone, isnorth, color_data) end if (!Cartesian) && (!UTM) - data_Image = GeoData(X, Y, Depth, (colors=(red,green,blue),)) + data_Image = GeoData(X, Y, Depth, color_data) end return data_Image end @@ -287,28 +289,27 @@ end Does the same as `Screenshot_To_GeoData`, but returns a `CartData` structure """ -function Screenshot_To_CartData(filename::String, Corner_LowerLeft, Corner_UpperRight; Corner_LowerRight=nothing, Corner_UpperLeft=nothing) - +function Screenshot_To_CartData(filename::String, Corner_LowerLeft, Corner_UpperRight; Corner_LowerRight=nothing, Corner_UpperLeft=nothing, fieldname::Symbol=:colors) + # first create a GeoData struct - Data_Cart = Screenshot_To_GeoData(filename, Corner_LowerLeft, Corner_UpperRight; Corner_LowerRight=Corner_LowerRight, Corner_UpperLeft=Corner_UpperLeft, Cartesian=true) + Data_Cart = Screenshot_To_GeoData(filename, Corner_LowerLeft, Corner_UpperRight; Corner_LowerRight=Corner_LowerRight, Corner_UpperLeft=Corner_UpperLeft, Cartesian=true, fieldname=fieldname) return Data_Cart end """ - Data = Screenshot_To_UTMData(filename::String, Corner_LowerLeft, Corner_UpperRight; Corner_LowerRight=nothing, Corner_UpperLeft=nothing, UTMzone::Int64=nothing, isnorth::Bool=true) + Data = Screenshot_To_UTMData(filename::String, Corner_LowerLeft, Corner_UpperRight; Corner_LowerRight=nothing, Corner_UpperLeft=nothing, UTMzone::Int64=nothing, 
isnorth::Bool=true, fieldname=:colors) -Does the same as `Screenshot_To_GeoData`, but returns for UTM data +Does the same as `Screenshot_To_GeoData`, but returns for UTM data Note that you have to specify the `UTMzone` and `isnorth` """ -function Screenshot_To_UTMData(filename::String, Corner_LowerLeft, Corner_UpperRight; Corner_LowerRight=nothing, Corner_UpperLeft=nothing, UTMzone::Int64=nothing, isnorth::Bool=true) +function Screenshot_To_UTMData(filename::String, Corner_LowerLeft, Corner_UpperRight; Corner_LowerRight=nothing, Corner_UpperLeft=nothing, UTMzone::Int64=nothing, isnorth::Bool=true, fieldname::Symbol=:colors) # first create a GeoData struct - Data_UTM = Screenshot_To_GeoData(filename, Corner_LowerLeft, Corner_UpperRight; Corner_LowerRight=Corner_LowerRight, Corner_UpperLeft=Corner_UpperLeft, Cartesian=false, UTM=true, UTMzone=UTMzone, isnorth=isnorth) + Data_UTM = Screenshot_To_GeoData(filename, Corner_LowerLeft, Corner_UpperRight; Corner_LowerRight=Corner_LowerRight, Corner_UpperLeft=Corner_UpperLeft, Cartesian=false, UTM=true, UTMzone=UTMzone, isnorth=isnorth, fieldname=fieldname) return Data_UTM end - diff --git a/src/data_types.jl b/src/data_types.jl index 4d7298f3..f783f291 100644 --- a/src/data_types.jl +++ b/src/data_types.jl @@ -7,7 +7,7 @@ export GeoData, ParaviewData, UTMData, CartData, LonLatDepthGrid, XYZGrid, Velocity_SphericalToCartesian!, Convert2UTMzone, Convert2CartData, ProjectionPoint, coordinate_grids, CreateCartGrid, CartGrid, flip - + """ struct ProjectionPoint @@ -22,7 +22,7 @@ export GeoData, ParaviewData, UTMData, CartData, Structure that holds the coordinates of a point that is used to project a data set from Lon/Lat to a Cartesian grid and vice-versa. 
""" struct ProjectionPoint - Lat :: Float64 + Lat :: Float64 Lon :: Float64 EW :: Float64 NS :: Float64 @@ -37,8 +37,8 @@ Defines a projection point used for map projections, by specifying latitude and """ function ProjectionPoint(; Lat=49.9929, Lon=8.2473) # Default = Mainz (center of universe) - x_lla = LLA(Lat, Lon, 0.0); # Lat/Lon/Alt of geodesy package - x_utmz = UTMZ(x_lla, wgs84) # UTMZ of + x_lla = LLA(Lat, Lon, 0.0); # Lat/Lon/Alt of geodesy package + x_utmz = UTMZ(x_lla, wgs84) # UTMZ of ProjectionPoint(Lat, Lon, x_utmz.x, x_utmz.y, Int64(x_utmz.zone), x_utmz.isnorth) end @@ -50,10 +50,10 @@ Defines a projection point used for map projections, by specifying UTM coordinat """ function ProjectionPoint(EW::Float64, NS::Float64, Zone::Int64, isnorth::Bool) - - x_utmz = UTMZ(EW,NS,0.0,Zone, isnorth) # UTMZ of - x_lla = LLA(x_utmz, wgs84); # Lat/Lon/Alt of geodesy package - + + x_utmz = UTMZ(EW,NS,0.0,Zone, isnorth) # UTMZ of + x_lla = LLA(x_utmz, wgs84); # Lat/Lon/Alt of geodesy package + ProjectionPoint(x_lla.lat, x_lla.lon, EW, NS, Zone, isnorth) end @@ -65,21 +65,21 @@ mutable struct ValueList values::Vector{Float64} end -""" +""" GeoData(lon::Any, lat:Any, depth::GeoUnit, fields::NamedTuple) - + Data structure that holds one or several fields with longitude, latitude and depth information. - `depth` can have units of meter, kilometer or be unitless; it will be converted to km. -- `fields` should ideally be a NamedTuple which allows you to specify the names of each of the fields. +- `fields` should ideally be a NamedTuple which allows you to specify the names of each of the fields. - In case you only pass one array we will convert it to a NamedTuple with default name. - A single field should be added as `(DataFieldName=Data,)` (don't forget the comma at the end). - Multiple fields can be added as well. `lon`,`lat`,`depth` should all have the same size as each of the `fields`. 
- In case you want to display a vector field in paraview, add it as a tuple: `(Velocity=(Veast,Vnorth,Vup), Veast=Veast, Vnorth=Vnorth, Vup=Vup)`; we automatically apply a vector transformation when transforming this to a `ParaviewData` structure from which we generate Paraview output. As this changes the magnitude of the arrows, you will no longer see the `[Veast,Vnorth,Vup]` components in Paraview which is why it is a good ideas to store them as separate Fields. -- Yet, there is one exception: if the name of the 3-component field is `colors`, we do not apply this vector transformation as this field is regarded to contain RGB colors. +- Yet, there is one exception: if the name of the 3-component field is `colors`, we do not apply this vector transformation as this field is regarded to contain RGB colors. - `Lat`,`Lon`,`Depth` should have the same size as the `Data` array. The ordering of the arrays is important. If they are 3D arrays, as in the example below, we assume that the first dimension corresponds to `lon`, second dimension to `lat` and third dimension to `depth` (which should be in km). See below for an example. 
-# Example +# Example ```julia-repl julia> Lat = 1.0:3:10.0; julia> Lon = 11.0:4:20.0; @@ -107,12 +107,12 @@ julia> Lat3D 1.0 4.0 7.0 10.0 1.0 4.0 7.0 10.0 1.0 4.0 7.0 10.0 - + [:, :, 2] = 1.0 4.0 7.0 10.0 1.0 4.0 7.0 10.0 1.0 4.0 7.0 10.0 - + [:, :, 3] = 1.0 4.0 7.0 10.0 1.0 4.0 7.0 10.0 @@ -123,19 +123,19 @@ julia> Depth3D -20.0 km -20.0 km -20.0 km -20.0 km -20.0 km -20.0 km -20.0 km -20.0 km -20.0 km -20.0 km -20.0 km -20.0 km - + [:, :, 2] = -15.0 km -15.0 km -15.0 km -15.0 km -15.0 km -15.0 km -15.0 km -15.0 km -15.0 km -15.0 km -15.0 km -15.0 km - + [:, :, 3] = -10.0 km -10.0 km -10.0 km -10.0 km -10.0 km -10.0 km -10.0 km -10.0 km -10.0 km -10.0 km -10.0 km -10.0 km julia> Data = zeros(size(Lon3D)); -julia> Data_set = GeophysicalModelGenerator.GeoData(Lon3D,Lat3D,Depth3D,(DataFieldName=Data,)) -GeoData +julia> Data_set = GeophysicalModelGenerator.GeoData(Lon3D,Lat3D,Depth3D,(DataFieldName=Data,)) +GeoData size : (3, 4, 3) lon ϵ [ 11.0 : 19.0] lat ϵ [ 1.0 : 10.0] @@ -146,16 +146,16 @@ GeoData """ struct GeoData <: AbstractGeneralGrid lon :: GeoUnit - lat :: GeoUnit + lat :: GeoUnit depth :: GeoUnit fields :: NamedTuple atts :: Dict - + # Ensure that the data is of the correct format function GeoData(lon,lat,depth,fields,atts=nothing) - + # check depth & convert it to units of km in case no units are given or it has different length units - if unit.(depth[1])==NoUnits + if unit.(depth[1])==NoUnits depth = depth*km # in case depth has no dimensions end depth = uconvert.(km,depth) # convert to km @@ -169,14 +169,14 @@ struct GeoData <: AbstractGeneralGrid lon = Vector(lon); end - # Check ordering of the arrays in case of 3D + # Check ordering of the arrays in case of 3D -- the check is not bullet proof for now if sum(size(lon).>1)==3 - if maximum(abs.(diff(lon,dims=2)))>maximum(abs.(diff(lon,dims=1))) || maximum(abs.(diff(lon,dims=3)))>maximum(abs.(diff(lon,dims=1))) - error("It appears that the lon array has a wrong ordering") - end - if 
maximum(abs.(diff(lat,dims=1)))>maximum(abs.(diff(lat,dims=2))) || maximum(abs.(diff(lat,dims=3)))>maximum(abs.(diff(lat,dims=2))) - error("It appears that the lat array has a wrong ordering") - end + if maximum(abs.(diff(lon,dims=2)))>maximum(abs.(diff(lon,dims=1))) || maximum(abs.(diff(lon,dims=3)))>maximum(abs.(diff(lon,dims=1))) + @warn ("It appears that the lon array has a wrong ordering") + end + if maximum(abs.(diff(lat,dims=1)))>maximum(abs.(diff(lat,dims=2))) || maximum(abs.(diff(lat,dims=3)))>maximum(abs.(diff(lat,dims=2))) + @warn ("It appears that the lat array has a wrong ordering") + end end # fields should be a NamedTuple. In case we simply provide an array, lets transfer it accordingly @@ -197,10 +197,10 @@ struct GeoData <: AbstractGeneralGrid DataField = DataField[1]; # in case we have velocity vectors as input end - if !(size(lon)==size(lat)==size(depth)==size(DataField)) + if !(size(lon)==size(lat)==size(depth)==size(DataField)) error("The size of Lon/Lat/Depth and the Fields should all be the same!") end - + if isnothing(atts) # if nothing is given as attributes, then we note that in GeoData atts = Dict("note" => "No attributes were given to this dataset") @@ -209,8 +209,8 @@ struct GeoData <: AbstractGeneralGrid if !(typeof(atts)<: Dict) error("Attributes should be given as Dict!") end - end - + end + return new(lon,lat,depth,fields,atts) end @@ -223,10 +223,26 @@ function Base.show(io::IO, d::GeoData) println(io," size : $(size(d.lon))") println(io," lon ϵ [ $(first(d.lon.val)) : $(last(d.lon.val))]") println(io," lat ϵ [ $(first(d.lat.val)) : $(last(d.lat.val))]") - println(io," depth ϵ [ $(first(d.depth.val)) : $(last(d.depth.val))]") + if any(isnan.(NumValue(d.depth))) + z_vals = extrema(d.depth.val[isnan.(d.depth.val).==false]) + println(io," depth ϵ [ $(z_vals[1]) : $(z_vals[2])]; has NaN's") + else + z_vals = extrema(d.depth.val) + println(io," depth ϵ [ $(z_vals[1]) : $(z_vals[2])]") + end println(io," fields : $(keys(d.fields))") + + 
# Only print attributes if we have non-default attributes if any( propertynames(d) .== :atts) - println(io," attributes: $(keys(d.atts))") + show_atts = true + if haskey(d.atts,"note") + if d.atts["note"]=="No attributes were given to this dataset" + show_atts = false + end + end + if show_atts + println(io," attributes: $(keys(d.atts))") + end end end @@ -239,7 +255,7 @@ Cartesian data in `x/y/z` coordinates to be used with Paraview. This is usually generated automatically from the `GeoData` structure, but you can also invoke do this manually: ```julia-repl -julia> Data_set = GeophysicalModelGenerator.GeoData(1.0:10.0,11.0:20.0,(-20:-11)*km,(DataFieldName=(-20:-11),)) +julia> Data_set = GeophysicalModelGenerator.GeoData(1.0:10.0,11.0:20.0,(-20:-11)*km,(DataFieldName=(-20:-11),)) julia> Data_cart = convert(ParaviewData, Data_set) ``` """ @@ -256,29 +272,50 @@ function Base.show(io::IO, d::ParaviewData) println(io," size : $(size(d.x))") println(io," x ϵ [ $(first(d.x.val)) : $(last(d.x.val))]") println(io," y ϵ [ $(first(d.y.val)) : $(last(d.y.val))]") - println(io," z ϵ [ $(first(d.z.val)) : $(last(d.z.val))]") + if any(isnan.(NumValue(d.z))) + z_vals = extrema(d.z.val[isnan.(d.z.val).==false]) + println(io," z ϵ [ $(z_vals[1]) : $(z_vals[2])]; has NaN's") + else + z_vals = extrema(d.z.val) + println(io," z ϵ [ $(z_vals[1]) : $(z_vals[2])]") + end + println(io," fields: $(keys(d.fields))") + + # Only print attributes if we have non-default attributes + if any( propertynames(d) .== :atts) + show_atts = true + if haskey(d.atts,"note") + if d.atts["note"]=="No attributes were given to this dataset" + show_atts = false + end + end + if show_atts + println(io," attributes: $(keys(d.atts))") + end + end + end # conversion function from GeoData -> ParaviewData -function Base.convert(::Type{ParaviewData}, d::GeoData) - +function Base.convert(::Type{ParaviewData}, d::GeoData) + # Utilize the Geodesy.jl package & use the Cartesian Earth-Centered-Earth-Fixed (ECEF) 
coordinate system lon = Array(ustrip.(d.lon.val)); lat = Array(ustrip.(d.lat.val)); LLA_Data = LLA.(lat,lon, Array(ustrip.(d.depth.val))*1000); # convert to LLA from Geodesy package X,Y,Z = zeros(size(lon)), zeros(size(lon)), zeros(size(lon)); - + # convert to cartesian ECEF reference frame. Note that we use kilometers and the wgs84 for i in eachindex(X) - data_xyz = ECEF(LLA_Data[i], wgs84) + data_xyz = ECEF(LLA_Data[i], wgs84) X[i] = data_xyz.x/1e3; Y[i] = data_xyz.y/1e3; Z[i] = data_xyz.z/1e3; end - - # This is the 'old' implementation, which does not employ a reference ellipsoid + + # This is the 'old' implementation, which does not employ a reference ellipsoid # X = R .* cosd.( lon ) .* cosd.( lat ); # Y = R .* sind.( lon ) .* cosd.( lat ); # Z = R .* sind.( lat ); @@ -289,7 +326,7 @@ function Base.convert(::Type{ParaviewData}, d::GeoData) if typeof(d.fields[i]) <: Tuple if length(d.fields[i]) == 3 # the tuple has length 3, which is therefore assumed to be a velocity vector - + # If the field name contains the string "color" we do not apply a vector transformation as it is supposed to contain RGB colors if !occursin("color", string(field_names[i])) println("Applying a vector transformation to field: $(field_names[i])") @@ -306,29 +343,29 @@ end -""" +""" UTMData(EW::Any, NS:Any, depth::GeoUnit, UTMZone::Int, NorthernHemisphere=true, fields::NamedTuple) - + Data structure that holds one or several fields with UTM coordinates (east-west), (north-south) and depth information. - `depth` can have units of meters, kilometer or be unitless; it will be converted to meters (as UTMZ is usually in meters) -- `fields` should ideally be a NamedTuple which allows you to specify the names of each of the fields. +- `fields` should ideally be a NamedTuple which allows you to specify the names of each of the fields. - In case you only pass one array we will convert it to a NamedTuple with default name. 
- A single field should be added as `(DataFieldName=Data,)` (don't forget the comma at the end). - Multiple fields can be added as well. - In case you want to display a vector field in paraview, add it as a tuple: `(Velocity=(Veast,Vnorth,Vup), Veast=Veast, Vnorth=Vnorth, Vup=Vup)`; we automatically apply a vector transformation when transforming this to a `ParaviewData` structure from which we generate Paraview output. As this changes the magnitude of the arrows, you will no longer see the `[Veast,Vnorth,Vup]` components in Paraview which is why it is a good ideas to store them as separate Fields. -- Yet, there is one exception: if the name of the 3-component field is `colors`, we do not apply this vector transformation as this field is regarded to contain RGB colors. +- Yet, there is one exception: if the name of the 3-component field is `colors`, we do not apply this vector transformation as this field is regarded to contain RGB colors. - `Lat`,`Lon`,`Depth` should have the same size as the `Data` array. The ordering of the arrays is important. If they are 3D arrays, as in the example below, we assume that the first dimension corresponds to `lon`, second dimension to `lat` and third dimension to `depth` (which should be in km). See below for an example. 
-# Example +# Example ```julia-repl julia> ew = 422123.0:100:433623.0 julia> ns = 4.514137e6:100:4.523637e6 julia> depth = -5400:250:600 julia> EW,NS,Depth = XYZGrid(ew, ns, depth); julia> Data = ustrip.(Depth); -julia> Data_set = UTMData(EW,NS,Depth,33, true, (FakeData=Data,Data2=Data.+1.)) -UTMData +julia> Data_set = UTMData(EW,NS,Depth,33, true, (FakeData=Data,Data2=Data.+1.)) +UTMData UTM zone : 33-33 North size : (116, 96, 25) EW ϵ [ 422123.0 : 433623.0] @@ -340,7 +377,7 @@ UTMData If you wish, you can convert this from `UTMData` to `GeoData` with ```julia-repl julia> Data_set1 = convert(GeoData, Data_set) -GeoData +GeoData size : (116, 96, 25) lon ϵ [ 14.075969111533457 : 14.213417764154963] lat ϵ [ 40.77452227533946 : 40.86110443583479] @@ -357,18 +394,18 @@ julia> Write_Paraview(Data_set1, "Data_set1") """ struct UTMData <: AbstractGeneralGrid EW :: GeoUnit - NS :: GeoUnit + NS :: GeoUnit depth :: GeoUnit zone :: Any northern :: Any - fields :: NamedTuple + fields :: NamedTuple atts :: Dict - + # Ensure that the data is of the correct format function UTMData(EW,NS,depth,zone,northern,fields,atts=nothing) - + # check depth & convert it to units of km in case no units are given or it has different length units - if unit.(depth)[1]==NoUnits + if unit.(depth)[1]==NoUnits depth = depth*m # in case depth has no dimensions end depth = uconvert.(m,depth) # convert to meters @@ -377,10 +414,10 @@ struct UTMData <: AbstractGeneralGrid # Check ordering of the arrays in case of 3D if sum(size(EW).>1)==3 if maximum(abs.(diff(EW,dims=2)))>maximum(abs.(diff(EW,dims=1))) || maximum(abs.(diff(EW,dims=3)))>maximum(abs.(diff(EW,dims=1))) - error("It appears that the EW array has a wrong ordering") + @warn "It appears that the EW array has a wrong ordering" end if maximum(abs.(diff(NS,dims=1)))>maximum(abs.(diff(NS,dims=2))) || maximum(abs.(diff(NS,dims=3)))>maximum(abs.(diff(NS,dims=2))) - error("It appears that the NS array has a wrong ordering") + @warn "It appears that the 
NS array has a wrong ordering" end end @@ -402,7 +439,7 @@ struct UTMData <: AbstractGeneralGrid DataField = DataField[1]; # in case we have velocity vectors as input end - if !(size(EW)==size(NS)==size(depth)==size(DataField)) + if !(size(EW)==size(NS)==size(depth)==size(DataField)) error("The size of EW/NS/Depth and the Fields should all be the same!") end @@ -410,7 +447,7 @@ struct UTMData <: AbstractGeneralGrid zone = ones(Int64,size(EW))*zone northern = ones(Bool,size(EW))*northern end - + # take care of attributes if isnothing(atts) # if nothing is given as attributes, then we note that in GeoData @@ -423,7 +460,7 @@ struct UTMData <: AbstractGeneralGrid end return new(EW,NS,depth,zone,northern, fields,atts) - + end end @@ -439,17 +476,35 @@ function Base.show(io::IO, d::UTMData) println(io," size : $(size(d.EW))") println(io," EW ϵ [ $(first(d.EW.val)) : $(last(d.EW.val))]") println(io," NS ϵ [ $(first(d.NS.val)) : $(last(d.NS.val))]") - println(io," depth ϵ [ $(first(d.depth.val)) : $(last(d.depth.val))]") + + if any(isnan.(NumValue(d.depth))) + z_vals = extrema(d.depth.val[isnan.(d.depth.val).==false]) + println(io," depth ϵ [ $(z_vals[1]) : $(z_vals[2])]; has NaNs") + else + z_vals = extrema(d.depth.val) + println(io," depth ϵ [ $(z_vals[1]) : $(z_vals[2])]") + end + println(io," fields : $(keys(d.fields))") + + # Only print attributes if we have non-default attributes if any( propertynames(d) .== :atts) - println(io," attributes: $(keys(d.atts))") + show_atts = true + if haskey(d.atts,"note") + if d.atts["note"]=="No attributes were given to this dataset" + show_atts = false + end + end + if show_atts + println(io," attributes: $(keys(d.atts))") + end end end """ Converts a `UTMData` structure to a `GeoData` structure """ -function Base.convert(::Type{GeoData}, d::UTMData) +function Base.convert(::Type{GeoData}, d::UTMData) Lat = zeros(size(d.EW)); Lon = zeros(size(d.EW)); @@ -463,7 +518,7 @@ function Base.convert(::Type{GeoData}, d::UTMData) Lat[i] = 
lla_i.lat Lon[i] = lon - end + end # handle the case where an old GeoData structure is converted if any( propertynames(d) .== :atts) @@ -484,7 +539,7 @@ end """ Converts a `GeoData` structure to a `UTMData` structure """ -function Base.convert(::Type{UTMData}, d::GeoData) +function Base.convert(::Type{UTMData}, d::GeoData) EW = zeros(size(d.lon)); NS = zeros(size(d.lon)); @@ -496,13 +551,13 @@ function Base.convert(::Type{UTMData}, d::GeoData) # Use functions of the Geodesy package to convert to LLA lla_i = LLA(d.lat.val[i],d.lon.val[i],Float64(ustrip.(d.depth.val[i])*1e3)) utmz_i = UTMZ(lla_i, wgs84) - + EW[i] = utmz_i.x NS[i] = utmz_i.y depth[i] = utmz_i.z zone[i] = utmz_i.zone; northern[i] = utmz_i.isnorth - end + end # handle the case where an old GeoData structure is converted if any( propertynames(d) .== :atts) @@ -522,11 +577,11 @@ end This flips the data in the structure in a certain dimension (default is z [3]) """ function flip(Data::GeoData, dimension=3) - + depth = reverse(Data.depth.val,dims=dimension)*Data.depth.unit # flip depth - lon = reverse(Data.lon.val,dims=dimension)*Data.lon.unit # flip - lat = reverse(Data.lat.val,dims=dimension)*Data.lat.unit # flip - + lon = reverse(Data.lon.val,dims=dimension)*Data.lon.unit # flip + lat = reverse(Data.lat.val,dims=dimension)*Data.lat.unit # flip + # flip fields fields = Data.fields; name_keys = keys(fields) @@ -540,20 +595,20 @@ end """ - Convert2UTMzone(d::GeoData, p::ProjectionPoint) + Convert2UTMzone(d::GeoData, p::ProjectionPoint) -Converts a `GeoData` structure to fixed UTM zone, around a given `ProjectionPoint` +Converts a `GeoData` structure to fixed UTM zone, around a given `ProjectionPoint` This useful to use real data as input for a cartesian geodynamic model setup (such as in LaMEM). In that case, we need to project map coordinates to cartesian coordinates. One way to do this is by using UTM coordinates. 
Close to the `ProjectionPoint` the resulting coordinates will be rectilinear and distance in meters. The map distortion becomes larger the further you are away from the center. - + """ -function Convert2UTMzone(d::GeoData, proj::ProjectionPoint) +function Convert2UTMzone(d::GeoData, proj::ProjectionPoint) EW = zeros(size(d.lon)); NS = zeros(size(d.lon)); zone = zeros(Int64,size(d.lon)); northern = zeros(Bool,size(d.lon)); - trans = UTMfromLLA(proj.zone, proj.isnorth, wgs84) + trans = UTMfromLLA(proj.zone, proj.isnorth, wgs84) for i in eachindex(d.lon.val) # Use functions of the Geodesy package to convert to LLA @@ -564,7 +619,7 @@ function Convert2UTMzone(d::GeoData, proj::ProjectionPoint) NS[i] = utm_i.y zone[i] = proj.zone; northern[i] = proj.isnorth - end + end # handle the case where an old GeoData structure is converted if any( propertynames(d) .== :atts) @@ -579,21 +634,21 @@ end -""" +""" CartData(x::Any, y::Any, z::GeoUnit, fields::NamedTuple) - + Data structure that holds one or several fields with with Cartesian x/y/z coordinates. Distances are in kilometers - `x`,`y`,`z` can have units of meters, kilometer or be unitless; they will be converted to kilometers -- `fields` should ideally be a NamedTuple which allows you to specify the names of each of the fields. +- `fields` should ideally be a NamedTuple which allows you to specify the names of each of the fields. - In case you only pass one array we will convert it to a NamedTuple with default name. - A single field should be added as `(DataFieldName=Data,)` (don't forget the comma at the end). - Multiple fields can be added as well. - In case you want to display a vector field in paraview, add it as a tuple: `(Velocity=(Vx,Vnorth,Vup), Veast=Veast, Vnorth=Vnorth, Vup=Vup)`; we automatically apply a vector transformation when transforming this to a `ParaviewData` structure from which we generate Paraview output. 
As this changes the magnitude of the arrows, you will no longer see the `[Veast,Vnorth,Vup]` components in Paraview which is why it is a good ideas to store them as separate Fields. -- Yet, there is one exception: if the name of the 3-component field is `colors`, we do not apply this vector transformation as this field is regarded to contain RGB colors. +- Yet, there is one exception: if the name of the 3-component field is `colors`, we do not apply this vector transformation as this field is regarded to contain RGB colors. - `x`,`y`,`z` should have the same size as the `Data` array. The ordering of the arrays is important. If they are 3D arrays, as in the example below, we assume that the first dimension corresponds to `x`, second dimension to `y` and third dimension to `z` (which should be in km). See below for an example. -# Example +# Example ```julia-repl julia> x = 0:2:10 julia> y = -5:5 @@ -601,7 +656,7 @@ julia> z = -10:2:2 julia> X,Y,Z = XYZGrid(x, y, z); julia> Data = Z julia> Data_set = CartData(X,Y,Z, (FakeData=Data,Data2=Data.+1.)) -CartData +CartData size : (6, 11, 7) x ϵ [ 0.0 km : 10.0 km] y ϵ [ -5.0 km : 5.0 km] @@ -620,7 +675,7 @@ julia> Write_Paraview(Data_set, "Data_set") If you wish, you can convert this to `UTMData` (which will simply convert the ) ```julia-repl julia> Data_set1 = convert(GeoData, Data_set) -GeoData +GeoData size : (116, 96, 25) lon ϵ [ 14.075969111533457 : 14.213417764154963] lat ϵ [ 40.77452227533946 : 40.86110443583479] @@ -632,21 +687,21 @@ which would allow visualizing this in paraview in the usual manner: """ struct CartData <: AbstractGeneralGrid x :: GeoUnit - y :: GeoUnit + y :: GeoUnit z :: GeoUnit fields :: NamedTuple - atts :: Dict - + atts :: Dict + # Ensure that the data is of the correct format function CartData(x,y,z,fields,atts=nothing) - + # Check ordering of the arrays in case of 3D if sum(size(x).>1)==3 if maximum(abs.(diff(x,dims=2)))>maximum(abs.(diff(x,dims=1))) || 
maximum(abs.(diff(x,dims=3)))>maximum(abs.(diff(x,dims=1))) - error("It appears that the x-array has a wrong ordering") + @warn "It appears that the x-array has a wrong ordering" end if maximum(abs.(diff(y,dims=1)))>maximum(abs.(diff(y,dims=2))) || maximum(abs.(diff(y,dims=3)))>maximum(abs.(diff(y,dims=2))) - error("It appears that the y-array has a wrong ordering") + @warn "It appears that the y-array has a wrong ordering" end end @@ -654,7 +709,7 @@ struct CartData <: AbstractGeneralGrid x = Convert!(x,km) y = Convert!(y,km) z = Convert!(z,km) - + # fields should be a NamedTuple. In case we simply provide an array, lets transfer it accordingly if !(typeof(fields)<: NamedTuple) if (typeof(fields)<: Tuple) @@ -673,7 +728,7 @@ struct CartData <: AbstractGeneralGrid DataField = DataField[1]; # in case we have velocity vectors as input end - if !(size(x)==size(y)==size(z)==size(DataField)) + if !(size(x)==size(y)==size(z)==size(DataField)) error("The size of x/y/z and the Fields should all be the same!") end @@ -689,7 +744,7 @@ struct CartData <: AbstractGeneralGrid end return new(x,y,z,fields,atts) - + end end @@ -700,10 +755,29 @@ function Base.show(io::IO, d::CartData) println(io," size : $(size(d.x))") println(io," x ϵ [ $(minimum(d.x.val)) : $(maximum(d.x.val))]") println(io," y ϵ [ $(minimum(d.y.val)) : $(maximum(d.y.val))]") - println(io," z ϵ [ $(minimum(d.z.val)) : $(maximum(d.z.val))]") + + if any(isnan.(NumValue(d.z))) + z_vals = extrema(d.z.val[isnan.(d.z.val).==false]) + println(io," z ϵ [ $(z_vals[1]) : $(z_vals[2])]; has NaN's") + else + z_vals = extrema(d.z.val) + println(io," z ϵ [ $(z_vals[1]) : $(z_vals[2])]") + end + + println(io," fields : $(keys(d.fields))") + + # Only print attributes if we have non-default attributes if any( propertynames(d) .== :atts) + show_atts = true + if haskey(d.atts,"note") + if d.atts["note"]=="No attributes were given to this dataset" + show_atts = false + end + end + if show_atts println(io," attributes: 
$(keys(d.atts))") + end end end @@ -711,10 +785,10 @@ end CartData(xyz::Tuple{Array,Array,Array}) This creates a `CartData` struct if you have a Tuple with 3D coordinates as input. -# Example +# Example ```julia julia> data = CartData(XYZGrid(-10:10,-5:5,0)) -CartData +CartData size : (21, 11, 1) x ϵ [ -10.0 km : 10.0 km] y ϵ [ -5.0 km : 5.0 km] @@ -728,11 +802,11 @@ CartData(xyz::Tuple) = CartData(xyz[1],xyz[2],xyz[3],(Z=xyz[3],)) """ - Convert2UTMzone(d::CartData, proj::ProjectionPoint) + Convert2UTMzone(d::CartData, proj::ProjectionPoint) This transfers a `CartData` dataset to a `UTMData` dataset, that has a single UTM zone. The point around which we project is `ProjectionPoint` """ -function Convert2UTMzone(d::CartData, proj::ProjectionPoint) +function Convert2UTMzone(d::CartData, proj::ProjectionPoint) return UTMData(ustrip.(d.x.val).*1e3 .+ proj.EW,ustrip.(d.y.val).*1e3 .+ proj.NS, ustrip.(d.z.val).*1e3,proj.zone, proj.isnorth, d.fields, d.atts) @@ -743,7 +817,7 @@ end Convert2CartData(d::UTMData, proj::ProjectionPoint) Converts a `UTMData` structure to a `CartData` structure, which essentially transfers the dimensions to km """ -function Convert2CartData(d::UTMData, proj::ProjectionPoint) +function Convert2CartData(d::UTMData, proj::ProjectionPoint) # handle the case where an old structure is converted if any( propertynames(d) .== :atts) @@ -761,7 +835,7 @@ end Convert2CartData(d::GeoData, proj::ProjectionPoint) Converts a `GeoData` structure to a `CartData` structure, which essentially transfers the dimensions to km """ -function Convert2CartData(d::GeoData, proj::ProjectionPoint) +function Convert2CartData(d::GeoData, proj::ProjectionPoint) d_UTM = Convert2UTMzone(d,proj) return CartData( (ustrip.(d_UTM.EW.val) .- proj.EW)./1e3, (ustrip.(d_UTM.NS.val) .- proj.NS)./1e3, @@ -809,7 +883,7 @@ function LonLatDepthGrid(Lon::Any, Lat::Any, Depth::Any) if nLon==nLat==nDepth==1 error("Cannot use this routine for a 3D point (no need to create a grid in that case") - 
end + end if maximum([length(size(Lon)), length(size(Lat)), length(size(Depth))])>1 error("You can only give 1D vectors or numbers as input") end @@ -861,32 +935,32 @@ end In-place conversion of velocities in spherical velocities `[Veast, Vnorth, Vup]` to cartesian coordinates (for use in paraview). NOTE: the magnitude of the vector will be the same, but the individual `[Veast, Vnorth, Vup]` components -will not be retained correctly (as a different `[x,y,z]` coordinate system is used in paraview). +will not be retained correctly (as a different `[x,y,z]` coordinate system is used in paraview). Therefore, if you want to display or color that correctly in Paraview, you need to store these magnitudes as separate fields """ function Velocity_SphericalToCartesian!(Data::GeoData, Velocity::Tuple) - # Note: This is partly based on scripts originally written by Tobias Baumann, Uni Mainz + # Note: This is partly based on scripts originally written by Tobias Baumann, Uni Mainz for i in eachindex(Data.lat.val) az = Data.lon.val[i]; el = Data.lat.val[i]; R = [-sind(az) -sind(el)*cosd(az) cosd(el)*cosd(az); - cosd(az) -sind(el)*sind(az) cosd(el)*sind(az); + cosd(az) -sind(el)*sind(az) cosd(el)*sind(az); 0.0 cosd(el) sind(el) ]; - + V_sph = [Velocity[1][i]; Velocity[2][i]; Velocity[3][i] ]; - + # Normalize spherical velocity V_mag = sum(sqrt.(V_sph.^2)); # magnitude - V_norm = V_sph/V_mag + V_norm = V_sph/V_mag V_xyz_norm = R*V_norm; V_xyz = V_xyz_norm.*V_mag; # scale with magnitude - # in-place saving of rotated velocity - Velocity[1][i] = V_xyz[1]; + # in-place saving of rotated velocity + Velocity[1][i] = V_xyz[1]; Velocity[2][i] = V_xyz[2]; Velocity[3][i] = V_xyz[3]; end @@ -894,7 +968,7 @@ end # Internal function that converts arrays to a GeoUnit with certain units function Convert!(d,u) - if unit.(d)[1]==NoUnits + if unit.(d)[1]==NoUnits d = d*u # in case it has no dimensions end d = uconvert.(u,d) # convert to u @@ -953,18 +1027,18 @@ struct CartGrid{FT, D} <: 
AbstractGeneralGrid N :: NTuple{D,Int} # Number of grid points in every direction Δ :: NTuple{D,FT} # (constant) spacing in every direction L :: NTuple{D,FT} # Domain size - min :: NTuple{D,FT} # start of the grid in every direction - max :: NTuple{D,FT} # end of the grid in every direction + min :: NTuple{D,FT} # start of the grid in every direction + max :: NTuple{D,FT} # end of the grid in every direction coord1D :: NTuple{D,StepRangeLen{FT}} # Tuple with 1D vectors in all directions coord1D_cen :: NTuple{D,StepRangeLen{FT}} # Tuple with 1D vectors of center points in all directions -end +end """ Grid = CreateCartGrid(; size=(), x = nothing, z = nothing, y = nothing, extent = nothing, CharDim = nothing) -Creates a 1D, 2D or 3D cartesian grid of given size. Grid can be created by defining the size and either the `extent` (length) of the grid in all directions, or by defining start & end points (`x`,`y`,`z`). +Creates a 1D, 2D or 3D cartesian grid of given size. Grid can be created by defining the size and either the `extent` (length) of the grid in all directions, or by defining start & end points (`x`,`y`,`z`). If you specify `CharDim` (a structure with characteristic dimensions created with `GeoParams.jl`), we will nondimensionalize the grd before creating the struct. 
Spacing is assumed to be constant in a given direction @@ -980,22 +1054,22 @@ Note: since this is mostly for solid Earth geoscience applications, the second d A basic case with non-dimensional units: ```julia julia> Grid = CreateCartGrid(size=(10,20),x=(0.,10), z=(2.,10)) -Grid{Float64, 2} - size: (10, 20) - length: (10.0, 8.0) - domain: x ∈ [0.0, 10.0], z ∈ [2.0, 10.0] - grid spacing Δ: (1.1111111111111112, 0.42105263157894735) +Grid{Float64, 2} + size: (10, 20) + length: (10.0, 8.0) + domain: x ∈ [0.0, 10.0], z ∈ [2.0, 10.0] + grid spacing Δ: (1.1111111111111112, 0.42105263157894735) ``` An example with dimensional units: ```julia julia> CharDim = GEO_units() julia> Grid = CreateCartGrid(size=(10,20),x=(0.0km, 10km), z=(-20km, 10km), CharDim=CharDim) -CartGrid{Float64, 2} - size: (10, 20) - length: (0.01, 0.03) - domain: x ∈ [0.0, 0.01], z ∈ [-0.02, 0.01] - grid spacing Δ: (0.0011111111111111111, 0.0015789473684210528) +CartGrid{Float64, 2} + size: (10, 20) + length: (0.01, 0.03) + domain: x ∈ [0.0, 0.01], z ∈ [-0.02, 0.01] + grid spacing Δ: (0.0011111111111111111, 0.0015789473684210528) ``` @@ -1007,16 +1081,16 @@ function CreateCartGrid(; extent = nothing, CharDim = nothing ) - + if isa(size, Number) size = (size,) # transfer to tuple end if isa(extent, Number) - extent = (extent,) + extent = (extent,) end N = size - dim = length(N) - + dim = length(N) + # Specify domain by length in every direction if !isnothing(extent) x,y,z = nothing, nothing, nothing @@ -1043,13 +1117,13 @@ function CreateCartGrid(; L = (x[2] - x[1], y[2] - y[1], z[2] - z[1]) X₁= (x[1], y[1], z[1]) end - Xₙ = X₁ .+ L - Δ = L ./ (N .- 1) - - # nondimensionalize + Xₙ = X₁ .+ L + Δ = L ./ (N .- 1) + + # nondimensionalize if !isnothing(CharDim) X₁, Xₙ, Δ, L = GeoUnit.(X₁), GeoUnit.(Xₙ), GeoUnit.(Δ), GeoUnit.(L) - + X₁ = ntuple( i -> nondimensionalize(X₁[i], CharDim), dim) Xₙ = ntuple( i -> nondimensionalize(Xₙ[i], CharDim), dim) Δ = ntuple( i -> nondimensionalize(Δ[i], CharDim), dim) @@ 
-1058,18 +1132,18 @@ function CreateCartGrid(; X₁, Xₙ, Δ, L = NumValue.(X₁), NumValue.(Xₙ), NumValue.(Δ), NumValue.(L) end - # Generate 1D coordinate arrays of vertexes in all directions + # Generate 1D coordinate arrays of vertices in all directions coord1D=() for idim=1:dim coord1D = (coord1D..., range(X₁[idim], Xₙ[idim]; length = N[idim] )) end - + # Generate 1D coordinate arrays centers in all directionbs coord1D_cen=() for idim=1:dim coord1D_cen = (coord1D_cen..., range(X₁[idim]+Δ[idim]/2, Xₙ[idim]-Δ[idim]/2; length = N[idim]-1 )) end - + ConstantΔ = true; return CartGrid(ConstantΔ,N,Δ,L,X₁,Xₙ,coord1D, coord1D_cen) @@ -1079,7 +1153,7 @@ end # view grid object function show(io::IO, g::CartGrid{FT, DIM}) where {FT, DIM} - + print(io, "CartGrid{$FT, $DIM} \n", " size: $(g.N) \n", " length: $(g.L) \n", @@ -1090,7 +1164,7 @@ end # nice printing of grid function domain_string(grid::CartGrid{FT, DIM}) where {FT, DIM} - + xₗ, xᵣ = grid.coord1D[1][1], grid.coord1D[1][end] if DIM>1 yₗ, yᵣ = grid.coord1D[2][1], grid.coord1D[2][end] @@ -1120,7 +1194,7 @@ function coordinate_grids(Data::CartGrid) end """ - Data = CartData(Grid::CartGrid, fields::NamedTuple; y_val=0.0) + Data = CartData(Grid::CartGrid, fields::NamedTuple; y_val=0.0) Returns a CartData set given a cartesian grid `Grid` and `fields` defined on that grid. 
""" @@ -1136,8 +1210,8 @@ function CartData(Grid::CartGrid, fields::NamedTuple; y_val=0.0) dat = reshape(fields[ifield],Grid.N[1],1,Grid.N[2]); # reshape into 3D form fields = merge(fields, [names[ifield] => dat]) end - + end - + return CartData(X,Y,Z, fields) -end \ No newline at end of file +end diff --git a/src/stl.jl b/src/stl.jl index 45e69b06..97a5300e 100644 --- a/src/stl.jl +++ b/src/stl.jl @@ -1,12 +1,12 @@ # NOTE: this remains WIP, which is why it is not yet in the documentation -# These are routines that allow importing *.stl triangular surfaces and employ them +# These are routines that allow importing *.stl triangular surfaces and employ them using FileIO using GeometryBasics: TriangleP, Mesh, normals, PointMeta, coordinates using LinearAlgebra -# Warning: the TriangleIntersect dependency does not seem to work on different machines, as the developer did not add a version nunber.. -# That forces us to remove it here, and +# Warning: the TriangleIntersect dependency does not seem to work on different machines, as the developer did not add a version number.. 
+# That forces us to remove it here, and #using TriangleIntersect export Ray, Intersection, IntersectRayTriangle, load, TriangleNormal, Point, IntersectRayMesh, coordinates @@ -19,12 +19,12 @@ Base.convert(::Type{Triangle}, t::TriangleP) = Triangle(convert(Point,t[1]) Triangle(t::TriangleP) = convert(Triangle,t) -# Define a few routins to allow computing the intersection of a ray with a triangle +# Define a few routines to allow computing the intersection of a ray with a triangle #/(p::GeometryBasics.Point, n::Number) = GeometryBasics.Point(p.x/n, p.y/n, p.z/n) #unitize(p::GeometryBasics.Point) = p/(p*p) #= -# Intersection +# Intersection struct Intersection ip::Point # intersecting point id::Float64 # intersecting distance @@ -63,7 +63,7 @@ function IntersectRayTriangle(r::Ray, t::TriangleP, normal) v2v2 = dot(v2,v2) v1v2 = dot(v1,v2) t_denom = v1v2*v1v2 - v1v1*v2v2 - + denom = dot(normal,r.direction) denom == 0 && return no_intersection ri = normal*(t[1] - r.origin) / denom @@ -80,7 +80,7 @@ function IntersectRayTriangle(r::Ray, t::TriangleP, normal) t_intersection >= 1 && return no_intersection s_intersection + t_intersection >= 1 && return no_intersection return Intersection(t[1] + s_intersection*v1+t_intersection*v2, ri, true) - + end =# @@ -119,31 +119,31 @@ function STLToSurface(name::String, Xin, Yin, minZ) else X,Y = Xin, Yin; end - + Z = ones(size(X))*NaN - + minM = minimum.(mesh) maxM = maximum.(mesh) max_x = [maxM[i][1] for i=1:length(maxM)] min_x = [minM[i][1] for i=1:length(maxM)] max_y = [maxM[i][2] for i=1:length(maxM)] min_y = [minM[i][2] for i=1:length(maxM)] - + for i in eachindex(X) - + r_up = Ray(Point(X[i],Y[i],minZ),Point(0.0, 0.0, 1.0)) - - ind = findall( (X[i] .>= min_x) .& (X[i] .<= max_x) .& + + ind = findall( (X[i] .>= min_x) .& (X[i] .<= max_x) .& (Y[i] .>= min_y) .& (Y[i] .<= max_y) ) for iT in eachindex(ind) - + is = intersect(r_up, Triangle(mesh[ind[iT]])) - if is.is_intersection + if is.is_intersection Z[i] = is.ip.z end - + end end 
@@ -153,7 +153,7 @@ function STLToSurface(name::String, Xin, Yin, minZ) X_out[:,:,1] = X; Y_out[:,:,1] = Y; Z_out[:,:,1] = Z; - + Data = ParaviewData(X_out,Y_out,Z_out,(depth=Z_out,)) return Data @@ -162,26 +162,26 @@ end =# -""" - inside = IsInsideClosedSTL(mesh::Mesh, Pt, eps=1e-3) +""" + inside = IsInsideClosedSTL(mesh::Mesh, Pt, eps=1e-3) Determine whether a point `Pt` is inside a 3D closed triangular `*.stl` surface or not. -This implements the winding number method, following the python code: +This implements the winding number method, following the python code: https://github.com/marmakoide/inside-3d-mesh This again is described in the following [paper](https://igl.ethz.ch/projects/winding-number/) by Alec Jacobson, Ladislav Kavan and Olga Sorkine-Hornung. """ -function IsInsideClosedSTL(mesh::Mesh, Pt::Vector, eps=1e-3) +function IsInsideClosedSTL(mesh::Mesh, Pt::Vector, eps=1e-3) # Compute triangle vertices and their norms relative to X - M_vec = [mesh.position[i]-Pt[:] for i in eachindex(mesh.position)]; + M_vec = [mesh.position[i]-Pt[:] for i in eachindex(mesh.position)]; M = zeros(length(M_vec),3); for i=1:length(M_vec) M[i,:] = Float64.(M_vec[i][1:3]); end M_norm = sqrt.(sum(M.^2,dims=2)) - + # Accumulate generalized winding number per triangle winding_number = 0. for iT=1:length(mesh) @@ -204,8 +204,7 @@ function IsInsideClosedSTL(mesh::Mesh, Pt::Vector, eps=1e-3) else isinside = false end - - + + return isinside end - diff --git a/src/transformation.jl b/src/transformation.jl index da79dbf7..2eb36222 100644 --- a/src/transformation.jl +++ b/src/transformation.jl @@ -44,6 +44,32 @@ function ProjectCartData(d_cart::CartData, d::GeoData, p::ProjectionPoint) return d_cart end +""" + d_cart = ProjectCartData(d_cart::CartData, d::GeoData, p::ProjectionPoint) + +Projects all datafields from the GeoData struct `d` to the CartData struct `d_cart`, around the projection point `p`. 
+`d_cart` *must* be an orthogonal cartesian grid (deformed doesn't work; use `Convert2CartData(d, proj)`, where `proj` is a projection point in that case). + +# Note: +- If `d_cart` and `d` are horizontal surfaces (3rd dimension has size==1), it also interpolates the depth coordinate. + +""" +function ProjectCartData(d_cart::CartData, d_cart_data0::CartData) + + if size(d_cart_data0.x.val,3)==1 + z_new, fields_new = InterpolateDataFields2D(d,d_cart_data0.x.val, d_cart_data0.y.val) + + # Create new struct + d_cart = CartData(d_cart.x.val,d_cart.y.val,z_new,fields_new) + + else + d_data = InterpolateDataFields(d, d_cart_data0.x.val, d_cart_data0.y.val, d_cart_data0.z.val) + d_cart = CartData(d_cart.x.val,d_cart.y.val,d_cart.z.val,d_data.fields) + + end + + return d_cart +end """ diff --git a/src/utils.jl b/src/utils.jl index 274a4433..31ad6ff9 100644 --- a/src/utils.jl +++ b/src/utils.jl @@ -1,4 +1,4 @@ -# few utils that are useful +# few utils that are useful export meshgrid, CrossSection, CrossSectionVolume, CrossSectionSurface, CrossSectionPoints, ExtractSubvolume, SubtractHorizontalMean export ParseColumns_CSV_File, AboveSurface, BelowSurface, VoteMap @@ -7,6 +7,7 @@ export RotateTranslateScale export DrapeOnTopo, LithostaticPressure! export FlattenCrossSection export AddField, RemoveField +export SubtractSurfaces!, AddSurfaces!, remove_NaN_Surface! 
using NearestNeighbors @@ -31,26 +32,26 @@ end """ V = AddField(V::AbstractGeneralGrid,field_name::String,data::Any) -Add Fields Data to GeoData or CartData +Add Fields Data to GeoData or CartData """ function AddField(V::AbstractGeneralGrid,field_name::String,data::Any) fields_new = V.fields; new_field = NamedTuple{(Symbol(field_name),)}((data,)); - fields_new = merge(fields_new, new_field); # replace the field in fields_new + fields_new = merge(fields_new, new_field); # replace the field in fields_new - if isa(V,GeoData) + if isa(V,GeoData) V = GeoData(V.lon.val,V.lat.val,V.depth.val,fields_new) elseif isa(V,CartData) V = CartData(V.x.val,V.y.val,V.z.val,fields_new) else error("AddField is only implemented for GeoData and CartData structures") end - - return V + + return V end -# this function is taken from @JeffreySarnoff -function dropnames(namedtuple::NamedTuple, names::Tuple{Vararg{Symbol}}) +# this function is taken from @JeffreySarnoff +function dropnames(namedtuple::NamedTuple, names::Tuple{Vararg{Symbol}}) keepnames = Base.diff_names(Base._nt_names(namedtuple), names) return NamedTuple{keepnames}(namedtuple) end @@ -62,26 +63,26 @@ Removes the field with name `field_name` from the GeoData or CartData dataset """ function RemoveField(V::AbstractGeneralGrid,field_name::String) - fields_new = V.fields; + fields_new = V.fields; fields_new = dropnames(fields_new, (Symbol(field_name),)) - - if isa(V,GeoData) + + if isa(V,GeoData) V = GeoData(V.lon.val,V.lat.val,V.depth.val,fields_new) elseif isa(V,CartData) V = CartData(V.x.val,V.y.val,V.z.val,fields_new) else error("RemoveField is only implemented for GeoData and CartData structures") end - - return V + + return V end """ -CrossSectionVolume(Volume::GeoData; dims=(100,100), Interpolate=false, Depth_level=nothing; Lat_level=nothing; Lon_level=nothing; Start=nothing, End=nothing, Depth_extent=nothing ) +CrossSectionVolume(Volume::AbstractGeneralGrid; dims=(100,100), Interpolate=false, Depth_level=nothing; 
Lat_level=nothing; Lon_level=nothing; Start=nothing, End=nothing, Depth_extent=nothing ) -Creates a cross-section through a volumetric (3D) `GeoData` object. +Creates a cross-section through a volumetric (3D) `GeoData` object. - Cross-sections can be horizontal (map view at a given depth), if `Depth_level` is specified - They can also be vertical, either by specifying `Lon_level` or `Lat_level` (for a fixed lon/lat), or by defining both `Start=(lon,lat)` & `End=(lon,lat)` points. @@ -94,9 +95,9 @@ Creates a cross-section through a volumetric (3D) `GeoData` object. julia> Lon,Lat,Depth = LonLatDepthGrid(10:20,30:40,(-300:25:0)km); julia> Data = Depth*2; # some data julia> Vx,Vy,Vz = ustrip(Data*3),ustrip(Data*4),ustrip(Data*5); -julia> Data_set3D = GeoData(Lon,Lat,Depth,(Depthdata=Data,LonData=Lon, Velocity=(Vx,Vy,Vz))); -julia> Data_cross = CrossSectionVolume(Data_set3D, Depth_level=-100km) -GeoData +julia> Data_set3D = GeoData(Lon,Lat,Depth,(Depthdata=Data,LonData=Lon, Velocity=(Vx,Vy,Vz))); +julia> Data_cross = CrossSectionVolume(Data_set3D, Depth_level=-100km) +GeoData size : (11, 11, 1) lon ϵ [ 10.0 : 20.0] lat ϵ [ 30.0 : 40.0] @@ -119,7 +120,7 @@ function CrossSectionVolume(V::AbstractGeneralGrid; dims=(100,100), Interpolate= X,Y,Z = coordinate_grids(V) if !isnothing(Depth_level) # Horizontal slice - CheckBounds(Z, Depth_level) + CheckBounds(Z, Depth_level) if Interpolate Lon,Lat,Depth = LonLatDepthGrid( LinRange(minimum(X), maximum(X), dims[1]), LinRange(minimum(Y), maximum(Y), dims[2]), @@ -133,7 +134,7 @@ function CrossSectionVolume(V::AbstractGeneralGrid; dims=(100,100), Interpolate= end if !isnothing(Lat_level) # vertical slice @ given latitude - CheckBounds(Y, Lat_level) + CheckBounds(Y, Lat_level) if Interpolate Lon,Lat,Depth = LonLatDepthGrid( LinRange(minimum(X), maximum(X), dims[1]), Lat_level, @@ -147,8 +148,8 @@ function CrossSectionVolume(V::AbstractGeneralGrid; dims=(100,100), Interpolate= end if !isnothing(Lon_level) # vertical slice @ given 
longitude - CheckBounds(X, Lon_level) - if Interpolate + CheckBounds(X, Lon_level) + if Interpolate Lon,Lat,Depth = LonLatDepthGrid( Lon_level, LinRange(minimum(Y), maximum(Y), dims[1]), LinRange(minimum(Z), maximum(Z), dims[2])) @@ -188,17 +189,17 @@ function CrossSectionVolume(V::AbstractGeneralGrid; dims=(100,100), Interpolate= Lon = zeros(dims[1],dims[2],1) Lat = zeros(dims[1],dims[2],1) Depth = zeros(dims[1],dims[2],1)*Depth_p[1] - + # We need 3D matrixes for the paraview writing routine to know we are in 3D Lon[:,:,1] = Lon_p[:,1,:] Lat[:,:,1] = Lat_p[1,:,:] Depth[:,:,1] = Depth_p[1,:,:] - + end if Interpolate # Interpolate data on profile - DataProfile = InterpolateDataFields(V, Lon, Lat, NumValue(Depth)); + DataProfile = InterpolateDataFields(V, Lon, Lat, NumValue(Depth)); else # extract data (no interpolation) DataProfile = ExtractDataSets(V, iLon, iLat, iDepth); @@ -209,7 +210,7 @@ function CrossSectionVolume(V::AbstractGeneralGrid; dims=(100,100), Interpolate= end -""" +""" CrossSectionSurface(Surface::GeoData; dims=(100,), Interpolate=false, Depth_level=nothing; Lat_level=nothing; Lon_level=nothing; Start=nothing, End=nothing ) Creates a cross-section through a surface (2D) `GeoData` object. @@ -224,9 +225,9 @@ Creates a cross-section through a surface (2D) `GeoData` object. 
julia> Lon,Lat,Depth = LonLatDepthGrid(10:20,30:40,-50km); julia> Data = Depth*2; # some data julia> Vx,Vy,Vz = ustrip(Data*3),ustrip(Data*4),ustrip(Data*5); -julia> Data_set2D = GeoData(Lon,Lat,Depth,(Depth=Depth,)); -julia> Data_cross = CrossSectionSurface(Data_set2D, Lat_level =15) -GeoData +julia> Data_set2D = GeoData(Lon,Lat,Depth,(Depth=Depth,)); +julia> Data_cross = CrossSectionSurface(Data_set2D, Lat_level =15) +GeoData size : (100,) lon ϵ [ 10.0 : 20.0] lat ϵ [ 15.0 : 15.0] @@ -244,7 +245,7 @@ function CrossSectionSurface(S::AbstractGeneralGrid; dims=(100,), Interpolate=tr end X,Y,Z = coordinate_grids(S) - + Lon_vec = X[:,1,1] Lat_vec = Y[1,:,1] @@ -285,13 +286,13 @@ function CrossSectionSurface(S::AbstractGeneralGrid; dims=(100,), Interpolate=tr for i = 1:length(S.fields) if typeof(S.fields[i]) <: Tuple # vector or anything that contains more than 1 field - data_tuple = fields_new[i] # we have a tuple (likely a vector field), so we have to loop + data_tuple = fields_new[i] # we have a tuple (likely a vector field), so we have to loop data_array = zeros(size(Lon,1),size(Lon,2),length(data_tuple)); # create a 2D array that holds the 2D interpolated values unit_array = zeros(size(data_array)); for j=1:length(data_tuple) interpol = linear_interpolation((Lon_vec, Lat_vec), dropdims(ustrip.(data_tuple[j]),dims=3),extrapolation_bc = NaN); # create interpolation object - data_array[:,:,j] = interpol.(Lon, Lat); + data_array[:,:,j] = interpol.(Lon, Lat); end data_new = tuple([data_array[:,:,c] for c in 1:size(data_array,3)]...) 
# transform 3D matrix to tuple, do not add unit, as this creates an error in GMG (Issue), to add the unit: *unit(S.fields[i][1][1]) @@ -300,11 +301,11 @@ function CrossSectionSurface(S::AbstractGeneralGrid; dims=(100,), Interpolate=tr interpol = linear_interpolation((Lon_vec, Lat_vec), dropdims(ustrip.(S.fields[i]),dims=3), extrapolation_bc = NaN); data_new = interpol.(Lon, Lat)*unit(S.fields[i][1]); # interpolate data field end - - # replace the field + + # replace the field new_field = NamedTuple{(field_names[i],)}((data_new,)) # Create a tuple with same name and unit fields_new = merge(fields_new, new_field); # replace the field in fields_new - + end # create GeoData structure with the interpolated points @@ -316,19 +317,19 @@ end """ function CrossSectionPoints(P::GeoData; Depth_level=nothing, Lat_level=nothing, Lon_level=nothing, Start=nothing, End=nothing, section_width=50 ) - + Creates a projection of separate points (saved as a GeoData object) onto a chosen plane. Only points with a maximum distance of section_width are taken into account """ function CrossSectionPoints(P::GeoData; Depth_level=nothing, Lat_level=nothing, Lon_level=nothing, Start=nothing, End=nothing, section_width = 10km) - + DataSetType = CheckDataSet(P); if DataSetType != 1 error("CrossSectionPoints: the input data set has to be a pointwise data set!") end - if !isnothing(Depth_level) - ind = findall(-0.5*ustrip(section_width) .< (NumValue(P.depth) .- ustrip(Depth_level)) .< 0.5*ustrip(section_width)) # find all points around the desired depth level, both units shoud be in km, so no unit transformation required + if !isnothing(Depth_level) + ind = findall(-0.5*ustrip(section_width) .< (NumValue(P.depth) .- ustrip(Depth_level)) .< 0.5*ustrip(section_width)) # find all points around the desired depth level, both units should be in km, so no unit transformation required # create temporary variables lon_tmp = NumValue(P.lon.val[ind]) @@ -343,7 +344,7 @@ function CrossSectionPoints(P::GeoData; 
Depth_level=nothing, Lat_level=nothing, if !isnothing(Lat_level) # vertical slice @ given latitude p_Point = ProjectionPoint(Lat=Lat_level,Lon=sum(P.lon.val)/length(P.lon.val)) # define the projection point (lat/lon) as the latitude and the mean of the longitudes of the data - P_UTM = Convert2UTMzone(P, p_Point) # convert to UTM + P_UTM = Convert2UTMzone(P, p_Point) # convert to UTM ind = findall(-0.5*ustrip(uconvert(u"m",section_width)) .< (P_UTM.NS.val .- p_Point.NS) .< 0.5*ustrip(uconvert(u"m",section_width))) # find all points around the desired latitude level, UTM is in m, so we have to convert the section width # create temporary variables @@ -383,7 +384,7 @@ function CrossSectionPoints(P::GeoData; Depth_level=nothing, Lat_level=nothing, # choose projection point based on Start and End coordinates of the profile p_Point = ProjectionPoint(Lat=0.5*(Start[2]+End[2]),Lon=0.5*(Start[1]+End[1])) - + # convert P to UTM Data P_UTM = Convert2UTMzone(P, p_Point) # convert to UTM @@ -408,7 +409,7 @@ function CrossSectionPoints(P::GeoData; Depth_level=nothing, Lat_level=nothing, t = (nx*Profile_UTM.EW.val[1] .- nx*P_UTM.EW.val .+ ny*Profile_UTM.NS.val[1] .- ny*P_UTM.NS.val .+ nz*Profile_UTM.depth.val[1]*1e3 .- nz*P_UTM.depth.val*1e3)/(nx*nx+ny*ny+nz*nz) - # compute the distance to the plane + # compute the distance to the plane dist = sqrt.((t.*nx).^2 + (t.*ny).^2 + (t.*nz).^2) # find the points that are within the required window around the profile @@ -447,25 +448,25 @@ function CrossSectionPoints(P::GeoData; Depth_level=nothing, Lat_level=nothing, for i = 1:length(P.fields) if typeof(P.fields[i]) <: Tuple # vector or anything that contains more than 1 field - data_tuple = fields_new[i] # we have a tuple (likely a vector field), so we have to loop + data_tuple = fields_new[i] # we have a tuple (likely a vector field), so we have to loop data_array = zeros(size(ind,1),length(data_tuple)); # create a 2D array that holds the chosen values for j=1:length(data_tuple) - 
data_array[:,j] = ustrip.(data_tuple[i][ind]) + data_array[:,j] = ustrip.(data_tuple[i][ind]) end data_new = tuple([data_array[:,:,c] for c in 1:size(data_array,3)]...) # transform 2D matrix to tuple, do not consider the unit as it creates an error in GMG (Issue), to add the unit: *unit.(P.fields[i][1][1] - + else # scalar field data_new = fields_new[i][ind]; # interpolate data field end - - # replace the field + + # replace the field new_field = NamedTuple{(field_names[i],)}((data_new,)) # Create a tuple with same name and unit fields_new = merge(fields_new, new_field); # replace the field in fields_new - + end - + # merge old and new fields fields_new = merge(fields_new,field_tmp); @@ -484,7 +485,7 @@ end """ CrossSection(DataSet::AbstractGeneralGrid; dims=(100,100), Interpolate=false, Depth_level=nothing, Lat_level=nothing, Lon_level=nothing, Start=nothing, End=nothing, Depth_extent=nothing, section_width=50km) -Creates a cross-section through a `GeoData` object. +Creates a cross-section through a `GeoData` object. - Cross-sections can be horizontal (map view at a given depth), if `Depth_level` is specified - They can also be vertical, either by specifying `Lon_level` or `Lat_level` (for a fixed lon/lat), or by defining both `Start=(lon,lat)` & `End=(lon,lat)` points. 
@@ -517,15 +518,17 @@ function CrossSection(DataSet::AbstractGeneralGrid; dims=(100,100), Interpolate= DataSetType = CheckDataSet(DataSet); # check which kind of data set we are dealing with if DataSetType==1 # points - DataProfile = CrossSectionPoints(DataSet; Depth_level, Lat_level, Lon_level, Start, End, section_width) + DataProfile = CrossSectionPoints(DataSet; Depth_level, Lat_level, Lon_level, Start, End, section_width) elseif DataSetType==2 # surface DataProfile = CrossSectionSurface(DataSet; dims, Depth_level, Lat_level, Lon_level, Start, End) - elseif DataSetType==3 # volume + elseif DataSetType==3 # volume DataProfile = CrossSectionVolume(DataSet; dims, Interpolate, Depth_level, Lat_level, Lon_level, Start, End, Depth_extent) + + # add field that has coordinates along the profile + DataProfile = AddField(DataProfile,"FlatCrossSection", FlattenCrossSection(DataProfile)) end return DataProfile - end """ @@ -544,7 +547,7 @@ x_new = FlattenCrossSection(Data_Cross) This flattened CrossSection can be added to original Data_Cross by AddField() Data_Cross = AddField(Data_Cross,"FlatCrossSection", x_new) -CartData +CartData size : (100, 100, 1) x ϵ [ 0.0 : 99.9] y ϵ [ -10.0 : 20.0] @@ -572,9 +575,9 @@ end julia> Lon,Lat,Depth = LonLatDepthGrid(10:20,30:40,(-300:25:0)km); julia> Data = Depth*2; # some data julia> Vx,Vy,Vz = ustrip(Data*3),ustrip(Data*4),ustrip(Data*5); - julia> Data_set3D = GeoData(Lon,Lat,Depth,(Depthdata=Data,LonData=Lon, Velocity=(Vx,Vy,Vz))); - julia> Data_cross = CrossSection(Data_set3D, Start=(10,30),End=(20,40)) - julia> x_profile = FlattenCrossSection(Data_cross) + julia> Data_set3D = GeoData(Lon,Lat,Depth,(Depthdata=Data,LonData=Lon, Velocity=(Vx,Vy,Vz))); + julia> Data_cross = CrossSection(Data_set3D, Start=(10,30),End=(20,40)) + julia> x_profile = FlattenCrossSection(Data_cross) julia> Data_cross = AddField(Data_cross,"x_profile",x_profile) ``` @@ -602,7 +605,7 @@ Extract or "cuts-out" a piece of a 2D or 3D GeoData set, defined by `Lon`, 
`Lat` This is useful if you are only interested in a part of a much bigger larger data set. -- `Lon_level`,`Lat_level` and `Depth_level` should be tuples that indicate `(minimum_value, maximum_value)` along the respective direction. If not specified we use the full range. +- `Lon_level`,`Lat_level` and `Depth_level` should be tuples that indicate `(minimum_value, maximum_value)` along the respective direction. If not specified we use the full range. - By default, `Interpolate=false` and we find the closest indices within the data set (so your new data set will not go exactly from minimum to maximum). - Alternatively, if `Interpolate=true` we interpolate the data onto a new grid that has dimensions `dims`. This can be useful to compare data sets that are originally given in different resolutions. @@ -612,14 +615,14 @@ julia> Lon,Lat,Depth = LonLatDepthGrid(10:20,30:40,(-300:25:0)km); julia> Data = Depth*2; # some data julia> Vx,Vy,Vz = ustrip(Data*3),ustrip(Data*4),ustrip(Data*5); julia> Data_set3D = GeoData(Lon,Lat,Depth,(Depthdata=Data,LonData=Lon, Velocity=(Vx,Vy,Vz))) -GeoData +GeoData size : (11, 11, 13) lon ϵ [ 10.0 : 20.0] lat ϵ [ 30.0 : 40.0] depth ϵ [ -300.0 km : 0.0 km] fields: (:Depthdata, :LonData, :Velocity) julia> Data_extracted = ExtractSubvolume(Data_set3D,Lon_level=(10,12),Lat_level=(35,40)) -GeoData +GeoData size : (3, 6, 13) lon ϵ [ 10.0 : 12.0] lat ϵ [ 35.0 : 40.0] @@ -632,7 +635,7 @@ By default it extracts the data points closest to the area defined by Lon_level/ Alternatively, you can also interpolate the data onto a new grid: ```julia julia> Data_extracted = ExtractSubvolume(Data_set3D,Lon_level=(10,12),Lat_level=(35,40), Interpolate=true, dims=(50,51,52)) -GeoData +GeoData size : (50, 51, 52) lon ϵ [ 10.0 : 12.0] lat ϵ [ 35.0 : 40.0] @@ -662,10 +665,10 @@ function ExtractSubvolume(V::GeoData; Interpolate=false, Lon_level=nothing, Lat_ # Don't interpolate i_s, i_e = argmin(abs.(V.lon.val[:,1,1] .- Lon_level[1])), argmin(abs.(V.lon.val[:,1,1] 
.- Lon_level[2])) iLon = i_s:i_e; - + i_s, i_e = argmin(abs.(V.lat.val[1,:,1] .- Lat_level[1])), argmin(abs.(V.lat.val[1,:,1] .- Lat_level[2])) iLat = i_s:i_e; - + i_s, i_e = argmin(abs.(V.depth.val[1,1,:] .- ustrip(Depth_level[1]))), argmin(abs.(V.depth.val[1,1,:] .- ustrip(Depth_level[2]))) step = 1; if i_e Lon,Lat,Depth = LonLatDepthGrid(10:20,30:40,(-300:25:0)km); +julia> Data = Depth*2; # some data +julia> Vx,Vy,Vz = ustrip(Data*3),ustrip(Data*4),ustrip(Data*5); +julia> Data_set3D = GeoData(Lon,Lat,Depth,(Depthdata=Data,LonData=Lon, Velocity=(Vx,Vy,Vz))) +GeoData + size : (11, 11, 13) + lon ϵ [ 10.0 : 20.0] + lat ϵ [ 30.0 : 40.0] + depth ϵ [ -300.0 km : 0.0 km] + fields: (:Depthdata, :LonData, :Velocity) +julia> Data_extracted = ExtractSubvolume(Data_set3D,Lon_level=(10,12),Lat_level=(35,40)) +GeoData + size : (3, 6, 13) + lon ϵ [ 10.0 : 12.0] + lat ϵ [ 35.0 : 40.0] + depth ϵ [ -300.0 km : 0.0 km] + fields: (:Depthdata, :LonData, :Velocity) +``` +By default it extracts the data points closest to the area defined by Lon_level/Lat_level/Depth_level. 
+ + +# 2D Example along a cross-section through 3D data: +```julia-repl +julia> X,Y,Z = XYZGrid(10:20,30:40,-300:25:0); +julia> Data = Z.*2 +julia> Data_Int = Int64.(Data) +julia> DataSet_Cart = CartData(X,Y,Z,(Data=Data,Data_Int=Data_Int, Velocity=(X,Y,Z))) + +julia> Data_cross = CrossSection(DataSet_Cart, Start=(11.0,35), End=(19, 39.0)) +CartData + size : (100, 100, 1) + x ϵ [ 11.0 : 19.0] + y ϵ [ 35.0 : 39.0] + z ϵ [ -300.0 : 0.0] + fields : (:Data, :Data_Int, :Velocity, :FlatCrossSection) + attributes: ["note"] + +julia> Data_extracted = ExtractSubvolume(Data_cross, X_level=(1,7), Z_level=(-200,-100)) + CartData + size : (50, 50, 1) + x ϵ [ 11.894427190999917 : 17.260990336999413] + y ϵ [ 35.44721359549995 : 38.130495168499706] + z ϵ [ -200.0 : -100.0] + fields : (:FlatCrossSection, :Data, :Data_Int, :Velocity) + attributes: ["note"] +julia> typeof(Data_extracted.fields.Data_Int) + Array{Int64, 3} +``` + +""" +function ExtractSubvolume(V::CartData; + Interpolate=true, + X_level=nothing, + X_cross=nothing, + Y_level=nothing, + Z_level=nothing, + dims=(50,50,50)) + + if isnothing(X_level) + X_level = (minimum(V.x.val), maximum(V.x.val)) + end + if isnothing(Y_level) + Y_level = (minimum(V.y.val), maximum(V.y.val)) + end + if isnothing(Z_level) + Z_level = (minimum(V.z.val), maximum(V.z.val)) + end + + if Interpolate==true && size(V.x.val)[3]>1 + X,Y,Z = LonLatDepthGrid( LinRange(X_level[1], X_level[2], dims[1]), + LinRange(Y_level[1], Y_level[2], dims[2]), + LinRange(Z_level[1], Z_level[2], dims[3]) ); + + Data_extract = InterpolateDataFields(V, X, Y, Z) + + elseif size(V.x.val)[3]==1 + + # we are dealing with a vertical cross-section through a 3D dataset computed with CrossSection(V,Start=.., End=...) 
+ Xcross=V.fields.FlatCrossSection; + if isnothing(X_level) + X_level = extrema(Xcross) + end + + dims_cross=(dims[1],dims[2],1); + + # we need to interpolate the data onto a new grid given by X_level and Z_level + X_level_cross = X_level; + + interpol_x = linear_interpolation(Xcross[:,1,1], V.x.val[:,1,1],extrapolation_bc = NaN); # create interpolation object + interpol_y = linear_interpolation(Xcross[:,1,1], V.y.val[:,1,1],extrapolation_bc = NaN); # create interpolation object + + X_level = interpol_x.(X_level_cross) + Y_level = interpol_y.(X_level_cross) + x = LinRange(X_level_cross[1], X_level_cross[2], dims_cross[1]) + z = LinRange(Z_level[1], Z_level[2], dims_cross[2]) + + X,Y,Z = zeros(dims[1],dims[2],1), zeros(dims[1],dims[2],1), zeros(dims[1],dims[2],1) + X_cross = zero(X) + for (i,x_val) in enumerate(x), (j,z_val) in enumerate(z) + X[i,j,1] = interpol_x(x_val) + Y[i,j,1] = interpol_y(x_val) + Z[i,j,1] = z_val + X_cross[i,j,1] = x_val + end + + Data_extract = InterpolateDataFields_CrossSection(V, X, Y, Z, X_cross); + + else + # Don't interpolate + i_s, i_e = argmin(abs.(V.x.val[:,1,1] .- X_level[1])), argmin(abs.(V.x.val[:,1,1] .- X_level[2])) + iLon = i_s:i_e; + + i_s, i_e = argmin(abs.(V.y.val[1,:,1] .- Y_level[1])), argmin(abs.(V.y.val[1,:,1] .- Y_level[2])) + iLat = i_s:i_e; + + i_s, i_e = argmin(abs.(V.z.val[1,1,:] .- ustrip(Z_level[1]))), argmin(abs.(V.z.val[1,1,:] .- ustrip(Z_level[2]))) + step = 1; + if i_emax_Data error("Outside bounds [$min_Data : $max_Data]; $Data_Cross") @@ -688,7 +873,7 @@ function CheckBounds(Data::GeoUnit, Data_Cross) end function CheckBounds(Data::AbstractArray, Data_Cross) - + min_Data, max_Data = NumValue(minimum(Data)), NumValue(maximum(Data)); if ustrip(Data_Cross) < min_Data || ustrip(Data_Cross)>max_Data error("Outside bounds [$min_Data : $max_Data]; $Data_Cross") @@ -699,7 +884,7 @@ end function CheckDataSet(DataSet::GeoData) if length(size(DataSet.lon)) == 1 # scattered points return 1 - else + else if 
any(size(DataSet.lon).==1) # surface data return 2 else # volume data @@ -711,7 +896,7 @@ end function CheckDataSet(DataSet::CartData) if length(size(DataSet.x)) == 1 # scattered points return 1 - else + else if any(size(DataSet.x).==1) # surface data return 2 else # volume data @@ -722,7 +907,7 @@ end """ - InterpolateDataFields(V::AbstractGeneralGrid, Lon, Lat, Depth) + Data_interp = InterpolateDataFields(V::AbstractGeneralGrid, Lon, Lat, Depth) Interpolates a data field `V` on a grid defined by `Lon,Lat,Depth` @@ -734,14 +919,14 @@ julia> z = -10:2:2 julia> X,Y,Z = XYZGrid(x, y, z); julia> Data = Z julia> Data_set1= CartData(X,Y,Z, (FakeData=Data,Data2=Data.+1.)) -CartData +CartData size : (6, 11, 7) x ϵ [ 0.0 km : 10.0 km] y ϵ [ -5.0 km : 5.0 km] z ϵ [ -10.0 km : 2.0 km] fields : (:FakeData, :Data2) attributes: ["note"] - + julia> X,Y,Z = XYZGrid(0:4:10, -1:.1:1, -5:.1:1 ); julia> Data_set2= InterpolateDataFields(Data_set1, X,Y,Z) ``` @@ -765,7 +950,7 @@ function InterpolateDataFields(V::AbstractGeneralGrid, Lon, Lat, Depth) for i = 1:length(V.fields) if typeof(V.fields[i]) <: Tuple # vector or anything that contains more than 1 field - data_tuple = fields_new[i] # we have a tuple (likely a vector field), so we have to loop + data_tuple = fields_new[i] # we have a tuple (likely a vector field), so we have to loop data_array = zeros(size(Lon,1),size(Lon,2),size(Lon,3),length(data_tuple)); # create a 3D array that holds the 2D interpolated values unit_array = zeros(size(data_array)); @@ -776,7 +961,7 @@ function InterpolateDataFields(V::AbstractGeneralGrid, Lon, Lat, Depth) else interpol = linear_interpolation((Lon_vec, Lat_vec, Depth_vec), ustrip.(data_tuple[j]),extrapolation_bc = NaN); # create interpolation object end - data_array[:,:,:,j] = interpol.(Lon, Lat, ustrip.(Depth)); + data_array[:,:,:,j] = interpol.(Lon, Lat, ustrip.(Depth)); end data_new = tuple([data_array[:,:,:,c] for c in 1:size(data_array,4)]...) 
# transform 3D matrix to tuple @@ -789,14 +974,17 @@ function InterpolateDataFields(V::AbstractGeneralGrid, Lon, Lat, Depth) interpol = linear_interpolation((Lon_vec, Lat_vec, Depth_vec), V.fields[i], extrapolation_bc = NaN); # create interpolation object end data_new = interpol.(Lon, Lat, ustrip.(Depth)); # interpolate data field + if isa(V.fields[i][1],Int64) + data_new = round.(Int64,data_new) + end end - - # replace the one + + # replace the one new_field = NamedTuple{(field_names[i],)}((data_new,)) # Create a tuple with same name fields_new = merge(fields_new, new_field); # replace the field in fields_new - + end - + # Create a GeoData struct with the newly interpolated fields if isa(V,GeoData) @@ -831,7 +1019,7 @@ function InterpolateDataFields(V::UTMData, EW, NS, Depth) for i = 1:length(V.fields) if typeof(V.fields[i]) <: Tuple # vector or anything that contains more than 1 field - data_tuple = fields_new[i] # we have a tuple (likely a vector field), so we have to loop + data_tuple = fields_new[i] # we have a tuple (likely a vector field), so we have to loop data_array = zeros(size(EW,1),size(EW,2),size(EW,3),length(data_tuple)); # create a 3D array that holds the 2D interpolated values unit_array = zeros(size(data_array)); @@ -842,7 +1030,7 @@ function InterpolateDataFields(V::UTMData, EW, NS, Depth) else interpol = linear_interpolation((EW_vec, NS_vec, Depth_vec), ustrip.(data_tuple[j]),extrapolation_bc = NaN); # create interpolation object end - data_array[:,:,:,j] = interpol.(EW, NS, Depth); + data_array[:,:,:,j] = interpol.(EW, NS, Depth); end data_new = tuple([data_array[:,:,:,c] for c in 1:size(data_array,4)]...) 
# transform 3D matrix to tuple @@ -856,13 +1044,13 @@ function InterpolateDataFields(V::UTMData, EW, NS, Depth) end data_new = interpol.(EW, NS, Depth); # interpolate data field end - - # replace the one + + # replace the one new_field = NamedTuple{(field_names[i],)}((data_new,)) # Create a tuple with same name fields_new = merge(fields_new, new_field); # replace the field in fields_new - + end - + # Create a GeoData struct with the newly interpolated fields Data_profile = UTMData(EW, NS, Depth, fields_new); @@ -879,13 +1067,13 @@ function InterpolateDataFields2D(V::GeoData, Lon, Lat) Lon_vec = V.lon.val[:,1,1]; Lat_vec = V.lat.val[1,:,1]; - + fields_new = V.fields; field_names = keys(fields_new); for i = 1:length(V.fields) if typeof(V.fields[i]) <: Tuple # vector or anything that contains more than 1 field - data_tuple = fields_new[i] # we have a tuple (likely a vector field), so we have to loop + data_tuple = fields_new[i] # we have a tuple (likely a vector field), so we have to loop data_array = zeros(size(Lon,1),size(Lon,2),size(Lon,3),length(data_tuple)); # create a 3D array that holds the 2D interpolated values unit_array = zeros(size(data_array)); @@ -895,7 +1083,7 @@ function InterpolateDataFields2D(V::GeoData, Lon, Lat) else interpol = linear_interpolation((Lon_vec, Lat_vec), ustrip.(data_tuple[j]),extrapolation_bc = Flat()); # create interpolation object end - data_array[:,:,1,j] = interpol.(Lon, Lat); + data_array[:,:,1,j] = interpol.(Lon, Lat); end if length(size(data_tuple[1]))==3 data_new = tuple([data_array[:,:,:,c] for c in 1:size(data_array,4)]...) 
# transform 3D matrix to tuple @@ -912,11 +1100,11 @@ function InterpolateDataFields2D(V::GeoData, Lon, Lat) data_new = interpol.(Lon, Lat); # interpolate data field end - - # replace the one + + # replace the one new_field = NamedTuple{(field_names[i],)}((data_new,)) # Create a tuple with same name fields_new = merge(fields_new, new_field); # replace the field in fields_new - + end # Interpolate z-coordinate as well @@ -925,8 +1113,8 @@ function InterpolateDataFields2D(V::GeoData, Lon, Lat) else interpol = linear_interpolation((Lon_vec, Lat_vec), V.depth.val, extrapolation_bc = Flat()); # create interpolation object end - depth_new = interpol.(Lon, Lat); - + depth_new = interpol.(Lon, Lat); + # Create a GeoData struct with the newly interpolated fields # Data_profile = GeoData(Lon, Lat, Depth*0, fields_new); @@ -934,56 +1122,130 @@ function InterpolateDataFields2D(V::GeoData, Lon, Lat) return depth_new, fields_new end + """ InterpolateDataFields2D(V::UTMData, EW, NS) Interpolates a data field `V` on a 2D grid defined by `UTM`. Typically used for horizontal surfaces """ function InterpolateDataFields2D(V::UTMData, EW, NS) - EW_vec = V.EW.val[:,1,1]; NS_vec = V.NS.val[1,:,1]; - - fields_new = V.fields; + return InterpolateDataFields2D_vecs(EW_vec, NS_vec, V.depth, V.fields, EW, NS) +end + +""" + InterpolateDataFields2D(V::CartData, X, Y) + +Interpolates a data field `V` on a 2D CartData grid defined by `X`,`Y`. Typically used for horizontal surfaces +""" +function InterpolateDataFields2D(V::CartData, X, Y) + X_vec = V.x.val[:,1,1]; + Y_vec = V.y.val[1,:,1]; + return InterpolateDataFields2D_vecs(X_vec, Y_vec, V.z, V.fields, X, Y) +end + +""" + InterpolateDataFields2D(Original::CartData, New::CartData; Rotate=0.0, Translate=(0,0,0), Scale=(1.0,1.0,1.0)) + +Interpolates a data field `Original` on a 2D CartData grid `New`. +Typically used for horizontal surfaces. + +Note: `Original` should have orthogonal coordinates. 
If it has not, e.g., because it was rotated, you'll have to specify the angle `Rotate` that it was rotated by + +""" +function InterpolateDataFields2D(Original::CartData, New::CartData; Rotate=0.0, Translate=(0.0,0.0,0.0), Scale=(1.0,1.0,1.0)) + if (Rotate!=0.0) || any(Translate .!= (0,0,0)) || any(Scale .!= (1.0,1.0,1.0)) + Original_r = RotateTranslateScale(Original, Rotate = -1.0*Rotate, Translate = -1.0.*Translate, Scale=Scale); + New_r = RotateTranslateScale(New, Rotate = -1.0*Rotate, Translate = -1.0.*Translate, Scale=Scale); + else + Original_r = Original; + New_r = New; + end + + X_vec = Original_r.x.val[:,1,1]; + Y_vec = Original_r.y.val[1,:,1]; + + Xnew = New_r.x.val + Ynew = New_r.y.val + Znew, fields_new = GeophysicalModelGenerator.InterpolateDataFields2D_vecs(X_vec, Y_vec, Original_r.z, Original_r.fields, Xnew, Ynew) + + return CartData(New.x.val,New.y.val,Znew, fields_new) +end + + +""" + InterpolateDataFields2D_vecs(x_vec, y_vec, depth, fields_new, X, Y) + +Interpolates a data field `V` on a 2D grid defined by `UTM`. 
Typically used for horizontal surfaces +""" +function InterpolateDataFields2D_vecs(EW_vec, NS_vec, depth, fields_new, EW, NS) + + # fields_new = V.fields; field_names = keys(fields_new); - for i = 1:length(V.fields) - if typeof(V.fields[i]) <: Tuple + for i = 1:length(fields_new) + data_tuple = fields_new[i] + + if (typeof(data_tuple) <: Tuple) & (!contains(String(field_names[i]),"colors")) # vector or anything that contains more than 1 field - data_tuple = fields_new[i] # we have a tuple (likely a vector field), so we have to loop data_array = zeros(size(EW,1),size(EW,2),size(EW,3),length(data_tuple)); # create a 3D array that holds the 2D interpolated values unit_array = zeros(size(data_array)); for j=1:length(data_tuple) interpol = linear_interpolation((EW_vec, NS_vec), ustrip.(data_tuple[j]),extrapolation_bc = Flat()); # create interpolation object - data_array[:,:,1,j] = interpol.(EW, NS); + data_array[:,:,1,j] = interpol.(EW, NS); + end + data_new = tuple([data_array[:,:,1,c] for c in 1:size(data_array,4)]...) # transform 3D matrix to tuple + + elseif contains(String(field_names[i]),"colors") + # This is a 3D matrix. We need to interpolate each color channel separately, while using nearest neighbour + # as you don't want to average colors + + # use nearest neighbour to interpolate data + X,Y,_ = XYZGrid(EW_vec, NS_vec, depth.val[1]); + + coord = [vec(X)'; vec(Y)']; + kdtree = KDTree(coord; leafsize = 10); + points = [vec(EW)';vec(NS)']; + idx,dist = nn(kdtree, points); + I = CartesianIndices(axes(X)) + I_idx = I[idx] + I_loc = CartesianIndices(axes(EW)) + + data_array = zeros(size(EW,1),size(EW,2),size(EW,3),length(data_tuple)); # create a 3D array that holds the 2D interpolated values + for (n,i) in enumerate(I_loc) + ix,iy = i[1],i[2] + for j=1:length(data_tuple) + data_array[ix,iy,1,j] = data_tuple[j][I_idx[n]] + end end data_new = tuple([data_array[:,:,1,c] for c in 1:size(data_array,4)]...) 
# transform 3D matrix to tuple else # scalar field - if length(size(V.fields[i]))==3 - interpol = linear_interpolation((EW_vec, NS_vec), V.fields[i][:,:,1], extrapolation_bc = Flat()); # create interpolation object + if length(size(data_tuple))==3 + interpol = linear_interpolation((EW_vec, NS_vec), data_tuple[:,:,1], extrapolation_bc = Flat()); # create interpolation object else - interpol = linear_interpolation((EW_vec, NS_vec), V.fields[i], extrapolation_bc = Flat()); # create interpolation object + interpol = linear_interpolation((EW_vec, NS_vec), data_tuple, extrapolation_bc = Flat()); # create interpolation object end data_new = interpol.(EW, NS); # interpolate data field end - - # replace the one + + # replace the one new_field = NamedTuple{(field_names[i],)}((data_new,)) # Create a tuple with same name fields_new = merge(fields_new, new_field); # replace the field in fields_new - + end # Interpolate z-coordinate as well - if length(size(V.depth))==3 - interpol = linear_interpolation((EW_vec, NS_vec), V.depth.val[:,:,1], extrapolation_bc = Flat()); # create interpolation object + if length(size(depth))==3 + interpol = linear_interpolation((EW_vec, NS_vec), depth.val[:,:,1], extrapolation_bc = Flat()); # create interpolation object else - interpol = linear_interpolation((EW_vec, NS_vec), V.depth.val, extrapolation_bc = Flat()); # create interpolation object + interpol = linear_interpolation((EW_vec, NS_vec), depth.val, extrapolation_bc = Flat()); # create interpolation object end - depth_new = interpol.(EW, NS); - + depth_new = interpol.(EW, NS); + # Create a UTMData struct with the newly interpolated fields # Data_profile = UTMData(EW, NS, Depth*0, fields_new); @@ -995,25 +1257,25 @@ end """ Surf_interp = InterpolateDataOnSurface(V::ParaviewData, Surf::ParaviewData) -Interpolates a 3D data set `V` on a surface defined by `Surf`. nex +Interpolates a 3D data set `V` on a surface defined by `Surf`. 
# Example ```julia julia> Data -ParaviewData +ParaviewData size : (33, 33, 33) x ϵ [ -3.0 : 3.0] y ϵ [ -2.0 : 2.0] z ϵ [ -2.0 : 0.0] fields: (:phase, :density, :visc_total, :visc_creep, :velocity, :pressure, :temperature, :dev_stress, :strain_rate, :j2_dev_stress, :j2_strain_rate, :plast_strain, :plast_dissip, :tot_displ, :yield, :moment_res, :cont_res) julia> surf -ParaviewData +ParaviewData size : (96, 96, 1) x ϵ [ -2.9671875 : 3.2671875] y ϵ [ -1.9791666666666667 : 1.9791666666666667] z ϵ [ -1.5353766679763794 : -0.69925457239151] fields: (:Depth,) julia> Surf_interp = InterpolateDataOnSurface(Data, surf) - ParaviewData + ParaviewData size : (96, 96, 1) x ϵ [ -2.9671875 : 3.2671875] y ϵ [ -1.9791666666666667 : 1.9791666666666667] @@ -1022,7 +1284,7 @@ julia> Surf_interp = InterpolateDataOnSurface(Data, surf) ``` """ function InterpolateDataOnSurface(V::ParaviewData, Surf::ParaviewData) - + # Create GeoData structure: V_geo = GeoData(V.x.val, V.y.val, V.z.val, V.fields) V_geo.depth.val = ustrip(V_geo.depth.val); @@ -1037,6 +1299,21 @@ function InterpolateDataOnSurface(V::ParaviewData, Surf::ParaviewData) end +function InterpolateDataOnSurface(V::CartData, Surf::CartData) + + # Create GeoData structure: + V_geo = GeoData(V.x.val, V.y.val, V.z.val, V.fields) + V_geo.depth.val = ustrip(V_geo.depth.val); + + Surf_geo = GeoData(Surf.x.val, Surf.y.val, Surf.z.val, Surf.fields) + Surf_geo.depth.val = ustrip(Surf_geo.depth.val); + + Surf_interp_geo = InterpolateDataOnSurface(V_geo, Surf_geo) + Surf_interp = CartData(Surf_interp_geo.lon.val, Surf_interp_geo.lat.val, ustrip.(Surf_interp_geo.depth.val), Surf_interp_geo.fields) + + return Surf_interp + +end """ Surf_interp = InterpolateDataOnSurface(V::GeoData, Surf::GeoData) @@ -1044,7 +1321,7 @@ end Interpolates a 3D data set `V` on a surface defined by `Surf` """ function InterpolateDataOnSurface(V::GeoData, Surf::GeoData) - + Surf_interp = InterpolateDataFields(V, Surf.lon.val, Surf.lat.val, Surf.depth.val) return 
Surf_interp @@ -1061,7 +1338,7 @@ function ExtractDataSets(V::AbstractGeneralGrid, iLon, iLat, iDepth) Lon = zeros(typeof(X[1]), length(iLon),length(iLat),length(iDepth)); Lat = zeros(typeof(Y[1]), length(iLon),length(iLat),length(iDepth)); Depth = zeros(typeof(Z[1]), length(iLon),length(iLat),length(iDepth)); - + iLo = 1:length(iLon); iLa = 1:length(iLat); iDe = 1:length(iDepth) @@ -1074,13 +1351,13 @@ function ExtractDataSets(V::AbstractGeneralGrid, iLon, iLat, iDepth) for i = 1:length(V.fields) if typeof(V.fields[i]) <: Tuple # vector or anything that contains more than 1 field - data_tuple = fields_new[i] # we have a tuple (likely a vector field), so we have to loop + data_tuple = fields_new[i] # we have a tuple (likely a vector field), so we have to loop data_array = zeros(typeof(data_tuple[1][1]),length(iLon),length(iLat),length(iDepth),length(data_tuple)); # create a 3D array that holds the 2D interpolated values unit_array = zeros(size(data_array)); for j=1:length(data_tuple) data_field = data_tuple[j]; - data_array[:,:,:,j] = data_field[iLon, iLat, iDepth]; + data_array[:,:,:,j] = data_field[iLon, iLat, iDepth]; end data_new = tuple([data_array[:,:,:,c] for c in 1:size(data_array,4)]...) 
# transform 4D matrix to tuple @@ -1089,13 +1366,13 @@ function ExtractDataSets(V::AbstractGeneralGrid, iLon, iLat, iDepth) data_new = zeros(typeof(V.fields[i][1]), length(iLon),length(iLat),length(iDepth)); data_new[iLo,iLa,iDe] = V.fields[i][iLon, iLat, iDepth] # interpolate data field end - - # replace the one + + # replace the one new_field = NamedTuple{(field_names[i],)}((data_new,)) # Create a tuple with same name fields_new = merge(fields_new, new_field); # replace the field in fields_new - + end - + # Create a GeoData struct with the newly interpolated fields if isa(V,GeoData) @@ -1125,15 +1402,15 @@ function SubtractHorizontalMean(V::AbstractArray{T, 3}; Percentage=false) where if Percentage V_sub = zeros(size(V)); # no units else - V_sub = zeros(typeof(V[1]), size(V)); + V_sub = zeros(typeof(V[1]), size(V)); end for iLayer = 1:NumLayers average = mean(filter(!isnan, vec(V[:,:,iLayer]))); - + if Percentage V_sub[:,:,iLayer] = ustrip(V[:,:,iLayer]) .- ustrip(average); - V_sub[:,:,iLayer] = V_sub[:,:,iLayer]./ustrip(average)*100.0; # the result is normalized + V_sub[:,:,iLayer] = V_sub[:,:,iLayer]./ustrip(average)*100.0; # the result is normalized else V_sub[:,:,iLayer] = V[:,:,iLayer] .- average; end @@ -1158,15 +1435,15 @@ function SubtractHorizontalMean(V::AbstractArray{T, 2}; Percentage=false) where if Percentage V_sub = zeros(size(V)); # no units else - V_sub = zeros(typeof(V[1]), size(V)); + V_sub = zeros(typeof(V[1]), size(V)); end for iLayer = 1:NumLayers average = mean(filter(!isnan, vec(V[:,iLayer]))); - + if Percentage V_sub[:,iLayer] = ustrip(V[:,iLayer]) .- ustrip(average); - V_sub[:,iLayer] = V_sub[:,iLayer]./ustrip(average)*100.0; # the result is normalized + V_sub[:,iLayer] = V_sub[:,iLayer]./ustrip(average)*100.0; # the result is normalized else V_sub[:,iLayer] = V[:,iLayer] .- average; end @@ -1179,15 +1456,15 @@ end -""" +""" ParseColumns_CSV_File(data_file, num_columns) This parses numbers from CSV file that is read in with `CSV.File`. 
-That is useful in case the CSV files has tables that contain both strings (e.g., station names) and numbers (lat/lon/height) and you are only intested in the numbers +That is useful in case the CSV files has tables that contain both strings (e.g., station names) and numbers (lat/lon/height) and you are only interested in the numbers # Example -This example assumes that the data starts at line 18, that the colums are separated by spaces, and that it contains at most 4 columns with data: +This example assumes that the data starts at line 18, that the columns are separated by spaces, and that it contains at most 4 columns with data: ```julia-repl julia> using CSV julia> data_file = CSV.File("FileName.txt",datarow=18,header=false,delim=' ') @@ -1196,7 +1473,7 @@ julia> data = ParseColumns_CSV_File(data_file, 4) """ function ParseColumns_CSV_File(data_file, num_columns) - data = zeros(size(data_file,1), num_columns); + data = zeros(size(data_file,1), num_columns); for (row_num,row) in enumerate(data_file) num = 0; for i=1:length(row) @@ -1204,7 +1481,7 @@ function ParseColumns_CSV_File(data_file, num_columns) num += 1; data[row_num,num] = row[i] else - + try parse(Float64,row[i]) num += 1; data[row_num,num] = parse(Float64,row[i]) @@ -1218,7 +1495,7 @@ function ParseColumns_CSV_File(data_file, num_columns) end -""" +""" AboveSurface(Data::GeoData, DataSurface::GeoData; above=true) Returns a boolean array of size(Data.Lon), which is true for points that are above the surface DataSurface (or for points below if above=false). 
@@ -1229,17 +1506,17 @@ This can be used, for example, to mask points above/below the Moho in a volumetr First we create a 3D data set and a 2D surface: ```julia julia> Lon,Lat,Depth = LonLatDepthGrid(10:20,30:40,(-300:25:0)km); -julia> Data = Depth*2; +julia> Data = Depth*2; julia> Data_set3D = GeoData(Lon,Lat,Depth,(Depthdata=Data,LonData=Lon)) -GeoData +GeoData size : (11, 11, 13) lon ϵ [ 10.0 : 20.0] lat ϵ [ 30.0 : 40.0] depth ϵ [ -300.0 km : 0.0 km] fields: (:Depthdata, :LonData) -julia> Lon,Lat,Depth = LonLatDepthGrid(10:20,30:40,-40km); +julia> Lon,Lat,Depth = LonLatDepthGrid(10:20,30:40,-40km); julia> Data_Moho = GeoData(Lon,Lat,Depth+Lon*km, (MohoDepth=Depth,)) - GeoData + GeoData size : (11, 11, 1) lon ϵ [ 10.0 : 20.0] lat ϵ [ 30.0 : 40.0] @@ -1248,13 +1525,13 @@ julia> Data_Moho = GeoData(Lon,Lat,Depth+Lon*km, (MohoDepth=Depth,)) ``` Next, we intersect the surface with the data set: ```julia -julia> Above = AboveSurface(Data_set3D, Data_Moho); +julia> Above = AboveSurface(Data_set3D, Data_Moho); ``` Now, `Above` is a boolean array that is true for points above the surface and false for points below and at the surface. """ function AboveSurface(Data::GeoData, DataSurface::GeoData; above=true) - + if size(DataSurface.lon)[3]!=1 error("It seems that DataSurface is not a surface") end @@ -1365,20 +1642,20 @@ The way it works is: - Filter data points in one model (e.g., areas with a velocity anomaly > 2 percent). Set everything that satisfies this criteria to 1 and everything else to 0. - Sum the results of the different datasets -If a feature is consistent between different datasets, it will have larger values. +If a feature is consistent between different datasets, it will have larger values. 
# Example We assume that we have 2 seismic velocity datasets `Data_Zhao_Pwave` and `DataKoulakov_Alps`: ```julia julia> Data_Zhao_Pwave -GeoData +GeoData size : (121, 94, 101) lon ϵ [ 0.0 : 18.0] lat ϵ [ 38.0 : 51.95] depth ϵ [ -1001.0 km : -1.0 km] fields: (:dVp_Percentage,) julia> DataKoulakov_Alps - GeoData + GeoData size : (108, 81, 35) lon ϵ [ 4.0 : 20.049999999999997] lat ϵ [ 37.035928143712574 : 49.01197604790419] @@ -1386,9 +1663,9 @@ julia> DataKoulakov_Alps fields: (:dVp_percentage, :dVs_percentage) ``` You can create a VoteMap which combines the two data sets with: -```julia +```julia julia> Data_VoteMap = VoteMap([Data_Zhao_Pwave,DataKoulakov_Alps],["dVp_Percentage>2.5","dVp_percentage>3.0"]) -GeoData +GeoData size : (50, 50, 50) lon ϵ [ 4.0 : 18.0] lat ϵ [ 38.0 : 49.01197604790419] @@ -1397,9 +1674,9 @@ GeoData ``` You can also create a VoteMap of a single dataset: -```julia +```julia julia> Data_VoteMap = VoteMap(Data_Zhao_Pwave,"dVp_Percentage>2.5", dims=(50,51,52)) -GeoData +GeoData size : (50, 51, 52) lon ϵ [ 0.0 : 18.0] lat ϵ [ 38.0 : 51.95] @@ -1415,7 +1692,7 @@ function VoteMap(DataSets::Vector{GeoData}, criteria::Vector{String}; dims=(50,5 if length(criteria) != numDataSets error("Need the same number of criteria as the number of data sets") end - + # Determine the overlapping lon/lat/depth regions of all datasets lon_limits = [minimum(DataSets[1].lon.val); maximum(DataSets[1].lon.val)]; lat_limits = [minimum(DataSets[1].lat.val); maximum(DataSets[1].lat.val)]; @@ -1426,7 +1703,7 @@ function VoteMap(DataSets::Vector{GeoData}, criteria::Vector{String}; dims=(50,5 lat_limits[1] = maximum([lat_limits[1] minimum(DataSets[i].lat.val)]); lat_limits[2] = minimum([lat_limits[2] maximum(DataSets[i].lat.val)]); - + z_limits[1] = maximum([z_limits[1] minimum(DataSets[i].depth.val)]); z_limits[2] = minimum([z_limits[2] maximum(DataSets[i].depth.val)]); end @@ -1435,7 +1712,7 @@ function VoteMap(DataSets::Vector{GeoData}, criteria::Vector{String}; 
dims=(50,5 VoteMap = zeros(Int64,dims) for i=1:numDataSets VoteMap_Local = zeros(Int64,dims) - + # Interpolate data set to smaller domain DataSet = ExtractSubvolume(DataSets[i]; Interpolate=true, Lon_level=lon_limits, Lat_level=lat_limits, Depth_level=z_limits, dims=dims); @@ -1448,10 +1725,10 @@ function VoteMap(DataSets::Vector{GeoData}, criteria::Vector{String}; dims=(50,5 end Array3D = ustrip.(DataSet.fields[expr.args[2]]); # strip units, just in case - - # Modify the value, to be Array3D + + # Modify the value, to be Array3D expr_mod = Expr(:call, expr.args[1], :($Array3D), expr.args[3]); # modify the original expression to use Array3D as variable name - + # The expression should have a ".", such as Array .> 1.0. If not, it will not apply this in a pointwise manner # Here, we add this dot if it is not there yet if cmp(String(expr_mod.args[1])[1],Char('.'))==1 @@ -1461,7 +1738,7 @@ function VoteMap(DataSets::Vector{GeoData}, criteria::Vector{String}; dims=(50,5 ind = eval(expr_mod); # evaluate the modified expression VoteMap_Local[ind] .= 1; # assign vote-map - VoteMap = VoteMap + VoteMap_Local; # Sum + VoteMap = VoteMap + VoteMap_Local; # Sum end DataSet = ExtractSubvolume(DataSets[1], Interpolate=true, Lon_level=lon_limits, Lat_level=lat_limits, Depth_level=z_limits, dims=dims); @@ -1478,21 +1755,22 @@ function VoteMap(DataSets::GeoData, criteria::String; dims=(50,50,50)) end """ - Data_R = RotateTranslateScale(Data::ParaviewData; Rotate=0, Translate=(0,0,0), Scale=(1.0,1.0,1.0)) + Data_R = RotateTranslateScale(Data::Union{ParaviewData, CartData}; Rotate=0, Translate=(0,0,0), Scale=(1.0,1.0,1.0), Xc=(0.0,0.0)) -Does an in-place rotation, translation and scaling of the Cartesian dataset `Data`. +Does an in-place rotation, translation and scaling of the Cartesian dataset `Data`. 
# Parameters Note that we apply the transformations in exactly this order: - `Scale`: scaling applied to the `x,y,z` coordinates of the data set - `Rotate`: rotation around the `x/y` axis (around the center of the box) - `Translate`: translation +- `Xc`: center of rotation # Example ```julia julia> X,Y,Z = XYZGrid(10:20,30:40,-50:-10); julia> Data_C = ParaviewData(X,Y,Z,(Depth=Z,)) -ParaviewData +ParaviewData size : (11, 11, 41) x ϵ [ 10.0 : 20.0] y ϵ [ 30.0 : 40.0] @@ -1500,7 +1778,7 @@ ParaviewData fields: (:Depth,) julia> Data_R = RotateTranslateScale(Data_C, Rotate=30); julia> Data_R -ParaviewData +ParaviewData size : (11, 11, 41) x ϵ [ 8.169872981077807 : 21.83012701892219] y ϵ [ 28.16987298107781 : 41.83012701892219] @@ -1508,14 +1786,14 @@ ParaviewData fields: (:Depth,) ``` """ -function RotateTranslateScale(Data::ParaviewData; Rotate=0, Translate=(0,0,0), Scale=(1.0,1.0,1.0)) +function RotateTranslateScale(Data::Union{ParaviewData, CartData}; Rotate::Number=0.0, Translate=(0,0,0), Scale=(1.0,1.0,1.0), Xc=(0.0,0.0)) - X,Y,Z = Data.x.val, Data.y.val, Data.z.val; # Extract coordinates - Xr,Yr,Zr = X,Y,Z; # Rotated coordinates + X,Y,Z = copy(Data.x.val), copy(Data.y.val), copy(Data.z.val); # Extract coordinates + Xr,Yr,Zr = X,Y,Z; # Rotated coordinates # 1) Scaling if length(Scale)==1 - Scale = [Scale Scale Scale]; + Scale = [Scale, Scale, Scale]; end Xr .*= Scale[1]; Yr .*= Scale[2]; @@ -1523,39 +1801,41 @@ function RotateTranslateScale(Data::ParaviewData; Rotate=0, Translate=(0,0,0), S # 2) 2D rotation around X/Y axis, around center of box - Xm,Ym = mean(X), mean(Y); - R = [cosd(Rotate[1]) -sind(Rotate[1]); sind(Rotate[1]) cosd(Rotate[1])]; # 2D rotation matrix + Xm,Ym = 0.0, 0.0; + R = [cosd(Rotate) -sind(Rotate); sind(Rotate) cosd(Rotate)]; # 2D rotation matrix for i in eachindex(X) - Rot_XY = R*[X[i]-Xm; Y[i]-Ym]; - Xr[i] = Rot_XY[1] + Xm; - Yr[i] = Rot_XY[2] + Ym; - end - + Rot_XY = R*[X[i]-Xc[1]; Y[i]-Xc[2]]; + Xr[i] = Rot_XY[1] + Xc[1]; + Yr[i] = 
Rot_XY[2] + Xc[2]; + end + # 3) Add translation Xr .+= Translate[1]; Yr .+= Translate[2]; Zr .+= Translate[3]; - + # Modify original structure - #Data.x.val = Xr; - #Data.y.val = Yr; - #Data.z.val = Zr; - - return ParaviewData(Xr,Yr,Zr, Data.fields) + if isa(Data, ParaviewData) + Data_r = ParaviewData(Xr,Yr,Zr, Data.fields) + else + Data_r = CartData(Xr,Yr,Zr, Data.fields) + end + + return Data_r end """ - Topo = DrapeOnTopo(Topo::GeoData, Data::GeoData) + Topo = DrapeOnTopo(Topo::GeoData, Data::GeoData) -This drapes fields of a data set `Data` on the topography `Topo` +This drapes fields of a data set `Data` on the topography `Topo` """ function DrapeOnTopo(Topo::GeoData, Data::GeoData) - + Lon,Lat,Depth = LonLatDepthGrid( Topo.lon.val[:,1,1], Topo.lat.val[1,:,1],Topo.depth.val[1,1,:]); # use nearest neighbour to interpolate data @@ -1567,50 +1847,50 @@ function DrapeOnTopo(Topo::GeoData, Data::GeoData) idx_out = findall( (Lon .< minimum(Data.lon.val)) .| (Lon .> maximum(Data.lon.val)) .| (Lat .< minimum(Data.lat.val)) .| (Lat .> maximum(Data.lat.val)) ) - + fields_new = Topo.fields; field_names = keys(Data.fields); - + for i = 1:length(Data.fields) - + if typeof(Data.fields[i]) <: Tuple # vector or anything that contains more than 1 field - data_tuple = Data.fields[i] # we have a tuple (likely a vector field), so we have to loop + data_tuple = Data.fields[i] # we have a tuple (likely a vector field), so we have to loop data_array = zeros(typeof(data_tuple[1][1]),size(Topo.lon.val,1),size(Topo.lon.val,2),size(Topo.lon.val,3),length(Data.fields[i])); # create a 3D array that holds the 2D interpolated values unit_array = zeros(size(data_array)); for j=1:length(data_tuple) data_field = data_tuple[j]; - tmp = data_array[:,:,:,1]; + tmp = data_array[:,:,:,1]; tmp = data_field[idx] data_array[:,:,:,j] = tmp end - + data_new = tuple([data_array[:,:,:,c] for c in 1:size(data_array,4)]...) 
# transform 4D matrix to tuple # remove points outside domain for j=1:length(data_tuple) - tmp = data_new[j]; + tmp = data_new[j]; tmp[idx_out] .= NaN data_array[:,:,:,j] = tmp end data_new = tuple([data_array[:,:,:,c] for c in 1:size(data_array,4)]...) # transform 4D matrix to tuple else - + # scalar field data_new = zeros(typeof(Data.fields[i][1]), size(Topo.lon.val,1),size(Topo.lon.val,2),size(Topo.lon.val,3)); data_new = Data.fields[i][idx] # interpolate data field - + end - - # replace the one + + # replace the one new_field = NamedTuple{(field_names[i],)}((data_new,)) # Create a tuple with same name fields_new = merge(fields_new, new_field); # replace the field in fields_new - - end + + end Topo_new = GeoData(Topo.lon.val,Topo.lat.val,Topo.depth.val, fields_new) @@ -1620,10 +1900,10 @@ function DrapeOnTopo(Topo::GeoData, Data::GeoData) end -""" +""" DrapeOnTopo(Topo::CartData, Data::CartData) -Drapes Cartesian Data on topography +Drapes Cartesian Data on topography """ function DrapeOnTopo(Topo::CartData, Data::CartData) Topo_lonlat = GeoData(ustrip.(Topo.x.val),ustrip.(Topo.y.val), ustrip.(Topo.z.val), Topo.fields ) @@ -1636,20 +1916,98 @@ function DrapeOnTopo(Topo::CartData, Data::CartData) return Topo_new end -""" +""" LithostaticPressure!(Plithos::Array, Density::Array, dz::Number; g=9.81) Computes lithostatic pressure from a 3D density array, assuming constant soacing `dz` in vertical direction. Optionally, the gravitational acceleration `g` can be specified. """ function LithostaticPressure!(Plithos::Array{T,N}, Density::Array{T,N}, dz::Number; g=9.81) where {T,N} - + Plithos[:] = Density*dz*g; - + selectdim(Plithos,N,size(Plithos)[N]) .= 0 # set upper row to zero - + Plithos[:] = reverse!(cumsum(reverse!(Plithos),dims=N)) - + + return nothing +end + + +""" + AddSurfaces!(Surface1::Union{GeoData,UTMData}, Surface2::Union{GeoData,UTMData}) + +Adds `Surface2` to `Surface1`. 
The addition happens on the `Surface1.depth`; the fields remain unchanged + +""" +function AddSurfaces!(Surface1::Union{GeoData,UTMData}, Surface2::Union{GeoData,UTMData}) + + Surface1.depth.val .= Surface1.depth.val + Surface2.depth.val + + return nothing +end + +""" + AddSurfaces!(Surface1::Union{CartData,ParaviewData}, Surface2::Union{CartData,ParaviewData}) + +Adds `Surface2` to `Surface1`. The addition happens on the `Surface1.z`; the fields remain unchanged + +""" +function AddSurfaces!(Surface1::Union{CartData,ParaviewData}, Surface2::Union{CartData,ParaviewData}) + + Surface1.z.val .= Surface1.z.val + Surface2.z.val + + return nothing +end + + +""" + SubtractSurfaces!(Surface1::Union{GeoData,UTMData}, Surface2::Union{GeoData,UTMData}) + +Subtracts `Surface2` to `Surface1`. The addition happens on the `Surface1.depth`; the fields remain unchanged + +""" +function SubtractSurfaces!(Surface1::Union{GeoData,UTMData}, Surface2::Union{GeoData,UTMData}) + + Surface1.depth.val .= Surface1.depth.val - Surface2.depth.val + + return nothing +end + + +""" + SubtractSurfaces!(Surface1::Union{CartData,ParaviewData}, Surface2::Union{CartData,ParaviewData}) + +Subtracts `Surface2` to `Surface1`. The addition happens on the `Surface1.z`; the fields remain unchanged + +""" +function SubtractSurfaces!(Surface1::Union{CartData,ParaviewData}, Surface2::Union{CartData,ParaviewData}) + + Surface1.z.val .= Surface1.z.val - Surface2.z.val + return nothing end + + + +""" + remove_NaN_Surface!(Z,X,Y) + +Removes NaN's from a grid `Z` by taking the closest points as specified by `X` and `Y`. 
+""" +function remove_NaN_Surface!(Z,X,Y) + # use nearest neighbour to interpolate data + id = findall(isnan.(Z) .== false) + id_NaN = findall(isnan.(Z)) + + coord = [X[id]'; Y[id]']; + kdtree = KDTree(coord; leafsize = 10); + + points = [X[id_NaN]'; Y[id_NaN]']; + idx,dist = nn(kdtree, points); + + Z[id_NaN] = Z[id[idx]] + + return nothing +end diff --git a/src/voxel_gravity.jl b/src/voxel_gravity.jl index 5c8eb8c6..adfd12ef 100644 --- a/src/voxel_gravity.jl +++ b/src/voxel_gravity.jl @@ -13,24 +13,24 @@ export voxGrav voxGrav(X::Array{Float64, 3}, Y::Array{Float64, 3}, Z::Array{Float64, 3}, RHO::Array{Float64, 3}; refMod="AVG", lengthUnit="m", rhoTol=1e-9, Topo=[], outName="Bouguer", printing=true) - Computes Bouguer anomalies and gradients - - Required arguments: - X,Y,Z: 3D matrices with the coordinates of the grid (X should vary in the first dimension, Y in the second, Z (vertical) in the thrid) - RHO: 3D matrix with the densitiy at each grid point [kg/m^3] - - Optional arguments: - refMod: 1D vector with the reference density for each depth - Alternatively, the strings "NE", "SE", "SW", "NW", "AVG" can be used. - In that case, one of the corners of `RHO` is used as reference model. - In case of "AVG" the reference model is the average of each depth slice. - lengthUnit: The unit of the coordinates and topography file. 
Either "m" or "km" - rhoTol: density differences smaller than rhoTol will be ignored [kg/m^3] - Topo: 2D matrix with the topography of the surface (only relevant for the paraview output) - outName: name of the paraview output (do not include file type) + Computes Bouguer anomalies and gradients + + Required arguments: + X,Y,Z: 3D matrices with the coordinates of the grid (X should vary in the first dimension, Y in the second, Z (vertical) in the third) + RHO: 3D matrix with the density at each grid point [kg/m^3] + + Optional arguments: + refMod: 1D vector with the reference density for each depth + Alternatively, the strings "NE", "SE", "SW", "NW", "AVG" can be used. + In that case, one of the corners of `RHO` is used as reference model. + In case of "AVG" the reference model is the average of each depth slice. + lengthUnit: The unit of the coordinates and topography file. Either "m" or "km" + rhoTol: density differences smaller than rhoTol will be ignored [kg/m^3] + Topo: 2D matrix with the topography of the surface (only relevant for the paraview output) + outName: name of the paraview output (do not include file type) printing: activate printing of additional information [true or false] """ -function voxGrav(X::Array{Float64, 3}, Y::Array{Float64, 3}, Z::Array{Float64, 3}, RHO::Array{Float64, 3}; +function voxGrav(X::Array{Float64, 3}, Y::Array{Float64, 3}, Z::Array{Float64, 3}, RHO::Array{Float64, 3}; refMod="AVG", lengthUnit="m", rhoTol=1e-9, Topo=[], outName="Bouguer", printing=true) ## check input @@ -58,13 +58,13 @@ function voxGrav(X::Array{Float64, 3}, Y::Array{Float64, 3}, Z::Array{Float64, 3 ny = size(X,2); nz = size(X,3); - # substract reference model + # subtract reference model for i = 1 : nz RHO[:,:,i] .= RHO[:,:,i] .- RefMod[i] end # interpolate density grid to cell centers - DRHO = RHO[1:end-1,1:end-1,1:end-1] .+ RHO[2:end,1:end-1,1:end-1] .+ RHO[2:end,2:end,1:end-1] .+ RHO[1:end-1,2:end,1:end-1] + + DRHO = RHO[1:end-1,1:end-1,1:end-1] .+ 
RHO[2:end,1:end-1,1:end-1] .+ RHO[2:end,2:end,1:end-1] .+ RHO[1:end-1,2:end,1:end-1] + RHO[1:end-1,1:end-1,2:end] .+ RHO[2:end,1:end-1,2:end] .+ RHO[2:end,2:end,2:end] .+ RHO[1:end-1,2:end,2:end] DRHO = DRHO ./ 8 @@ -119,7 +119,7 @@ function voxGrav(X::Array{Float64, 3}, Y::Array{Float64, 3}, Z::Array{Float64, 3 coords = permutedims(coords,[4,1,2,3]) vtkfile = vtk_grid(outName,coords) - if printing + if printing @printf "%.3f %% of the domain contained anomalous densities. If this is more than expected, adjust rhoTol for faster computation.\n" 100*frac @printf "Writing output...\n" vtkfile["Bouguer Anomaly [mGal]"] = dg @@ -137,7 +137,7 @@ function voxGrav(X::Array{Float64, 3}, Y::Array{Float64, 3}, Z::Array{Float64, 3 if orient == 1 return dg, gradX, gradY - else + else return permutedims(dg, [2,1]), permutedims(gradX, [2,1]), permutedims(gradY, [2,1]) end end @@ -208,7 +208,7 @@ function checkInput(X, Y, Z, RHO, refMod, lengthUnit, rhoTol, Topo, outName, pri else error("lengthUnit should be \"m\" or \"km\".") end - + # reference model if !(typeof(refMod) == String) if length(refMod) == nz @@ -225,7 +225,7 @@ function checkInput(X, Y, Z, RHO, refMod, lengthUnit, rhoTol, Topo, outName, pri RefMod = RHO[1,1,:] elseif refMod == "NW" RefMod = RHO[1,end,:] - elseif refMode == "AVG" + elseif refMod == "AVG" RefMod = !mean([1.,1.,1.],RHO) else error("RefMod should be NE, SE, SW, NW, AVG or a vector with one value for each depth.") diff --git a/test.jld2 b/test.jld2 new file mode 100644 index 00000000..2386ca68 Binary files /dev/null and b/test.jld2 differ diff --git a/testdownload_GMG_temp.jld2 b/test/download_GMG_temp.jld2 similarity index 100% rename from testdownload_GMG_temp.jld2 rename to test/download_GMG_temp.jld2 diff --git a/test/gmt.history b/test/gmt.history new file mode 100644 index 00000000..d06156d0 --- /dev/null +++ b/test/gmt.history @@ -0,0 +1,8 @@ +# GMT 6 Session common arguments shelf +BEGIN GMT 6.5.0 +B afWSen +J X +JX X15c/0 +R 50/51/30/31 +@L 1 
+END diff --git a/test/runtests.jl b/test/runtests.jl index df4ce769..174ada66 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -40,6 +40,9 @@ end include("test_ProfileProcessing.jl") end +@testset "GMT integration" begin + include("test_GMT.jl") +end # Cleanup diff --git a/test/test_GMT.jl b/test/test_GMT.jl new file mode 100644 index 00000000..55712b51 --- /dev/null +++ b/test/test_GMT.jl @@ -0,0 +1,14 @@ +using Test +using GeophysicalModelGenerator, GMT + +Topo = ImportTopo(lat=[30,31], lon=[50, 51] ) +@test sum(Topo.depth.val) ≈ 2773.3734999999997 + +Topo = ImportTopo([50,51, 30,31]); +@test sum(Topo.depth.val) ≈ 2773.3734999999997 + +test_fwd = ImportGeoTIFF("test_files/length_fwd.tif", fieldname=:forward) +@test maximum(test_fwd.fields.forward) ≈ 33.17775km + +test2 = ImportGeoTIFF("test_files/UTM2GTIF.TIF") +@test test2.fields.layer1[20,20] == 105.0 \ No newline at end of file diff --git a/test/test_IO.jl b/test/test_IO.jl index 12929173..d760d9a2 100644 --- a/test/test_IO.jl +++ b/test/test_IO.jl @@ -1,13 +1,15 @@ using Test +pkg_dir = pkgdir(GeophysicalModelGenerator) + # test saving to file Lon3D,Lat3D,Depth3D = LonLatDepthGrid(1.0:3:10.0, 11.0:4:20.0, (-20:5:-10)*km); Data_set = GeophysicalModelGenerator.GeoData(Lon3D,Lat3D,Depth3D,(DataFieldName=Depth3D,)) -@test save_GMG("test",Data_set) == nothing +@test save_GMG(joinpath(pkg_dir,"test"),Data_set) == nothing # loading from local file -data_local = load_GMG("test") +data_local = load_GMG(joinpath(pkg_dir,"test")) @test data_local.depth.val[20]==-15.0 # loading from local file diff --git a/test/test_ProfileProcessing.jl b/test/test_ProfileProcessing.jl index f06815bc..bc5b3888 100644 --- a/test/test_ProfileProcessing.jl +++ b/test/test_ProfileProcessing.jl @@ -1,6 +1,9 @@ using Test using GeophysicalModelGenerator +pkg_dir = pkgdir(GeophysicalModelGenerator) +cd(joinpath(pkg_dir,"test")) + # Test profile processing dataset routines data_Surf = 
GMG_Dataset("Mrozek_Moho_Grid_EU","Surface","https://seafile.rlp.net/f/483d9c7c808a4087ba9e/?dl=1", true) @test data_Surf.DirName == "https://seafile.rlp.net/f/483d9c7c808a4087ba9e/?dl=1" @@ -12,13 +15,14 @@ data_Surf = GMG_Dataset("Mrozek_Moho_Grid_EU","Surface","https://seafile.rlp.net data_EQ = GMG_Dataset("AlpArraySeis","Point","https://seafile.rlp.net/f/87d565882eda40689666/?dl=1", true) data_SS = GMG_Dataset("Handy_etal_SE_Profile1","Screenshot","https://seafile.rlp.net/f/5ffe580e765e4bd1bafe/?dl=1", true) -# Note: the volumetric datasets are choosen as they are smaller in size (less download) +# Note: the volumetric datasets are chosen as they are smaller in size (less download) data_Vol1 = GMG_Dataset("Hua2017","Volume","https://seafile.rlp.net/f/1fb68b74e5d742d39e62/?dl=1", true) data_Vol2 = GMG_Dataset("Plomerova2022","Volume","https://seafile.rlp.net/f/abccb8d3302b4ef5af17/?dl=1", true) #data_Vol1 = GMG_Dataset("Paffrath2021","Volume","https://seafile.rlp.net/f/5c8c851af6764b5db20d/?dl=1", true) #data_Vol2 = GMG_Dataset("Zhao2016","Volume","https://seafile.rlp.net/f/e81a6d075f6746609973/?dl=1", true) -# Now load these datasets into NamedTuples +# Now load these datasets into NamedTuples + SurfData = load_GMG(data_Surf) PointData = load_GMG(data_EQ) ScreenshotData = load_GMG(data_SS) @@ -50,7 +54,7 @@ VolData_combined2 = combine_VolData(Data.Volume, dims=(50,51,52)) VolData_combined3 = combine_VolData(Data.Volume, lon=(1,22), lat=(40,52), dims=(50,51,52)) @test isnan(VolData_combined3.fields.Hua2017_Vp[1000]) -# Define horizonal & vertical profiles +# Define horizontal & vertical profiles prof1 = ProfileData(start_lonlat=(5,45), end_lonlat=(15,49)) prof2 = ProfileData(depth = -100) prof3 = ProfileData(start_lonlat=(5,45), end_lonlat=(5,49)) @@ -70,7 +74,7 @@ GeophysicalModelGenerator.CreateProfileVolume!(prof1, VolData_combined1, Depth_ GeophysicalModelGenerator.CreateProfileSurface!(prof1,Data.Surface) @test prof1.SurfData[1].fields.MohoDepth[80] ≈ 
-37.58791461075397km -# dito with EQ data: +# ditto with EQ data: GeophysicalModelGenerator.CreateProfilePoint!(prof1,Data.Point, section_width=5km) GeophysicalModelGenerator.CreateProfilePoint!(prof4,Data.Point, section_width=10km) @test length(prof1.PointData[1].lon) == 13 @@ -119,4 +123,4 @@ section_width=50km profile_backwards_compat = ExtractProfileData("test_files/PickedProfiles.txt",1,"test_files/AlpineData_remote.txt",DimsVolCross=DimsVolCross,DepthVol=Depth_extent,DimsSurfCross=DimsSurfCross,WidthPointProfile=section_width) -@test length(profile_backwards_compat.PointData[1].lon) == 440 \ No newline at end of file +@test length(profile_backwards_compat.PointData[1].lon) == 440 diff --git a/test/test_data_import.jl b/test/test_data_import.jl index f76edaa5..eafc94f0 100644 --- a/test/test_data_import.jl +++ b/test/test_data_import.jl @@ -28,7 +28,7 @@ using GeophysicalModelGenerator # test loading images (profiles & mapviews) -# Extact & save profile in GeoData format +# Extract & save profile in GeoData format filename = "test.png"; # fake png Corner_LowerLeft = (18.0, 51.0, -590.0) Corner_UpperRight = (9.0, 42.0, 0.0) @@ -38,6 +38,10 @@ data_Image = Screenshot_To_GeoData(filename,Corner_LowerLeft, Corner_ @test Value(data_Image.depth[1000])==-590km @test Write_Paraview(data_Image, "Profile_1")==nothing +# test if we use a different name for the color dataset +data_Image_newfieldname = Screenshot_To_GeoData(filename,Corner_LowerLeft, Corner_UpperRight, fieldname=:fake) +@test keys(data_Image_newfieldname.fields)[1] == :fake + # Test in CartData data_Image = Screenshot_To_GeoData(filename,Corner_LowerLeft, Corner_UpperRight, Cartesian=true) @test Value(data_Image.x[22]) == 18.0km @@ -74,5 +78,3 @@ data_Image = Screenshot_To_UTMData(filename,Corner_LowerLeft, Corner_ @test data_Image.EW.val[22] ≈ 0.42424242424242425 @test data_Image.NS.val[22] ≈ 48.666666666666664 @test Value(data_Image.depth[22]) ≈ -15.0m - - diff --git a/test/test_data_types.jl 
b/test/test_data_types.jl index 0df8d3d5..8e1a50d3 100644 --- a/test/test_data_types.jl +++ b/test/test_data_types.jl @@ -80,7 +80,9 @@ Data_set1 = GeoData(Lon,Lat,Depth,(FakeData=Data,Data2=Data.+1.)) @test Value(Data_set1.depth[1,2,3])==-8.0km # double-check that giving 3D arrays in the wrong ordering triggers a warning message -@test_throws ErrorException("It appears that the lon array has a wrong ordering") GeoData(Lat,Lon,Depth,(FakeData=Data,Data2=Data.+1.)) +Data_set2 = GeoData(Lat,Lon,Depth,(FakeData=Data,Data2=Data.+1.)) + +#@test (test_logger.logs[1].level==Warn && test_logger.logs[1].message=="It appears that the lon array has a wrong ordering") # Create 2D arrays & convert them Lon,Lat,Depth = LonLatDepthGrid(10:20,30:40,-50km); diff --git a/test/test_files/UTM2GTIF.TIF b/test/test_files/UTM2GTIF.TIF new file mode 100644 index 00000000..a0fde79e Binary files /dev/null and b/test/test_files/UTM2GTIF.TIF differ diff --git a/test/test_files/length_fwd.tif b/test/test_files/length_fwd.tif new file mode 100644 index 00000000..b957c76a Binary files /dev/null and b/test/test_files/length_fwd.tif differ diff --git a/test/test_files/length_fwd_longlat.tif.aux.xml b/test/test_files/length_fwd_longlat.tif.aux.xml new file mode 100644 index 00000000..c5b726af --- /dev/null +++ b/test/test_files/length_fwd_longlat.tif.aux.xml @@ -0,0 +1,12 @@ + + + + YES + 33171.3984375 + 12580.302107985 + 45.0491065979 + 8772.9835599453 + 32.86 + + + diff --git a/test/test_utils.jl b/test/test_utils.jl index 005214d8..573fe49d 100644 --- a/test/test_utils.jl +++ b/test/test_utils.jl @@ -81,13 +81,14 @@ test_cross = CrossSection(Data_setCart3D, Lon_level=15, dims=(50,100), In # Create 3D volume with some fake data Grid = CreateCartGrid(size=(100,100,100), x=(0.0km, 99.9km), y=(-10.0km, 20.0km), z=(-40km,4km)); X,Y,Z = XYZGrid(Grid.coord1D...); -DataSet = CartData(X,Y,Z,(Depthdata=Z,)) +DataSet_Cart = CartData(X,Y,Z,(Depthdata=Z,)) -test_cross = CrossSection(DataSet, dims=(100,100), 
Interpolate=true, Start=(ustrip(Grid.min[1]),ustrip(Grid.max[2])), End=(ustrip(Grid.max[1]), ustrip(Grid.min[2]))) +test_cross_cart = CrossSection(DataSet_Cart, dims=(100,100), Interpolate=true, Start=(ustrip(Grid.min[1]),ustrip(Grid.max[2])), End=(ustrip(Grid.max[1]), ustrip(Grid.min[2]))) -flatten_cross = FlattenCrossSection(test_cross) +flatten_cross = FlattenCrossSection(test_cross_cart) @test flatten_cross[2][30]==1.0536089537226578 +@test test_cross_cart.fields.FlatCrossSection[2][30] == flatten_cross[2][30] # should be added by default # Flatten 3D CrossSection with GeoData Lon,Lat,Depth = LonLatDepthGrid(10:20,30:40,(-300:25:0)km); @@ -106,17 +107,33 @@ Data_sub_Interp = ExtractSubvolume(Data_set3D,Lon_level=(10,15), Lat_level=(30,3 @test Data_sub_Interp.fields[1][11]==-600km @test size(Data_sub_Interp.lat)==(51,21,32) +Data_sub_Interp_Cart = ExtractSubvolume(DataSet_Cart,X_level=(10,15), Y_level=(10,12), Interpolate=true, dims=(51,21,32)) +@test Data_sub_Interp_Cart.fields[1][11]==-40km +@test size(Data_sub_Interp_Cart.x)==(51,21,32) + +Data_cross_Interp_Cart = ExtractSubvolume(test_cross_cart,X_level=(10,50), Z_level=(-20,-5), dims=(51,61)) +@test Data_cross_Interp_Cart.fields[1][11]==18.0 +@test size(Data_cross_Interp_Cart.x)==(51,61,1) + # no interpolation Data_sub_NoInterp = ExtractSubvolume(Data_set3D,Lon_level=(10,15), Lat_level=(30,32), Interpolate=false, dims=(51,21,32)) @test Data_sub_NoInterp.fields[1][11]==-600km @test size(Data_sub_NoInterp.lat)==(6,3,13) +Data_sub_Interp_Cart = ExtractSubvolume(DataSet_Cart,X_level=(10,15), Y_level=(10,12), Interpolate=false, dims=(51,21,32)) +@test Data_sub_Interp_Cart.fields[1][5]==-40km +@test size(Data_sub_Interp_Cart.x)==(6,8,100) + + # Extract subset of cross-section test_cross = CrossSection(Data_set3D, Lat_level=35, dims=(50,100), Interpolate=true) Data_sub_cross = ExtractSubvolume(test_cross, Depth_level=(-100km,0km), Interpolate=false) @test Data_sub_cross.fields[1][11]==-200.00000000000003km @test 
size(Data_sub_cross.lat)==(50,1,34) +test_cross_cart = CrossSection(DataSet_Cart, Start=(0.0,-9.0), End=(90.0, 19.0)) # Cartesian cross-section + + # compute the mean velocity per depth in a 3D dataset and subtract the mean from the given velocities Data_pert = SubtractHorizontalMean(ustrip(Data)) # 3D, no units @test Data_pert[10] == 0.0 @@ -183,16 +200,15 @@ Data_VoteMap = VoteMap([Data_set3D_reverse, Data_set3D], ["Depthdata<-560","LonD X,Y,Z = LonLatDepthGrid(10:20,30:40,-50:-10); Data_C = ParaviewData(X,Y,Z,(Depth=Z,)) Data_C1 = RotateTranslateScale(Data_C, Rotate=30); -@test Data_C1.x.val[10] ≈ 20.964101615137753 -@test Data_C1.y.val[10] ≈ 32.66987298107781 +@test Data_C1.x.val[10] ≈ 1.4544826719043336 +@test Data_C1.y.val[10] ≈ 35.48076211353316 @test Data_C1.z.val[20] == -50 Data_C1 = RotateTranslateScale(Data_C, Scale=10, Rotate=10, Translate=(1,2,3)); -@test Data_C1.x.val[10] ≈ 213.78115820908607 -@test Data_C1.y.val[10] ≈ 339.4092822315127 +@test Data_C1.x.val[10] ≈ 136.01901977224043 +@test Data_C1.y.val[10] ≈ 330.43547966037914 @test Data_C1.z.val[20] == -497.0 - # create point data set (e.g. Earthquakes) Lon,Lat,Depth = LonLatDepthGrid(15:0.05:17,35:0.05:37,280km); Depth = Depth - 20*Lon*km; # some variation in depth diff --git a/tutorial/Tutorial_netCDF.jl b/tutorial/Tutorial_netCDF.jl index 4abe22df..04ee6edb 100644 --- a/tutorial/Tutorial_netCDF.jl +++ b/tutorial/Tutorial_netCDF.jl @@ -1,11 +1,11 @@ """ -This is Tutorial_netCDF.jl. It contains all the necessary commands to import a netCDF file from a seismic tomography, +This is Tutorial_netCDF.jl. It contains all the necessary commands to import a netCDF file from a seismic tomography, to convert it to the GeoData format and to export it to a Paraview format. The following steps are performed: 1. Read data from this file. 2. Put the data in a GeoData format (this is the format that is used internally in the GMG). 3. Export that data to a format readable by Paraview. 
-The data file used in this example can be downloaded here: +The data file used in this example can be downloaded here: [https://ds.iris.edu/files/products/emc/emc-files/El-Sharkawy-etal-G3.2020-MeRE2020-Mediterranean-0.0.nc](https://ds.iris.edu/files/products/emc/emc-files/El-Sharkawy-etal-G3.2020-MeRE2020-Mediterranean-0.0.nc) """ @@ -16,13 +16,13 @@ filename = "./El-Sharkawy-etal-G3.2020-MeRE2020-Mediterranean-0.0.nc" # this onl # 2. load desired data -# Now check with ncinfo(filename), what the variables are called exactly and what the contents of your netCDF file are +# Now check with ncinfo(filename), what the variables are called exactly and what the contents of your netCDF file are lat = ncread(filename,"latitude"); lon = ncread(filename,"longitude"); depth = ncread(filename,"depth"); vs = ncread(filename,"Vs"); -depth = -1 .* depth # CAREFUL: MAKE SURE DEPTH IS NEGATIVE, AS THIS IS THE ASSUMTPION IN GeoData +depth = -1 .* depth # CAREFUL: MAKE SURE DEPTH IS NEGATIVE, AS THIS IS THE ASSUMPTION IN GeoData # For netCDF data, 3D coordinates of a regular grid are only given as 1D vectors. As we need to compute Cartesian coordinates for # Paraview, we need the full matrix of grid coordinates @@ -33,4 +33,3 @@ Data_set = GeoData(Lon3D,Lat3D,Depth3D,(VS=vs,)) # Export the data structure to Paraview format Write_Paraview(Data_set, "test_netcdf_3D") -