Skip to content

Commit

Permalink
Merge f4bab84 into cd0f907
Browse files Browse the repository at this point in the history
  • Loading branch information
Balinus committed Apr 30, 2020
2 parents cd0f907 + f4bab84 commit ba038e9
Show file tree
Hide file tree
Showing 11 changed files with 212 additions and 137 deletions.
11 changes: 6 additions & 5 deletions Project.toml
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,10 @@ IterTools = "c8e1da08-722c-5040-9ed9-7db0dc04731e"
NCDatasets = "85f8d34a-cbdd-5861-8df4-14fed0d494ab"
NaNMath = "77ba4419-2d1f-58cd-9bb1-8ffee604a2e3"
NetCDF = "30363a11-5582-574a-97bb-aa9a979735b9"
Pkg = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f"
Polynomials = "f27b6e38-b328-58d1-80ce-0feddd5e7a45"
ProgressMeter = "92933f4c-e287-5a05-a399-4b506db050ca"
PyCall = "438e738f-606a-5dbb-bf0a-cddfbfd45ab0"
Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
Reexport = "189a3867-3050-52da-a836-e630ba90ab69"
Shapefile = "8e980c4a-a4fe-5da2-b3a7-4b4b0353a2f4"
Expand All @@ -33,16 +35,15 @@ AxisArrays = "0.3, 0.4"
ClimateBase = "0.6"
DataFrames = "0.19, 0.20"
Distances = "0.7, 0.8"
Extremes = "0.1.1"
Extremes = "0.1"
GeoStats = "0.11"
Interpolations = "0.12"
InverseDistanceWeighting = "0.2, 0.3"
IterTools = "1.1, 1.2, 1.3"
NCDatasets = "0.9, 0.10"
NaNMath = "0.3"
NetCDF = "0.7, 0.8, 0.9"
Polynomials = "0.5, 0.6, 0.7"
NetCDF = "0.7, 0.8, 0.9, 0.10"
Polynomials = "0.5, 0.6, 0.7, 0.8"
ProgressMeter = "1"
Reexport = "0.2"
Shapefile = "0.4, 0.5, 0.6"
julia = "1.2, 1.3"
julia = "1.2, 1.3, 1.4"
68 changes: 6 additions & 62 deletions deps/build.jl
Original file line number Diff line number Diff line change
@@ -1,65 +1,9 @@
# deps/build.jl — configure PyCall for ClimateTools.
#
# On CI, setting ENV["PYTHON"] = "" before rebuilding PyCall tells PyCall to
# use its own private, Conda-managed Python distribution instead of any system
# Python.  This ensures the Python-side dependencies (e.g. scipy, imported in
# the package's __init__) can be installed automatically and reproducibly.
using Pkg
using PyCall

if lowercase(get(ENV, "CI", "false")) == "true"
    ENV["PYTHON"] = ""
    Pkg.build("PyCall")
end
3 changes: 3 additions & 0 deletions docs/Project.toml
Original file line number Diff line number Diff line change
@@ -1,5 +1,8 @@
[deps]
Documenter = "e30172f5-a6a5-5a46-863b-614d45cd2de4"
PyCall = "438e738f-606a-5dbb-bf0a-cddfbfd45ab0"
Pkg = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f"

[compat]
Documenter = "0.20, 0.21, 0.22, 0.23, 0.24"
PyCall = "1.91"
3 changes: 2 additions & 1 deletion docs/make.jl
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
# Activate the docs/ project environment so the doc build uses docs/Project.toml.
using Pkg
Pkg.activate(@__DIR__)
# True when running under a CI service that sets ENV["CI"] = "true".
CI = get(ENV, "CI", nothing) == "true"
# Force PyCall onto its private Conda-managed Python before loading
# ClimateTools, so the scipy dependency can be resolved during the doc build.
ENV["PYTHON"] = ""
Pkg.build("PyCall")
using Documenter, ClimateTools

makedocs(sitename = "ClimateTools.jl",
Expand Down
21 changes: 18 additions & 3 deletions docs/src/interpolation.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,16 +2,31 @@

A typical step in climate analysis is to interpolate a given grid onto another grid. `ClimateTools` provides such a tool by wrapping SciPy's `griddata` function. It is intended for visualization or as a first step before bias-correcting the `ClimGrid` dataset.

[`regrid`](@ref) function will interpolate the data contained in `ClimGrid A` into the coordinates of `ClimGrid B` and returns a new `ClimGrid C` which contains the interpolated data of `A` into the grid of `B`.
[`griddata`](@ref) function will interpolate the data contained in `ClimGrid A` into the coordinates of `ClimGrid B` and returns a new `ClimGrid C` which contains the interpolated data of `A` into the grid of `B`.

```julia
C = regrid(A::ClimGrid, B::ClimGrid)
C = griddata(A::ClimGrid, B::ClimGrid)
```

It is also possible to interpolate a `ClimGrid` onto specified longitude and latitude vectors and arrays.

```julia
C = regrid(A::ClimGrid, lon::AbstractArray{N, T} where N where T, lat::AbstractArray{N, T} where N where T; dimx=[], dimy=[], method::String="linear", min=[], max=[])
C = griddata(A::ClimGrid, lon::AbstractArray{N, T} where N where T, lat::AbstractArray{N, T} where N where T; dimx=[], dimy=[], method::String="linear", min=[], max=[])
```

In the case where 2D longitude and latitude arrays are provided, the user needs to provide the dimension vectors for `x` and `y`.

## Experimental

ClimateTools also provides a way to use the geostatistical methods of `GeoStats`. See the `geostats` function for details.

```julia
using GeoStats
using ClimateTools

target = :pr # e.g. precipitation
n = 30 # max number of neighboring points
solver = Kriging(target => (maxneighbors=n,))

C = geostats(A::ClimGrid, B::ClimGrid, solver=solver)
```
12 changes: 10 additions & 2 deletions src/ClimateTools.jl
Original file line number Diff line number Diff line change
Expand Up @@ -19,9 +19,10 @@ using Statistics
using Random
using Dates
using GeoStats
using InverseDistanceWeighting
# using InverseDistanceWeighting
using Extremes
using Distances
using PyCall
import Base.vcat
import Base.getindex
import Base.show
Expand All @@ -40,6 +41,12 @@ import Base: *
import Base: /
import Base.findmax

# Placeholder for the Python `scipy.interpolate` module.  It is filled in by
# __init__ below rather than at top level — the standard PyCall pattern, since
# Python objects cannot be stored in the precompiled module image.
const scipy = PyNULL()

# Populate `scipy` at module load time.  `pyimport_conda` imports
# `scipy.interpolate`, installing the "scipy" package via Conda first if it is
# missing from the active Python environment.
function __init__()
copy!(scipy, pyimport_conda("scipy.interpolate", "scipy"))
end

# Included files
include("functions.jl")
include("indices.jl")
Expand All @@ -63,8 +70,9 @@ export drought_dc
export ensemble_mean, ensemble_std, ensemble_max, ensemble_min
export load, load2D
export regrid, applymask
export griddata
export shapefile_coords, shapefile_coords_poly
export resample, spatialsubset
export resample, spatialsubset, timestep
export qqmap, qqmaptf
export biascorrect_extremes
export permute_west_east
Expand Down
7 changes: 2 additions & 5 deletions src/cf_conventions.jl
Original file line number Diff line number Diff line change
Expand Up @@ -10,9 +10,8 @@ function get_dimname(ds::NCDatasets.Dataset, dim::String)
found_dim = "NA"

# try finding with the "axis" attribute

for idim in dimensions
if idim != "bnds" && idim != "vertices"
if idim != "bnds" && idim != "vertices" && idim != "ts" && idim != "string1"
if haskey(ds[idim].attrib, "axis")
if ds[idim].attrib["axis"] == dim
found_dim = idim
Expand All @@ -22,7 +21,6 @@ function get_dimname(ds::NCDatasets.Dataset, dim::String)
end

# if found_dim is still not found, try to find it with the standard_name

if found_dim == "NA"

dim_dict = Dict(["T" => ["time"],
Expand All @@ -31,15 +29,14 @@ function get_dimname(ds::NCDatasets.Dataset, dim::String)
"Y" => ["latitude"]])

for idim in dimensions
if idim != "bnds" && idim != "vertices"
if idim != "bnds" && idim != "vertices" && idim != "ts" && idim != "string1"
attribs = ds[idim].attrib
if haskey(attribs, "standard_name")
name = "standard_name"
elseif haskey(attribs, "long_name")
name = "long_name"
end
if in(ds[idim].attrib[name], dim_dict[dim])
# ds[idim].attrib[name] == dim_dict[dim]
found_dim = idim
end
end
Expand Down

0 comments on commit ba038e9

Please sign in to comment.