get_era5_lai_high_all_basins_interannual.py
import warnings
from datetime import datetime
from math import pi, sin

import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import iris
import iris.coord_categorisation

from jpros.cube_funcs import *   # assumed to provide get_dates, get_lats and get_lons used below
from jpros import extract

warnings.filterwarnings("ignore")
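
# Purpose: compute area-weighted, basin-mean time series of ERA5 high-vegetation
# LAI for a set of Amazon sub-basins and save the results to a single .npy
# dictionary for later interannual analysis.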
def get_pixel_size(lon):
    """Return the area (m2) of a single grid cell in each latitude band."""
    res = 360 / len(lon)                              # grid resolution (degrees)
    lat = np.arange((len(lon) / 2) + 1) * res - 90    # latitude band edges
    r = 6.371 * 1e6                                   # Earth radius (m)
    rad = 2 * pi / 360                                # degrees -> radians
    da = np.nan * np.zeros(len(lat) - 1)              # cell areas (m2)
    for i in range(len(lat) - 1):
        da[i] = 2 * pi * (1 / len(lon)) * r**2 * (sin(rad * lat[i + 1]) - sin(rad * lat[i]))
    return da
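
# Sanity check (sketch, not run by the script): the per-cell areas should tile
# the sphere, i.e. summing over all latitude bands and multiplying by the number
# of longitude cells recovers the Earth's surface area, 4*pi*r**2.
# lon = np.arange(0, 360, 0.25)
# assert np.isclose(get_pixel_size(lon).sum() * len(lon), 4 * pi * (6.371e6) ** 2)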
def get_area_weighted_avg(cube, mask, pixel_size_grid):
    """Average the cube data over the masked region, weighted by pixel area."""
    masked_data = np.ma.array(cube.data, mask=~mask)
    masked_weights = np.ma.array(pixel_size_grid, mask=~mask)
    avg = np.ma.average(masked_data, weights=masked_weights)
    return avg
# Read in the reanalysis dataset
path = '/nfs/a68/gyjcab/datasets/lapse_data_harmonised/Jan_2018/Final/0.25deg/'
startyr = 2003
endyr = 2013
constraint = iris.Constraint(time=lambda cell:
                             datetime(startyr, 1, 1) <= cell.point <= datetime(endyr, 12, 31))
era5_rdn = iris.load_cube(path + 'lai_high*era5*deg*.nc', constraint=constraint)
era5_dates = get_dates(era5_rdn)
# Generate a mask to extract data for each basin
lats = get_lats(era5_rdn)
lons = get_lons(era5_rdn)
basin_shapes = ['purus_dissolved', 'madeira_dissolved',
                'negro_dissolved', 'xingu_dissolved',
                'jari_dissolved', 'japura_dissolved',
                'branco_dissolved', 'aripuana_dissolved',
                'solimoes_dissolved5', 'tapajos_dissolved3',
                'amazon_obidos_dissolved']
results_dict = {}
for basin in basin_shapes:
    print(basin)
    mask = extract.get_mask(lats, lons, basin)
    temp = 1 * mask

    # Expand the per-latitude cell areas into a 2D (lat, lon) weighting grid
    pixel_size_grid = get_pixel_size(lons)
    pixel_size_grid = np.array([pixel_size_grid] * len(lons)).transpose()

    # Area-weighted basin mean for each time step
    era5_vals = []
    for yr in range(era5_rdn.shape[0]):
        data = era5_rdn[yr, :, :]
        amazon_mean = get_area_weighted_avg(data, mask, pixel_size_grid)
        era5_vals.append(amazon_mean)

    era5_df = pd.DataFrame()
    era5_df['dates'] = era5_dates
    era5_df['basin_mean'] = era5_vals
    era5_df = era5_df.set_index('dates')

    outdict = {}
    outdict['era5'] = era5_df
    basin_name = basin.split('_')[0]
    results_dict[basin_name] = outdict
# Save the per-basin results to a single dictionary
outpath = '/nfs/a68/gyjcab/datasets/et_analysis/'
np.save(outpath + 'reanalysis_all_basins_lai_high_' + str(startyr) + '_' +
        str(endyr) + '_interannual.npy', results_dict)
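
# Example (sketch): loading the saved dictionary back in a later analysis step.
# np.save pickles the nested dict, so allow_pickle=True is needed on load; the
# top-level keys are the basin names used above (e.g. 'purus', 'amazon').
# results = np.load(outpath + 'reanalysis_all_basins_lai_high_2003_2013_interannual.npy',
#                   allow_pickle=True).item()
# purus_df = results['purus']['era5']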