/
gfs_pv_fcst.py
95 lines (71 loc) · 3.25 KB
/
gfs_pv_fcst.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
# This script calculates potential vorticity (PV) from variables found in the GFS forecast model grib files. This script is originally from Taylor Mandelbaum, SBU.
# Adjustments have been made by Lindsay Blank, NCAR.
# July 2020
###################################################################################################
import sys
import os
import re
import datetime as dt
from metpy import calc as mpcalc
from metpy.units import units
import xarray as xr
import cfgrib
###################################################################################################
def pv(input_file):
    """Compute the layer-mean potential vorticity (in PVU) from a GFS GRIB file.

    Reads temperature, u-wind, and v-wind on isobaric levels (>= 100 hPa),
    derives potential temperature and baroclinic PV with MetPy, then averages
    PV over the pressure layer bounded by the PV_LAYER_MAX_PRESSURE /
    PV_LAYER_MIN_PRESSURE environment variables (defaults 1000/100 hPa).

    Parameters
    ----------
    input_file : str
        Path to a GFS forecast GRIB file.

    Returns
    -------
    numpy.ndarray
        2-D (lat, lon) layer-mean PV field in PV units (1e-6 K m^2 kg^-1 s^-1).
    """
    # Variables needed for the PV computation.
    grib_vars = ['t', 'u', 'v']
    # Load a list of datasets, one per variable; 'indexpath':'' disables
    # cfgrib's on-disk index files.
    ds_list = [cfgrib.open_datasets(
                   input_file,
                   backend_kwargs={'filter_by_keys': {'typeOfLevel': 'isobaricInhPa',
                                                      'shortName': v},
                                   'indexpath': ''})
               for v in grib_vars]
    # Flatten the list of lists to a single list of datasets, keeping only
    # levels at or below the 100 hPa surface.
    ds_flat = [x.sel(isobaricInhPa=x.isobaricInhPa[x.isobaricInhPa >= 100.0].values)
               for ds in ds_list for x in ds]
    # Merge the variables into a single dataset.
    ds = xr.merge(ds_flat)
    # Add pressure as a data variable broadcast to the shape of temperature.
    ds['p'] = xr.DataArray(ds.isobaricInhPa.values,
                           dims=['isobaricInhPa'],
                           coords={'isobaricInhPa': ds.isobaricInhPa.values},
                           attrs={'units': 'hPa'}).broadcast_like(ds['t'])
    # Convert pressure to Pa once; the original converted it separately for
    # theta and for PV.
    p_pa = ds['p'].metpy.convert_units('Pa')
    # Calculate potential temperature.
    ds['theta'] = mpcalc.potential_temperature(p_pa, ds['t'])
    # Compute baroclinic PV and scale to PV units (1 PVU = 1e-6 K m^2 kg^-1 s^-1).
    ds['pv'] = mpcalc.potential_vorticity_baroclinic(
        ds['theta'], p_pa, ds['u'], ds['v'], latitude=ds.latitude) / 1.0e-6
    # Layer bounds from the environment (hPa). slice(max, min) because the
    # isobaric coordinate is ordered from high to low pressure.
    layer_max = float(os.environ.get('PV_LAYER_MAX_PRESSURE', 1000.0))
    layer_min = float(os.environ.get('PV_LAYER_MIN_PRESSURE', 100.0))
    # Average over the named dimension rather than positional axis=0, so the
    # result does not depend on the dataset's dimension ordering.
    met_data = ds['pv'].sel(isobaricInhPa=slice(layer_max, layer_min)) \
                       .mean(dim='isobaricInhPa').values
    return met_data
###################################################################################################
# Resolve any environment variables embedded in the path given on the command line.
input_file = os.path.expandvars(sys.argv[1])
# Compute the layer-mean PV field.
data = pv(input_file)
# MET's Python embedding expects double-precision data; the original assigned
# 'data' to 'met_data' and then immediately rebound it — collapsed to one step.
met_data = data.astype('float64')
print("max", data.max())
print("min", data.min())
# Derive init/valid/lead times from the input filename. Expected pattern:
# ...YYYYMMDD_HHMM_FFF... (dashes are normalized to underscores first).
file_regex = r"^.*([0-9]{8}_[0-9]{4})_([0-9]{3}).*$"
basename = os.path.basename(input_file).replace('-', '_')
match = re.match(file_regex, basename)
if match is None:
    print(f"Could not extract time information from filename: {input_file} using regex {file_regex}")
    sys.exit(1)
# Initialization time and forecast lead (hours) from the two capture groups.
init = dt.datetime.strptime(match[1], '%Y%m%d_%H%M')
lead = int(match[2])
valid = init + dt.timedelta(hours=lead)
print("Data Shape: " + repr(met_data.shape))
print("Data Type: " + repr(met_data.dtype))
print(valid)
print(init)
print(lead)
# Metadata dictionary read by MET's Python embedding alongside 'met_data'.
attrs = {
    # Valid/init times in MET's YYYYMMDD_HHMMSS format.
    'valid': valid.strftime("%Y%m%d_%H%M%S"),
    'init': init.strftime("%Y%m%d_%H%M%S"),
    # Forecast lead in hours, as a string.
    'lead': str(int(lead)),
    'accum': '00',
    'name': 'pv',
    'long_name': 'potential_vorticity',
    'level': 'Surface',
    'units': 'PV Units',
    # 0.5-degree global lat/lon grid matching the GFS forecast files.
    'grid': {
        'name': 'Global 0.5 Degree',
        'type' : 'LatLon',
        'lat_ll' : -90.0,
        'lon_ll' : 0.0,
        'delta_lat' : 0.5,
        'delta_lon' : 0.5,
        'Nlat' : 361,
        'Nlon' : 720,
    }
}