-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Add header information to daily SWE and SWI csv files, fixes #11
- Loading branch information
Mark Robertson
committed
Mar 31, 2020
1 parent
82af729
commit 8c22ae7
Showing
2 changed files
with
73 additions
and
36 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,43 +1,75 @@ | ||
from datetime import datetime | ||
import os | ||
|
||
from snowav.database.database import collect | ||
import pandas as pd | ||
from datetime import timedelta | ||
|
||
def write_properties(args, values, logger): | ||
''' | ||
Write daily total snowpack properties to csv. | ||
|
||
def write_properties(end_date, cnx, plotorder, basins, wy_start, run_name, | ||
figs_path, values, vollbl='TAF', logger=None): | ||
""" Write daily total snowpack properties to csv. | ||
Args | ||
------ | ||
args : dict | ||
dictionary with required inputs, see swi() figure for more information. | ||
values : str | ||
snowav database value to query and write to csv | ||
''' | ||
end_date {str}: end_date | ||
cnx {str}: database connector | ||
plotorder {list}: basins list | ||
basins {dict}: basins dict | ||
wy_start {str}: YYYY1001 | ||
run_name {str}: snowav run_name | ||
figs_path {str}: path to save files | ||
values {list}: list of snowav values | ||
vollbl {str}: volume label | ||
depthlbl {str}: depth label | ||
logger {class}: logger | ||
""" | ||
|
||
datestr = args['end_date'] + timedelta(hours=24) | ||
datestr = datestr.strftime("%Y%m%d") | ||
datestr = end_date.strftime("%Y%m%d") | ||
now_str = datetime.now().date().strftime("%Y-%m-%d") | ||
date_col = 'Date generated: {}'.format(now_str) | ||
unit = vollbl | ||
|
||
for value in values: | ||
out = collect(args['connector'], args['plotorder'], args['basins'], | ||
args['wy_start'], args['end_date'], value, args['run_name'], | ||
'total','daily') | ||
out = collect(cnx, plotorder, basins, wy_start, end_date, value, | ||
run_name, 'total', 'daily') | ||
|
||
out.index = out.index.date | ||
|
||
# setting index to date strips the index name | ||
out.index.name = 'date' | ||
|
||
if 'vol' in value or 'avail' in value: | ||
unit = args['vollbl'] | ||
if value.lower() == 'swe_vol': | ||
value_line = ('Snow Water Equivalent (SWE) volume in thousands ' + | ||
'of acre-feet (TAF)') | ||
elif value.lower() == 'swi_vol': | ||
value_line = ('Surface Water Input (SWI) volume in thousands ' + | ||
'of acre-feet (TAF)') | ||
else: | ||
if logger is not None: | ||
logger.warning(" Value types other than swe, swi, and " | ||
"their derivatives are not supported") | ||
return | ||
|
||
if 'z' in value or value == 'depth': | ||
unit = args['depthlbl'] | ||
headers = ['USDA Agricultural Research Service Snowpack Summary Data', | ||
value_line, | ||
'Data provided are daily model results from the iSnobal model', | ||
'First column is the date of model result', | ||
'Second column is the total basin volume', | ||
'Additional columns are the subbasins in the watershed', | ||
date_col, | ||
'Valid until next reports are generated', | ||
'Contact: Scott Havens <scott.havens@usda.gov>' | ||
'\n'] | ||
|
||
if value == 'density': | ||
unit = 'kg_m3' | ||
filename = '{}_timeseries_{}_{}.csv'.format(value, datestr, unit.lower()) | ||
path = os.path.join(os.path.abspath(figs_path), filename) | ||
|
||
if value == 'coldcont': | ||
unit = 'MJ' | ||
if os.path.isfile(path): | ||
os.remove(path) | ||
|
||
path = '{}{}_timeseries_{}_{}.csv'.format(args['figs_path'], value, datestr, unit.lower()) | ||
with open(path, mode='w', encoding='utf-8') as f: | ||
f.write('\n'.join(headers)) | ||
|
||
logger.info(' saving {}'.format(path)) | ||
out.to_csv(path, encoding='utf-8', mode='a') | ||
|
||
out.to_csv(path) | ||
if logger is not None: | ||
logger.info(' Saved: {}'.format(path)) |