diff --git a/code/evaluate_prediction_ncl.sh b/code/evaluate_prediction_ncl.sh new file mode 100644 index 0000000..27c3607 --- /dev/null +++ b/code/evaluate_prediction_ncl.sh @@ -0,0 +1,260 @@ +#!/bin/bash + +# Setting env variables +export YYYYMMDD_POST=$(date -d '3 day ago' '+%Y%m%d') +export wfname="/groups/ESS/aalnaim/cmaq/results/geoweaver_evalution_"$YYYYMMDD_POST"_results.txt" + +export obs_dir_NCL="/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X" +export ofname="/AQF5X_Hourly_" + +export postdata_dir="/groups/ESS/aalnaim/cmaq/prediction_nc_files/" + +export mfname="COMBINE3D_ACONC_v531_gcc_AQF5X_"$YYYYMMDD_POST"_ML_extracted.nc" + +export grid_fname="/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/GRIDCRO2D_"$YYYYMMDD_POST".nc" #This needs to be auto date + +export dx=12000 + +module load ncl + +cat <>/groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl + +load "/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl" +load "/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl" +load "/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl" + +setvalues NhlGetWorkspaceObjectId() +"wsMaximumSize": 600000000 +end setvalues + +begin +sdate=getenv("YYYYMMDD_POST") +wfname=getenv("wfname") +obs_dir=getenv("obs_dir_NCL") +ofname=getenv("ofname") +mod_dir=getenv("postdata_dir") +mfname=getenv("mfname") +dkm=tofloat(getenv("dx")) +grid_fname=(getenv("grid_fname")) + +maxdist=dkm/90000.0*1.414 +maxarea=0.25 +thd=70 + +;-----read model lat lon------ +;read lat lon +f1 = addfile(grid_fname,"r") +mlat = f1->LAT(0,0,:,:) +mlon = f1->LON(0,0,:,:) +delete(f1) +mlat1d = ndtooned(mlat) +mlon1d = ndtooned(mlon) +delete([/mlat,mlon/]) + +;-----read cmaq results----- +f2 = addfile(mod_dir+mfname,"r") +mO3 = f2->O3(:,:,:) ;ppb + + +nt = dimsizes(mO3(:,0,0)) +ny = dimsizes(mO3(0,:,0)) +nx = dimsizes(mO3(0,0,:)) + +m8O3 = new((/17,ny,nx/),"double") +m8maxO3 = new((/ny,nx/),"double") + +do ih=0,16 + m8O3(ih,:,:)=dim_avg_n(mO3(ih:ih+7,:,:),0) +end do +m8maxO3 = dim_max_n(m8O3,0) ;type double +mO31d_d=ndtooned(m8maxO3) ; type double +mO31d=tofloat(mO31d_d) + +delete([/f2,mO3,m8O3,m8maxO3/]) + +;-----read obs----- +syyyy1=str_get_cols(sdate,0,3) +smm1=str_get_cols(sdate,4,5) +sdd1=str_get_cols(sdate,6,7) + +ymd=jul2greg(greg2jul(tointeger(syyyy1),tointeger(smm1),tointeger(sdd1),-1)+1) +syyyy2=tostring_with_format(ymd(0),"%0.4i") +smm2=tostring_with_format(ymd(1),"%0.2i") +sdd2=tostring_with_format(ymd(2),"%0.2i") + +tolat=(/-999.0/) ;set the first data to 0 +tolon=tolat +toO3=tolat + +do ih=12,35 + if (ih.lt.24) then + shh=tostring_with_format(ih,"%0.2i") + syyyy=syyyy1 + smm=smm1 + sdd=sdd1 + else + shh=tostring_with_format(ih-24,"%0.2i") + syyyy=syyyy2 + smm=smm2 + sdd=sdd2 + end if + data=asciiread(obs_dir+ofname+syyyy+smm+sdd+shh+".dat",-1,"string") + xx=array_append_record(tolat,stringtofloat(str_get_field(data(1::), 2,",")),0) + yy=array_append_record(tolon,stringtofloat(str_get_field(data(1::), 3,",")),0) + zz=array_append_record(toO3,stringtofloat(str_get_field(data(1::), 4,",")),0) + delete([/tolat,tolon,toO3/]) + tolat=xx + tolon=yy + toO3=zz + delete([/xx,yy,zz/]) + delete(data) +end do + +toO3@_FillValue = -999.0 + +;-----calculate max ave 8 hour o3----- +oflag=tolat*0+1 +aa=ind((oflag.gt.0).and.(toO3.ge.0)) +ii=0 +print("8h start") +if (any(ismissing(aa))) then + iflag=0 +else + 
iflag=1 + olat=(/tolat(aa(0))/) + olon=(/tolon(aa(0))/) + oO3=(/-999.0/) + o8O3 = new(17,"float") + o8O3 = -999.0 +end if +delete(aa) +do while (iflag.gt.0) + aa=ind((tolat.eq.olat(ii)).and.(tolon.eq.olon(ii)).and.(toO3.ge.0)) + oflag(aa)=0 + if (dimsizes(aa).eq.24) then ; calculate 24 h, so calculate 8hr ozone here + do ih = 0, 16 + o8O3(ih) = avg(toO3(aa(ih:ih+7))) + end do + oO3(ii)=max(o8O3) + end if + o8O3 = -999.0 + delete(aa) + aa=ind((oflag.gt.0).and.(toO3.ge.0)) + if (any(ismissing(aa))) then + iflag=0 + else + xx=array_append_record(olat,(/tolat(aa(0))/),0) + yy=array_append_record(olon,(/tolon(aa(0))/),0) + zz=array_append_record(oO3,(/-999.0/),0) + delete([/olat,olon,oO3/]) + olat=xx + olon=yy + oO3=zz + delete([/xx,yy,zz/]) + ii=ii+1 + end if + delete(aa) +end do +print("obs 8hour max end") +aa=ind(oO3.ge.0) +nobs=dimsizes(aa) +olat24=olat(aa) +olon24=olon(aa) +oO324=oO3(aa) +print("TYPE of oO324: "+typeof(oO324)) +delete([/aa,olat,olon,oO3/]) +mO324=oO324*0-999.0 +print("TYPE of mO324: "+typeof(mO324)) +print("TYPE of mO31d: "+typeof(mO31d)) +areaa=oO324*0-999.0 +areab=areaa +aread=areaa + +;-----find model point----- +do in=0,nobs-1 + dis=sqrt((mlat1d-olat24(in))^2+(mlon1d-olon24(in))^2) + aa=minind(dis) + ;print(in+" "+aa) + if (dis(aa).lt.maxdist) then + mO324(in)=mO31d(aa) + cc=ind((mlat1d.ge.(olat24(in)-maxarea)).and.(mlat1d.le.(olat24(in)+maxarea)).and.\ + (mlon1d.ge.(olon24(in)-maxarea)).and.(mlon1d.le.(olon24(in)+maxarea))) + areaa(in)=0 + areab(in)=0 + if (oO324(in).ge.thd) then + aread(in)=0 + if (max(mO31d(cc)).ge.thd) then + areab(in)=1 + else + aread(in)=1 + end if + else + bb=ind((olat24.ge.(olat24(in)-maxarea)).and.(olat24.le.(olat24(in)+maxarea)).and.\ + (olon24.ge.(olon24(in)-maxarea)).and.(olon24.le.(olon24(in)+maxarea))) + if (max(mO31d(aa)).ge.thd) then + if (max(oO324(bb)).ge.thd) then + areaa(in)=0 + else + areaa(in)=1 + end if + else + areaa(in)=0 + end if + delete(bb) + end if + delete(cc) + end if + delete(aa) +end do + +;-----cal rmse corr nme nmb me mb----- +tt=ind((mO324.ge.0).and.(oO324.ge.0)) + +if (any(ismissing(tt))) then + rmse=-999.0 + corr=-999.0 + nmb=-999.0 + nme=-999.0 + me=-999.0 + mb=-999.0 +else + rmse=dim_rmsd_n(oO324(tt),mO324(tt),0) + corr=esccr(oO324(tt),mO324(tt),0) + nmb=sum((mO324(tt)-oO324(tt)))/sum(oO324(tt)) + nme=sum(abs(oO324(tt)-mO324(tt)))/sum(oO324(tt)) + me=avg(abs(oO324(tt)-mO324(tt))) + mb=avg((mO324(tt)-oO324(tt))) +end if +;-----cal ah afar----- +aa=ind((areaa+areab).gt.0) +bb=ind((aread+areab).gt.0) +if (any(ismissing(aa))) then + afar=0. +else + afar=tofloat(sum(areaa(aa)))/tofloat(sum(areab(aa))+sum(areaa(aa)))*100 +end if +delete(aa) +if (any(ismissing(bb))) then + ah=-999.0 +else + ah=tofloat(sum(areab(bb)))/tofloat(sum(areab(bb))+sum(aread(bb)))*100 +end if +delete(bb) +write_table(wfname,"a",[/sdate,dimsizes(tt),avg(oO324(tt)),avg(mO324(tt)),rmse,corr,nmb,nme,mb,me,ah,afar/],\ + "%s,%i,%f,%f,%f,%f,%f,%f,%f,%f,%f,%f") +delete(tt) +end + +EOF + + +ncl /groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl + +if [ $? -eq 0 ]; then + echo "Evaluation Completed Successfully" + echo "Removing ncl file: geoweaver_eva_daily_O3.ncl..." + rm /groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl +else + echo "Evaluation Failed!" 
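+    # Note: unlike the two plotting scripts below, this failure branch does not
+    # remove geoweaver_eva_daily_O3.ncl, presumably so the generated script can
+    # be inspected after a failed run; add an rm here if cleanup is preferred.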
+fi + diff --git a/code/generate_AirNow_ncl.sh b/code/generate_AirNow_ncl.sh new file mode 100644 index 0000000..d89d538 --- /dev/null +++ b/code/generate_AirNow_ncl.sh @@ -0,0 +1,324 @@ +#!/bin/bash + +# Setting env variables +export YYYYMMDD_POST=$(date -d '3 day ago' '+%Y%m%d') +export stdate_post=$(date -d '3 day ago' '+%Y-%m-%d') +export eddate_post=$(date -d '2 day ago' '+%Y-%m-%d') + + +export postdata_dir="/groups/ESS/aalnaim/cmaq/prediction_nc_files" +export mcip_dir="/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km" +export graph_dir="/groups/ESS/aalnaim/cmaq/plots" + +export obs_dir_NCL="/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X" + +module load ncl + +cat <>/groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl +load "/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl" +load "/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl" +load "/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl" + +setvalues NhlGetWorkspaceObjectId() +"wsMaximumSize": 600000000 +end setvalues + +begin + +date = getenv("YYYYMMDD_POST") +d1 = getenv("stdate_post") +d2 = getenv("eddate_post") + +obs_dir = getenv("obs_dir_NCL") +plot_dir = getenv("graph_dir") + +hr=new(24,"string") +hr=(/"00","01","02","03","04","05","06","07","08","09","10","11","12","13","14","15","16","17","18","19","20","21","22","23"/) + +print(plot_dir) +aconc_dir = getenv("postdata_dir") +grid_dir = getenv("mcip_dir") + +cdf_file1 = addfile(aconc_dir+"/COMBINE3D_ACONC_v531_gcc_AQF5X_"+date+"_ML_extracted.nc","r") +cdf_file= addfile(grid_dir+"/GRIDCRO2D_"+date+".nc","r") +cdf_file2= addfile(grid_dir+"/METCRO2D_"+date+".nc","r") + +time = cdf_file1->TFLAG(:,0,:) +o3 = cdf_file1->O3(:,:,:) ;ppb +wspd10=cdf_file2->WSPD10(:,0,:,:) +wdir10=cdf_file2->WDIR10(:,0,:,:) + +temp = cdf_file2->TEMP2 + +nt = dimsizes(o3(:,0,0)) +ny = dimsizes(o3(0,:,0)) +nx = dimsizes(o3(0,0,:)) + +print(max(temp)) +print(min(temp)) +print(avg(temp)) + + + +print(nt+" "+ny+" "+nx) +print(max(o3)) +print(min(o3)) +print(avg(o3)) + +lat = cdf_file->LAT(0,0,:,:) +lon = cdf_file->LON(0,0,:,:) + +o3@lat2d = lat +o3@lon2d = lon +o3@unit = "ppbv" + +UV10=wind_component(wspd10,wdir10,0) +UV10@lat2d = lat +UV10@lon2d = lon + + +res = True +res@gsnMaximize = True ; maximize pot in frame +res@gsnFrame = False ; dont advance frame +res@gsnDraw = False +res@gsnLeftString = "" +res@gsnRightString = "" +res@txFont = "times-roman" +res@tiMainFont = "times-roman" +;res@tiMainFontHeightF = 0.02 +;res@vpWidthF = 0.7 +;res@vpHeightF = 0.7 + +;;set map;; +mpres = res +mpres@mpLimitMode = "LatLon" +mpres@mpDataSetName = "Earth..4" +mpres@mpDataBaseVersion = "MediumRes" +mpres@mpOutlineOn = True +mpres@mpGeophysicalLineThicknessF = 1.5 +mpres@mpFillDrawOrder = "PostDraw" +mpres@mpFillOn = False +mpres@mpAreaMaskingOn = True +mpres@mpOutlineBoundarySets = "GeophysicalAndUSStates" +mpres@mpOutlineSpecifiers = "United States:States" +mpres@mpProjection = "LambertConformal" +mpres@mpLambertParallel1F = 33. +mpres@mpLambertParallel2F = 45. +mpres@mpLambertMeridianF = -98. 
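+; The fixed limits below clip the map to a CONUS window (about 120W-70W, 25N-50N);
+; the min/max(lat|lon) expressions kept in the trailing comments are the
+; data-driven alternative if the full model domain should be shown instead.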
+mpres@mpMinLonF = -120 ;min(lon)+0.2 +mpres@mpMaxLonF = -70 ;max(lon)-0.2 +mpres@mpMinLatF = 25 ;min(lat)+0.05 +mpres@mpMaxLatF = 50 ;max(lat)-0.05 +mpres@pmTickMarkDisplayMode = "Always" +mpres@mpLandFillColor = "white" +mpres@mpInlandWaterFillColor = "white" +mpres@mpOceanFillColor = "white" +mpres@mpGeophysicalLineColor = "Black" + +;mpres@lbLabelAutoStride = True +mpres@tiXAxisFont = 25 +mpres@pmTickMarkDisplayMode = "Always" +mpres@tmXBLabelFont = 25 +mpres@tmXBLabelFontHeightF = 0.013 +mpres@tmXBLabelDeltaF = -0.5 +mpres@tmYLLabelFont = 25 +mpres@tmYLLabelFontHeightF = 0.013 +mpres@tmXBLabelDeltaF = -0.5 +mpres@tmXTLabelsOn = False +mpres@tmXTLabelFont = 25 +mpres@tmXTLabelFontHeightF = 0.013 +mpres@tmYRLabelsOn = False +mpres@tmYRLabelFont = 25 +mpres@tmYRLabelFontHeightF = 0.013 + +;;set contour;; +cnres = res +cnres@cnFillDrawOrder = "PreDraw" +cnres@cnFillOn = True +cnres@cnLinesOn = False +cnres@cnLineLabelsOn = False +cnres@lbLabelFont = 25 +cnres@lbLabelFontHeightF = 0.013 +cnres@tiXAxisFont = 25 +cnres@pmLabelBarWidthF = 0.5 +cnres@pmLabelBarHeightF = 0.1 +;cnres@pmLabelBarOrthogonalPosF = -0.02 +cnres@lbLabelAutoStride = True + +;set vector;; +res_vc = res +res_vc@vcGlyphStyle = "LineArrow" +res_vc@vcLineArrowThicknessF = 3 +res_vc@vcMinDistanceF = 0.03 +res_vc@vcRefLengthF = 0.03 +res_vc@vcRefAnnoOn = True +res_vc@vcRefMagnitudeF = 16 +res_vc@vcRefAnnoString1 = "16m/s" +res_vc@vcRefAnnoSide = "Top" +res_vc@vcRefAnnoString2On = False +res_vc@vcRefAnnoPerimOn = False +res_vc@vcRefAnnoOrthogonalPosF = -0.02 +res_vc@vcRefAnnoParallelPosF = 0.999 +;res_vc@vcRefAnnoBackgroundColor = "White" +res_vc@vcVectorDrawOrder = "PostDraw" + +do it = 0, nt-1 + if (it .lt. 12) then + pdate=d1 + else + pdate=d2 + end if + + ;print(time(it,0)+" "+time(it,1)) + rundate = yyyyddd_to_yyyymmdd( time(it,0) ) + runtime = hr( tointeger(time(it,1)/10000) ) + + site = readAsciiTable(obs_dir+"/AQF5X_Hourly_"+rundate+runtime+".dat",1,"string",1) + nrows = dimsizes(site) + sitename = str_get_field(site,1,",") + sitelat = stringtofloat(str_get_field(site,2,",")) + sitelon = stringtofloat(str_get_field(site,3,",")) + O3_obs = stringtofloat(str_get_field(site,4,",")) + + obslon = sitelon(:,0) + obslat = sitelat(:,0) + obsO3 = O3_obs(:,0) + + npts = nrows(0) + + obsO3@_FillValue = -999. + +;--- levels for dividing + levels_O3 = ispan(0,80,4) + + nlevels = dimsizes(levels_O3) + + colors = span_color_rgba("WhiteBlueGreenYellowRed",nlevels+1) + + num_distinct_markers = nlevels+1 ; number of distinct markers + lat_O3 = new((/num_distinct_markers,npts/),float) + lon_O3 = new((/num_distinct_markers,npts/),float) + lat_O3 = -999 + lon_O3 = -999 + + +; +; Group the points according to which range they fall in. At the +; same time, create the label that we will use later in the labelbar +; + do i = 0, num_distinct_markers-1 + if (i.eq.0) then + indexes_O3 = ind(obsO3(:).lt.levels_O3(0)) + end if + if (i.eq.num_distinct_markers-1) then + indexes_O3 = ind(obsO3(:).ge.max(levels_O3)) + end if + if (i.gt.0.and.i.lt.num_distinct_markers-1) then + indexes_O3 = ind(obsO3(:).ge.levels_O3(i-1).and.obsO3(:).lt.levels_O3(i)) + end if + +; +; Now that we have the set of indexes whose values fall within +; the given range, take the corresponding lat/lon values and store +; them, so later we can color this set of markers with the appropriate +; color. +; + if (.not.any(ismissing(indexes_O3))) then + npts_range_O3 = dimsizes(indexes_O3) ; # of points in this range. 
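+      ; Only the first npts_range_O3 slots of row i are filled; the remaining
+      ; slots keep the -999 fill value set above, so the later
+      ; ismissing(lat_O3(i,0)) test tells whether bin i received any stations.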
+ + lat_O3(i,0:npts_range_O3-1) = obslat(indexes_O3) + lon_O3(i,0:npts_range_O3-1) = obslon(indexes_O3) + ;print("O3: "+npts_range_O3) + end if + + + delete(indexes_O3) ; Necessary b/c "indexes" may be a different + end do + + lat_O3@_FillValue = -999 + lon_O3@_FillValue = -999 + + gsres = True + gsres@gsMarkerIndex = 16 ; Use filled dots for markers. + + hollowres = True + hollowres@gsMarkerIndex = 4 + hollowres@gsMarkerColor = "black" + hollowres@gsMarkerSizeF = 0.008 + +;;;;;;;;; Plot Ozone + pname=plot_dir+"/OBS-FORECAST_O3_"+rundate+runtime + wks = gsn_open_wks("png",pname) + gsn_define_colormap(wks, "WhiteBlueGreenYellowRed") + + pmid_O3 = new(num_distinct_markers,graphic) + hollow_O3 = new(num_distinct_markers,graphic) + + cnres@tiMainString = pdate+" "+runtime+" UTC O~B~3~N~ (ppbV)" + cnres@cnLevelSelectionMode = "ManualLevels" + cnres@cnMinLevelValF = 0. + cnres@cnMaxLevelValF = 80 + cnres@cnLevelSpacingF = 4 + + ;plot = gsn_csm_contour_map(wks,o3(it,:,:),res) + map = gsn_csm_map(wks,mpres) + contour = gsn_csm_contour(wks,o3(it,:,:),cnres) + vector = gsn_csm_vector(wks,UV10(0,it,:,:),UV10(1,it,:,:),res_vc) + overlay(map,contour) + overlay(map,vector) + + pmid = new(num_distinct_markers,graphic) + hollow = new(num_distinct_markers,graphic) + do i = 0, num_distinct_markers-1 + if (.not.ismissing(lat_O3(i,0))) + gsres@gsMarkerColor = colors(i,:) + gsres@gsMarkerSizeF = 0.008 + gsres@gsMarkerThicknessF = 1 + pmid(i) = gsn_add_polymarker(wks,vector,lon_O3(i,:),lat_O3(i,:),gsres) + hollow(i) = gsn_add_polymarker(wks,vector,lon_O3(i,:),lat_O3(i,:),hollowres) + end if + end do + + draw(map) + frame(wks) + delete(wks) + delete(pmid_O3) + delete(hollow_O3) + system("composite -geometry 100x70+900+900 /groups/ESS/aalnaim/cmaq/mason-logo-green.png "+pname+".png "+pname+".png") + + + delete(pmid) + delete(hollow) + delete(site) + delete(sitename) + delete(sitelat) + delete(sitelon) + delete(O3_obs) + delete(obslon) + delete(obslat) + delete(obsO3) + delete([/lon_O3,lat_O3/]) + +end do +delete(res) + +;/ + +end +EOF + + +ncl /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl + +convert -delay 100 /groups/ESS/aalnaim/cmaq/plots/OBS*.png /groups/ESS/aalnaim/cmaq/plots/"Airnow_"$YYYYMMDD_POST.gif + +if [ $? -eq 0 ]; then + echo "Generating AirNow images/gif Completed Successfully" + echo "Removing ncl file: geoweaver_plot_daily_O3_Airnow.ncl..." + rm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl +else + echo "Generating AirNow images/gif Failed!" + echo "Removing ncl file: geoweaver_plot_daily_O3_Airnow.ncl..." 
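+    # Note: the exit status tested above is that of the convert command, not of
+    # ncl itself, so a failed ncl run only reaches this branch if convert also fails.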
+ rm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl +fi + diff --git a/code/generate_images_ncl.sh b/code/generate_images_ncl.sh new file mode 100644 index 0000000..ee67616 --- /dev/null +++ b/code/generate_images_ncl.sh @@ -0,0 +1,171 @@ +#!/bin/bash + +# Setting env variables +export YYYYMMDD_POST=$(date -d '3 day ago' '+%Y%m%d') #This needs to be auto date `date -d "-2 day ${1}" +%Y%m%d` +export stdate_post=$(date -d '3 day ago' '+%Y-%m-%d') #This needs to be auto date +export eddate_post=$(date -d '2 day ago' '+%Y-%m-%d') #This needs to be auto date + +export postdata_dir="/groups/ESS/aalnaim/cmaq/prediction_nc_files" +export mcip_dir="/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km" +export dir_graph="/groups/ESS/aalnaim/cmaq/plots" + +module load ncl + +cat <>/groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl +load "/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl" +load "/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl" +load "/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl" + +setvalues NhlGetWorkspaceObjectId() +"wsMaximumSize": 600000000 +end setvalues + +begin + +date = getenv("YYYYMMDD_POST") +d1 = getenv("stdate_post") +d2 = getenv("eddate_post") + +;print("Passed Date: "+date) + +;aconc_dir = getenv("postdata_dir") +grid_dir = getenv("mcip_dir") +plot_dir = getenv("dir_graph") + +cdf_file1 = addfile("/groups/ESS/aalnaim/cmaq/prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_"+date+"_ML_extracted.nc","r") +cdf_file= addfile(grid_dir+"/GRIDCRO2D_"+date+".nc","r") + +ptime = (/"12","13","14","15","16","17","18","19","20","21","22","23","00","01","02","03","04","05","06","07","08","09","10","11"/) + +time = cdf_file1->TFLAG(:,0,:) +o3 = cdf_file1->O3(:,:,:) ;ppb +;pm25 = cdf_file1->PM25_TOT(:,0,:,:) + + +nt = dimsizes(o3(:,0,0)) +ny = dimsizes(o3(0,:,0)) +nx = dimsizes(o3(0,0,:)) + +print(nt+" "+ny+" "+nx) +print(max(o3)) +print(min(o3)) +print(avg(o3)) + +;print(max(pm25)) +;print(min(pm25)) +;print(avg(pm25)) + +;print(time) + +lat = cdf_file->LAT(0,0,:,:) +lon = cdf_file->LON(0,0,:,:) + +o3@lat2d = lat +o3@lon2d = lon + +res = True +res@gsnMaximize = True ; maximize pot in frame +res@gsnFrame = False ; don't advance frame +res@gsnDraw = False +;res@gsnSpreadColors = True +res@lbLabelAutoStride = True +;res@lbBoxLinesOn = False +res@pmLabelBarHeightF = 0.1 +res@pmLabelBarWidthF = 0.5 +res@cnFillOn=True +;res@cnMonoFillPattern=True +;res@cnMonoLineColor=True +res@cnLinesOn=False +;res@pmLabelBarDisplayMode="never" +res@gsnLeftString = ""; +res@gsnRightString = "" + +res@mpLimitMode = "LatLon" +res@mpMinLonF = -120 ;min(lon)+0.2 +res@mpMaxLonF = -70 ;max(lon)-0.2 +res@mpMinLatF = 25 ;min(lat)+0.05 +res@mpMaxLatF = 50 ;max(lat)-0.05 +res@mpDataBaseVersion = "MediumRes" +;res@tiMainString = times(it) +res@mpDataBaseVersion = "MediumRes" +res@mpDataSetName = "Earth..4" +res@mpAreaMaskingOn = True +res@mpOutlineBoundarySets = "GeophysicalAndUSStates" +res@mpOutlineSpecifiers="United States : States" +res@mpLandFillColor = "white" +res@mpInlandWaterFillColor = "white" +res@mpOceanFillColor = "white" +res@mpGeophysicalLineColor = "Black" +res@mpGeophysicalLineThicknessF = 1.5 + +;res@gsnSpreadColors = True +res@lbLabelAutoStride = True +res@lbLabelFont = 25 +res@tiXAxisFont = 25 +res@pmTickMarkDisplayMode = "Always" +res@tmXBLabelFont = 25 
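+; font index 25 selects Times-Roman in NCL's font table, matching txFont/tiMainFont below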
+res@tmXBLabelFontHeightF = 0.013 +res@tmXBLabelDeltaF = -0.5 +res@tmYLLabelFont = 25 +res@tmYLLabelFontHeightF = 0.013 +res@tmXBLabelDeltaF = -0.5 +res@tmXTLabelsOn = False +res@tmXTLabelFont = 25 +res@tmXTLabelFontHeightF = 0.013 +res@tmYRLabelsOn = False +res@tmYRLabelFont = 25 +res@tmYRLabelFontHeightF = 0.013 + + +res@mpProjection = "LambertConformal" ;"CylindricalEquidistant" +res@mpLambertParallel1F = 33. +res@mpLambertParallel2F = 45. +res@mpLambertMeridianF = -98. + +res@cnLevelSelectionMode = "ManualLevels" +res@cnMinLevelValF = 0. +res@cnMaxLevelValF = 80 +res@cnLevelSpacingF = 4 + +res@txFont = "times-roman" +res@tiMainFont = "times-roman" + +do it = 0, nt-1 + if (it .lt. 12) then + pdate=d1 + else + pdate=d2 + end if + + pname=plot_dir+"/testPlot_"+pdate+"_"+ptime(it) + wks = gsn_open_wks("png",pname) + gsn_define_colormap(wks, "WhiteBlueGreenYellowRed") + + res@tiMainString = pdate+" "+ptime(it)+" UTC O~B~3~N~ Forecast (ppbV)" + plot = gsn_csm_contour_map(wks,o3(it,:,:),res) + draw(plot) + frame(wks) + delete(wks) + system("composite -geometry 100x70+900+900 /groups/ESS/aalnaim/cmaq/mason-logo-green.png "+pname+".png "+pname+".png") +end do +delete(res) + +end +EOF + + +ncl /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl + +# convert -delay 100 *.png 20220613_20220614.gif +convert -delay 100 /groups/ESS/aalnaim/cmaq/plots/testPlot*.png /groups/ESS/aalnaim/cmaq/plots/"Map_"$YYYYMMDD_POST.gif + +if [ $? -eq 0 ]; then + echo "Generating images/gif Completed Successfully" + echo "Removing ncl file: geoweaver_plot_daily_O3.ncl..." + rm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl +else + echo "Generating images/gif Failed!" + echo "Removing ncl file: geoweaver_plot_daily_O3.ncl..." + rm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl +fi + diff --git a/code/process.json b/code/process.json new file mode 100644 index 0000000..93527fd --- /dev/null +++ b/code/process.json @@ -0,0 +1,57 @@ +[{ + "id" : "iicy7w", + "name" : "generate_images_ncl", + "description" : null, + "code" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=$(date -d '3 day ago' '+%Y%m%d') #This needs to be auto date `date -d \"-2 day ${1}\" +%Y%m%d`\nexport stdate_post=$(date -d '3 day ago' '+%Y-%m-%d') #This needs to be auto date\nexport eddate_post=$(date -d '2 day ago' '+%Y-%m-%d') #This needs to be auto date\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files\"\nexport mcip_dir=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km\"\nexport dir_graph=\"/groups/ESS/aalnaim/cmaq/plots\"\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\n\ndate = getenv(\"YYYYMMDD_POST\")\nd1 = getenv(\"stdate_post\")\nd2 = getenv(\"eddate_post\")\n\n;print(\"Passed Date: \"+date)\n\n;aconc_dir = getenv(\"postdata_dir\")\ngrid_dir = getenv(\"mcip_dir\")\nplot_dir = getenv(\"dir_graph\")\n\ncdf_file1 = addfile(\"/groups/ESS/aalnaim/cmaq/prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_\"+date+\"_ML_extracted.nc\",\"r\")\ncdf_file= 
addfile(grid_dir+\"/GRIDCRO2D_\"+date+\".nc\",\"r\")\n\nptime = (/\"12\",\"13\",\"14\",\"15\",\"16\",\"17\",\"18\",\"19\",\"20\",\"21\",\"22\",\"23\",\"00\",\"01\",\"02\",\"03\",\"04\",\"05\",\"06\",\"07\",\"08\",\"09\",\"10\",\"11\"/)\n\ntime = cdf_file1->TFLAG(:,0,:)\no3 = cdf_file1->O3(:,:,:) ;ppb\n;pm25 = cdf_file1->PM25_TOT(:,0,:,:)\n\n\nnt = dimsizes(o3(:,0,0))\nny = dimsizes(o3(0,:,0))\nnx = dimsizes(o3(0,0,:))\n\nprint(nt+\" \"+ny+\" \"+nx)\nprint(max(o3))\nprint(min(o3))\nprint(avg(o3))\n\n;print(max(pm25))\n;print(min(pm25))\n;print(avg(pm25))\n\n;print(time)\n\nlat = cdf_file->LAT(0,0,:,:)\nlon = cdf_file->LON(0,0,:,:)\n\no3@lat2d = lat\no3@lon2d = lon\n\nres = True\nres@gsnMaximize = True ; maximize pot in frame\nres@gsnFrame = False ; don't advance frame\nres@gsnDraw = False\n;res@gsnSpreadColors = True\nres@lbLabelAutoStride = True\n;res@lbBoxLinesOn = False\nres@pmLabelBarHeightF = 0.1\nres@pmLabelBarWidthF = 0.5\nres@cnFillOn=True\n;res@cnMonoFillPattern=True\n;res@cnMonoLineColor=True\nres@cnLinesOn=False\n;res@pmLabelBarDisplayMode=\"never\"\nres@gsnLeftString = \"\";\nres@gsnRightString = \"\"\n\nres@mpLimitMode = \"LatLon\"\nres@mpMinLonF = -120 ;min(lon)+0.2\nres@mpMaxLonF = -70 ;max(lon)-0.2\nres@mpMinLatF = 25 ;min(lat)+0.05\nres@mpMaxLatF = 50 ;max(lat)-0.05\nres@mpDataBaseVersion = \"MediumRes\"\n;res@tiMainString = times(it)\nres@mpDataBaseVersion = \"MediumRes\"\nres@mpDataSetName = \"Earth..4\"\nres@mpAreaMaskingOn = True\nres@mpOutlineBoundarySets = \"GeophysicalAndUSStates\"\nres@mpOutlineSpecifiers=\"United States : States\"\nres@mpLandFillColor = \"white\"\nres@mpInlandWaterFillColor = \"white\"\nres@mpOceanFillColor = \"white\"\nres@mpGeophysicalLineColor = \"Black\"\nres@mpGeophysicalLineThicknessF = 1.5\n\n;res@gsnSpreadColors = True\nres@lbLabelAutoStride = True\nres@lbLabelFont = 25\nres@tiXAxisFont = 25\nres@pmTickMarkDisplayMode = \"Always\"\nres@tmXBLabelFont = 25\nres@tmXBLabelFontHeightF = 0.013\nres@tmXBLabelDeltaF = -0.5\nres@tmYLLabelFont = 25\nres@tmYLLabelFontHeightF = 0.013\nres@tmXBLabelDeltaF = -0.5\nres@tmXTLabelsOn = False\nres@tmXTLabelFont = 25\nres@tmXTLabelFontHeightF = 0.013\nres@tmYRLabelsOn = False\nres@tmYRLabelFont = 25\nres@tmYRLabelFontHeightF = 0.013\n\n\nres@mpProjection = \"LambertConformal\" ;\"CylindricalEquidistant\"\nres@mpLambertParallel1F = 33.\nres@mpLambertParallel2F = 45.\nres@mpLambertMeridianF = -98.\n\nres@cnLevelSelectionMode = \"ManualLevels\"\nres@cnMinLevelValF = 0.\nres@cnMaxLevelValF = 80\nres@cnLevelSpacingF = 4\n\nres@txFont = \"times-roman\"\nres@tiMainFont = \"times-roman\"\n\ndo it = 0, nt-1\n if (it .lt. 12) then\n pdate=d1\n else\n pdate=d2\n end if\n\n pname=plot_dir+\"/testPlot_\"+pdate+\"_\"+ptime(it)\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n res@tiMainString = pdate+\" \"+ptime(it)+\" UTC O~B~3~N~ Forecast (ppbV)\"\n plot = gsn_csm_contour_map(wks,o3(it,:,:),res)\n draw(plot)\n frame(wks)\n delete(wks)\n system(\"composite -geometry 100x70+900+900 /groups/ESS/aalnaim/cmaq/mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\nend do\ndelete(res)\n\nend\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\n\n# convert -delay 100 *.png 20220613_20220614.gif\nconvert -delay 100 /groups/ESS/aalnaim/cmaq/plots/testPlot*.png /groups/ESS/aalnaim/cmaq/plots/\"Map_\"$YYYYMMDD_POST.gif\n\nif [ $? 
-eq 0 ]; then\n echo \"Generating images/gif Completed Successfully\"\n\techo \"Removing ncl file: geoweaver_plot_daily_O3.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\nelse\n echo \"Generating images/gif Failed!\"\n echo \"Removing ncl file: geoweaver_plot_daily_O3.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\nfi\n", + "lang" : "shell", + "owner" : "111111", + "confidential" : "FALSE" +},{ + "id" : "is1w3m", + "name" : "generate_AirNow_ncl", + "description" : null, + "code" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=$(date -d '3 day ago' '+%Y%m%d')\nexport stdate_post=$(date -d '3 day ago' '+%Y-%m-%d') \nexport eddate_post=$(date -d '2 day ago' '+%Y-%m-%d')\n\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files\"\nexport mcip_dir=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km\"\nexport graph_dir=\"/groups/ESS/aalnaim/cmaq/plots\"\n\nexport obs_dir_NCL=\"/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X\"\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\n\ndate = getenv(\"YYYYMMDD_POST\") \nd1 = getenv(\"stdate_post\") \nd2 = getenv(\"eddate_post\") \n\nobs_dir = getenv(\"obs_dir_NCL\")\nplot_dir = getenv(\"graph_dir\") \n\nhr=new(24,\"string\")\nhr=(/\"00\",\"01\",\"02\",\"03\",\"04\",\"05\",\"06\",\"07\",\"08\",\"09\",\"10\",\"11\",\"12\",\"13\",\"14\",\"15\",\"16\",\"17\",\"18\",\"19\",\"20\",\"21\",\"22\",\"23\"/)\n\nprint(plot_dir)\naconc_dir = getenv(\"postdata_dir\") \ngrid_dir = getenv(\"mcip_dir\") \n\ncdf_file1 = addfile(aconc_dir+\"/COMBINE3D_ACONC_v531_gcc_AQF5X_\"+date+\"_ML_extracted.nc\",\"r\")\ncdf_file= addfile(grid_dir+\"/GRIDCRO2D_\"+date+\".nc\",\"r\")\ncdf_file2= addfile(grid_dir+\"/METCRO2D_\"+date+\".nc\",\"r\")\n\ntime = cdf_file1->TFLAG(:,0,:)\no3 = cdf_file1->O3(:,:,:) ;ppb\nwspd10=cdf_file2->WSPD10(:,0,:,:)\nwdir10=cdf_file2->WDIR10(:,0,:,:)\n\ntemp = cdf_file2->TEMP2\n\nnt = dimsizes(o3(:,0,0))\nny = dimsizes(o3(0,:,0))\nnx = dimsizes(o3(0,0,:))\n\nprint(max(temp))\nprint(min(temp))\nprint(avg(temp))\n\n\n\nprint(nt+\" \"+ny+\" \"+nx)\nprint(max(o3))\nprint(min(o3))\nprint(avg(o3))\n\nlat = cdf_file->LAT(0,0,:,:)\nlon = cdf_file->LON(0,0,:,:)\n\no3@lat2d = lat\no3@lon2d = lon\no3@unit = \"ppbv\"\n\nUV10=wind_component(wspd10,wdir10,0)\nUV10@lat2d = lat\nUV10@lon2d = lon\n\n\nres = True\nres@gsnMaximize = True ; maximize pot in frame\nres@gsnFrame = False ; dont advance frame\nres@gsnDraw = False\nres@gsnLeftString = \"\"\nres@gsnRightString = \"\"\nres@txFont = \"times-roman\"\nres@tiMainFont = \"times-roman\"\n;res@tiMainFontHeightF = 0.02\n;res@vpWidthF = 0.7\n;res@vpHeightF = 0.7\n\n;;set map;;\nmpres = res\nmpres@mpLimitMode = \"LatLon\"\nmpres@mpDataSetName = \"Earth..4\"\nmpres@mpDataBaseVersion = \"MediumRes\"\nmpres@mpOutlineOn = True\nmpres@mpGeophysicalLineThicknessF = 1.5\nmpres@mpFillDrawOrder = \"PostDraw\"\nmpres@mpFillOn = False\nmpres@mpAreaMaskingOn = True\nmpres@mpOutlineBoundarySets = 
\"GeophysicalAndUSStates\"\nmpres@mpOutlineSpecifiers = \"United States:States\"\nmpres@mpProjection = \"LambertConformal\"\nmpres@mpLambertParallel1F = 33.\nmpres@mpLambertParallel2F = 45.\nmpres@mpLambertMeridianF = -98.\nmpres@mpMinLonF = -120 ;min(lon)+0.2\nmpres@mpMaxLonF = -70 ;max(lon)-0.2\nmpres@mpMinLatF = 25 ;min(lat)+0.05\nmpres@mpMaxLatF = 50 ;max(lat)-0.05\nmpres@pmTickMarkDisplayMode = \"Always\"\nmpres@mpLandFillColor = \"white\"\nmpres@mpInlandWaterFillColor = \"white\"\nmpres@mpOceanFillColor = \"white\"\nmpres@mpGeophysicalLineColor = \"Black\"\n\n;mpres@lbLabelAutoStride = True\nmpres@tiXAxisFont = 25\nmpres@pmTickMarkDisplayMode = \"Always\"\nmpres@tmXBLabelFont = 25\nmpres@tmXBLabelFontHeightF = 0.013\nmpres@tmXBLabelDeltaF = -0.5\nmpres@tmYLLabelFont = 25\nmpres@tmYLLabelFontHeightF = 0.013\nmpres@tmXBLabelDeltaF = -0.5\nmpres@tmXTLabelsOn = False\nmpres@tmXTLabelFont = 25\nmpres@tmXTLabelFontHeightF = 0.013\nmpres@tmYRLabelsOn = False\nmpres@tmYRLabelFont = 25\nmpres@tmYRLabelFontHeightF = 0.013\n\n;;set contour;;\ncnres = res\ncnres@cnFillDrawOrder = \"PreDraw\"\ncnres@cnFillOn = True\ncnres@cnLinesOn = False\ncnres@cnLineLabelsOn = False\ncnres@lbLabelFont = 25\ncnres@lbLabelFontHeightF = 0.013\ncnres@tiXAxisFont = 25\ncnres@pmLabelBarWidthF = 0.5\ncnres@pmLabelBarHeightF = 0.1\n;cnres@pmLabelBarOrthogonalPosF = -0.02\ncnres@lbLabelAutoStride = True\n\n;set vector;;\nres_vc = res\nres_vc@vcGlyphStyle = \"LineArrow\"\nres_vc@vcLineArrowThicknessF = 3\nres_vc@vcMinDistanceF = 0.03\nres_vc@vcRefLengthF = 0.03\nres_vc@vcRefAnnoOn = True\nres_vc@vcRefMagnitudeF = 16\nres_vc@vcRefAnnoString1 = \"16m/s\"\nres_vc@vcRefAnnoSide = \"Top\"\nres_vc@vcRefAnnoString2On = False\nres_vc@vcRefAnnoPerimOn = False\nres_vc@vcRefAnnoOrthogonalPosF = -0.02\nres_vc@vcRefAnnoParallelPosF = 0.999\n;res_vc@vcRefAnnoBackgroundColor = \"White\"\nres_vc@vcVectorDrawOrder = \"PostDraw\"\n\ndo it = 0, nt-1\n if (it .lt. 12) then\n pdate=d1\n else\n pdate=d2\n end if\n\n ;print(time(it,0)+\" \"+time(it,1))\n rundate = yyyyddd_to_yyyymmdd( time(it,0) )\n runtime = hr( tointeger(time(it,1)/10000) )\n\n site = readAsciiTable(obs_dir+\"/AQF5X_Hourly_\"+rundate+runtime+\".dat\",1,\"string\",1)\n nrows = dimsizes(site)\n sitename = str_get_field(site,1,\",\")\n sitelat = stringtofloat(str_get_field(site,2,\",\"))\n sitelon = stringtofloat(str_get_field(site,3,\",\"))\n O3_obs = stringtofloat(str_get_field(site,4,\",\"))\n\n obslon = sitelon(:,0)\n obslat = sitelat(:,0)\n obsO3 = O3_obs(:,0)\n\n npts = nrows(0)\n\n obsO3@_FillValue = -999.\n\n;--- levels for dividing\n levels_O3 = ispan(0,80,4)\n\n nlevels = dimsizes(levels_O3)\n\n colors = span_color_rgba(\"WhiteBlueGreenYellowRed\",nlevels+1)\n\n num_distinct_markers = nlevels+1 ; number of distinct markers\n lat_O3 = new((/num_distinct_markers,npts/),float)\n lon_O3 = new((/num_distinct_markers,npts/),float)\n lat_O3 = -999\n lon_O3 = -999\n\n\n;\n; Group the points according to which range they fall in. 
At the\n; same time, create the label that we will use later in the labelbar\n;\n do i = 0, num_distinct_markers-1\n if (i.eq.0) then\n indexes_O3 = ind(obsO3(:).lt.levels_O3(0))\n end if\n if (i.eq.num_distinct_markers-1) then\n indexes_O3 = ind(obsO3(:).ge.max(levels_O3))\n end if\n if (i.gt.0.and.i.lt.num_distinct_markers-1) then\n indexes_O3 = ind(obsO3(:).ge.levels_O3(i-1).and.obsO3(:).lt.levels_O3(i))\n end if\n\n;\n; Now that we have the set of indexes whose values fall within\n; the given range, take the corresponding lat/lon values and store\n; them, so later we can color this set of markers with the appropriate\n; color.\n;\n if (.not.any(ismissing(indexes_O3))) then\n npts_range_O3 = dimsizes(indexes_O3) ; # of points in this range.\n\n lat_O3(i,0:npts_range_O3-1) = obslat(indexes_O3)\n lon_O3(i,0:npts_range_O3-1) = obslon(indexes_O3)\n ;print(\"O3: \"+npts_range_O3)\n end if\n\n\n delete(indexes_O3) ; Necessary b/c \"indexes\" may be a different\n end do\n\n lat_O3@_FillValue = -999\n lon_O3@_FillValue = -999\n\n gsres = True\n gsres@gsMarkerIndex = 16 ; Use filled dots for markers.\n\n hollowres = True\n hollowres@gsMarkerIndex = 4\n hollowres@gsMarkerColor = \"black\"\n hollowres@gsMarkerSizeF = 0.008\n\n;;;;;;;;; Plot Ozone\n pname=plot_dir+\"/OBS-FORECAST_O3_\"+rundate+runtime\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n pmid_O3 = new(num_distinct_markers,graphic)\n hollow_O3 = new(num_distinct_markers,graphic)\n\n cnres@tiMainString = pdate+\" \"+runtime+\" UTC O~B~3~N~ (ppbV)\"\n cnres@cnLevelSelectionMode = \"ManualLevels\"\n cnres@cnMinLevelValF = 0.\n cnres@cnMaxLevelValF = 80\n cnres@cnLevelSpacingF = 4\n\n ;plot = gsn_csm_contour_map(wks,o3(it,:,:),res)\n map = gsn_csm_map(wks,mpres)\n contour = gsn_csm_contour(wks,o3(it,:,:),cnres)\n vector = gsn_csm_vector(wks,UV10(0,it,:,:),UV10(1,it,:,:),res_vc)\n overlay(map,contour)\n overlay(map,vector)\n\n pmid = new(num_distinct_markers,graphic)\n hollow = new(num_distinct_markers,graphic)\n do i = 0, num_distinct_markers-1\n if (.not.ismissing(lat_O3(i,0)))\n gsres@gsMarkerColor = colors(i,:)\n gsres@gsMarkerSizeF = 0.008\n gsres@gsMarkerThicknessF = 1\n pmid(i) = gsn_add_polymarker(wks,vector,lon_O3(i,:),lat_O3(i,:),gsres)\n hollow(i) = gsn_add_polymarker(wks,vector,lon_O3(i,:),lat_O3(i,:),hollowres)\n end if\n end do\n\n draw(map)\n frame(wks)\n delete(wks)\n delete(pmid_O3)\n delete(hollow_O3)\n system(\"composite -geometry 100x70+900+900 /groups/ESS/aalnaim/cmaq/mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\n\n\n delete(pmid)\n delete(hollow)\n delete(site)\n delete(sitename)\n delete(sitelat)\n delete(sitelon)\n delete(O3_obs)\n delete(obslon)\n delete(obslat)\n delete(obsO3)\n delete([/lon_O3,lat_O3/])\n\nend do\ndelete(res)\n\n;/\n\nend\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\n\nconvert -delay 100 /groups/ESS/aalnaim/cmaq/plots/OBS*.png /groups/ESS/aalnaim/cmaq/plots/\"Airnow_\"$YYYYMMDD_POST.gif\n\nif [ $? 
-eq 0 ]; then\n echo \"Generating AirNow images/gif Completed Successfully\"\n\techo \"Removing ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nelse\n echo \"Generating AirNow images/gif Failed!\"\n echo \"Removing ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nfi\n", + "lang" : "shell", + "owner" : "111111", + "confidential" : "FALSE" +},{ + "id" : "fsk7f2", + "name" : "evaluate_prediction_ncl", + "description" : null, + "code" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=$(date -d '3 day ago' '+%Y%m%d')\nexport wfname=\"/groups/ESS/aalnaim/cmaq/results/geoweaver_evalution_\"$YYYYMMDD_POST\"_results.txt\"\n\nexport obs_dir_NCL=\"/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X\"\nexport ofname=\"/AQF5X_Hourly_\"\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files/\"\n\nexport mfname=\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"$YYYYMMDD_POST\"_ML_extracted.nc\"\n\nexport grid_fname=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/GRIDCRO2D_\"$YYYYMMDD_POST\".nc\" #This needs to be auto date\n\nexport dx=12000\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\n\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\nsdate=getenv(\"YYYYMMDD_POST\")\nwfname=getenv(\"wfname\")\nobs_dir=getenv(\"obs_dir_NCL\")\nofname=getenv(\"ofname\")\nmod_dir=getenv(\"postdata_dir\")\nmfname=getenv(\"mfname\")\ndkm=tofloat(getenv(\"dx\"))\ngrid_fname=(getenv(\"grid_fname\"))\n\nmaxdist=dkm/90000.0*1.414\nmaxarea=0.25\nthd=70\n\n;-----read model lat lon------\n;read lat lon\nf1 = addfile(grid_fname,\"r\")\nmlat = f1->LAT(0,0,:,:)\nmlon = f1->LON(0,0,:,:)\ndelete(f1)\nmlat1d = ndtooned(mlat)\nmlon1d = ndtooned(mlon)\ndelete([/mlat,mlon/])\n\n;-----read cmaq results-----\nf2 = addfile(mod_dir+mfname,\"r\")\nmO3 = f2->O3(:,:,:) ;ppb\n\n\nnt = dimsizes(mO3(:,0,0))\nny = dimsizes(mO3(0,:,0))\nnx = dimsizes(mO3(0,0,:))\n\nm8O3 = new((/17,ny,nx/),\"double\")\nm8maxO3 = new((/ny,nx/),\"double\")\n\ndo ih=0,16\n m8O3(ih,:,:)=dim_avg_n(mO3(ih:ih+7,:,:),0)\nend do\nm8maxO3 = dim_max_n(m8O3,0) ;type double\nmO31d_d=ndtooned(m8maxO3) ; type double\nmO31d=tofloat(mO31d_d)\n\ndelete([/f2,mO3,m8O3,m8maxO3/])\n\n;-----read obs-----\nsyyyy1=str_get_cols(sdate,0,3)\nsmm1=str_get_cols(sdate,4,5)\nsdd1=str_get_cols(sdate,6,7)\n\nymd=jul2greg(greg2jul(tointeger(syyyy1),tointeger(smm1),tointeger(sdd1),-1)+1)\nsyyyy2=tostring_with_format(ymd(0),\"%0.4i\")\nsmm2=tostring_with_format(ymd(1),\"%0.2i\")\nsdd2=tostring_with_format(ymd(2),\"%0.2i\")\n\ntolat=(/-999.0/) ;set the first data to 0\ntolon=tolat\ntoO3=tolat\n\ndo ih=12,35\n if (ih.lt.24) then\n shh=tostring_with_format(ih,\"%0.2i\")\n syyyy=syyyy1\n smm=smm1\n sdd=sdd1\n else\n shh=tostring_with_format(ih-24,\"%0.2i\")\n syyyy=syyyy2\n smm=smm2\n sdd=sdd2\n end if\n data=asciiread(obs_dir+ofname+syyyy+smm+sdd+shh+\".dat\",-1,\"string\")\n xx=array_append_record(tolat,stringtofloat(str_get_field(data(1::), 2,\",\")),0)\n 
yy=array_append_record(tolon,stringtofloat(str_get_field(data(1::), 3,\",\")),0)\n zz=array_append_record(toO3,stringtofloat(str_get_field(data(1::), 4,\",\")),0)\n delete([/tolat,tolon,toO3/])\n tolat=xx\n tolon=yy\n toO3=zz\n delete([/xx,yy,zz/])\n delete(data)\nend do\n\ntoO3@_FillValue = -999.0\n\n;-----calculate max ave 8 hour o3-----\noflag=tolat*0+1\naa=ind((oflag.gt.0).and.(toO3.ge.0))\nii=0\nprint(\"8h start\")\nif (any(ismissing(aa))) then\n iflag=0\nelse\n iflag=1\n olat=(/tolat(aa(0))/)\n olon=(/tolon(aa(0))/)\n oO3=(/-999.0/)\n o8O3 = new(17,\"float\")\n o8O3 = -999.0\nend if\ndelete(aa)\ndo while (iflag.gt.0)\n aa=ind((tolat.eq.olat(ii)).and.(tolon.eq.olon(ii)).and.(toO3.ge.0))\n oflag(aa)=0\n if (dimsizes(aa).eq.24) then ; calculate 24 h, so calculate 8hr ozone here\n do ih = 0, 16\n o8O3(ih) = avg(toO3(aa(ih:ih+7)))\n end do\n oO3(ii)=max(o8O3)\n end if\n o8O3 = -999.0\n delete(aa)\n aa=ind((oflag.gt.0).and.(toO3.ge.0))\n if (any(ismissing(aa))) then\n iflag=0\n else\n xx=array_append_record(olat,(/tolat(aa(0))/),0)\n yy=array_append_record(olon,(/tolon(aa(0))/),0)\n zz=array_append_record(oO3,(/-999.0/),0)\n delete([/olat,olon,oO3/])\n olat=xx\n olon=yy\n oO3=zz\n delete([/xx,yy,zz/])\n ii=ii+1\n end if\n delete(aa)\nend do\nprint(\"obs 8hour max end\")\naa=ind(oO3.ge.0)\nnobs=dimsizes(aa)\nolat24=olat(aa)\nolon24=olon(aa)\noO324=oO3(aa)\nprint(\"TYPE of oO324: \"+typeof(oO324))\ndelete([/aa,olat,olon,oO3/])\nmO324=oO324*0-999.0\nprint(\"TYPE of mO324: \"+typeof(mO324))\nprint(\"TYPE of mO31d: \"+typeof(mO31d))\nareaa=oO324*0-999.0\nareab=areaa\naread=areaa\n\n;-----find model point-----\ndo in=0,nobs-1\n dis=sqrt((mlat1d-olat24(in))^2+(mlon1d-olon24(in))^2)\n aa=minind(dis)\n ;print(in+\" \"+aa)\n if (dis(aa).lt.maxdist) then\n mO324(in)=mO31d(aa)\n cc=ind((mlat1d.ge.(olat24(in)-maxarea)).and.(mlat1d.le.(olat24(in)+maxarea)).and.\\\n (mlon1d.ge.(olon24(in)-maxarea)).and.(mlon1d.le.(olon24(in)+maxarea)))\n areaa(in)=0\n areab(in)=0\n if (oO324(in).ge.thd) then\n aread(in)=0\n if (max(mO31d(cc)).ge.thd) then\n areab(in)=1\n else\n aread(in)=1\n end if\n else\n bb=ind((olat24.ge.(olat24(in)-maxarea)).and.(olat24.le.(olat24(in)+maxarea)).and.\\\n (olon24.ge.(olon24(in)-maxarea)).and.(olon24.le.(olon24(in)+maxarea)))\n if (max(mO31d(aa)).ge.thd) then\n if (max(oO324(bb)).ge.thd) then\n areaa(in)=0\n else\n areaa(in)=1\n end if\n else\n areaa(in)=0\n end if\n delete(bb)\n end if\n delete(cc)\n end if\n delete(aa)\nend do\n\n;-----cal rmse corr nme nmb me mb-----\ntt=ind((mO324.ge.0).and.(oO324.ge.0))\n\nif (any(ismissing(tt))) then\n rmse=-999.0\n corr=-999.0\n nmb=-999.0\n nme=-999.0\n me=-999.0\n mb=-999.0\nelse\n rmse=dim_rmsd_n(oO324(tt),mO324(tt),0)\n corr=esccr(oO324(tt),mO324(tt),0)\n nmb=sum((mO324(tt)-oO324(tt)))/sum(oO324(tt))\n nme=sum(abs(oO324(tt)-mO324(tt)))/sum(oO324(tt))\n me=avg(abs(oO324(tt)-mO324(tt)))\n mb=avg((mO324(tt)-oO324(tt)))\nend if\n;-----cal ah afar-----\naa=ind((areaa+areab).gt.0)\nbb=ind((aread+areab).gt.0)\nif (any(ismissing(aa))) then\n afar=0.\nelse\n afar=tofloat(sum(areaa(aa)))/tofloat(sum(areab(aa))+sum(areaa(aa)))*100\nend if\ndelete(aa)\nif (any(ismissing(bb))) then\n ah=-999.0\nelse\n ah=tofloat(sum(areab(bb)))/tofloat(sum(areab(bb))+sum(aread(bb)))*100\nend if\ndelete(bb)\nwrite_table(wfname,\"a\",[/sdate,dimsizes(tt),avg(oO324(tt)),avg(mO324(tt)),rmse,corr,nmb,nme,mb,me,ah,afar/],\\\n \"%s,%i,%f,%f,%f,%f,%f,%f,%f,%f,%f,%f\")\ndelete(tt)\nend\n\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\n\nif [ $? 
-eq 0 ]; then\n echo \"Evaluation Completed Successfully\"\n\techo \"Removing ncl file: geoweaver_eva_daily_O3.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\nelse\n echo \"Evaluation Failed!\"\nfi\n", + "lang" : "shell", + "owner" : "111111", + "confidential" : "FALSE" +},{ + "id" : "ex3vh9", + "name" : "processing_test_data", + "description" : null, + "code" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\nfrom datetime import timedelta\n# home directory\nhome = str(Path.home())\n\nbase = datetime.datetime.today() - timedelta(days=2)\ndate_list = [base - timedelta(days=x) for x in range(2)]\ndays = [date.strftime('%Y%m%d') for date in date_list]\n\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# O3 variable\n \t# O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n \n # read emission results\n # old files before 20210315 are not in diractory. 
must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220303:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313 and int(i)<=20220331:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ff.append(o3tp)\n\n # NO\n# oo=df.variables['NO'][:].values[k,0]\n# oo3=np.ravel(oo)\n# o3tp=np.transpose(oo3)\n# o3tp=np.round(o3tp)\n# hh.append(o3tp) \n \n# read mcip results \n# date must be later of 20210101\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\n for j in files:\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n \t# NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n\n \t# NO2\n oo=df.variables['CFRAC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n \n \ncmaq_O3=list(np.concatenate(aa).flat) \nprint(len(cmaq_O3))\ndel aa\ncmaq_NO2=list(np.concatenate(bb).flat) \nprint(len(cmaq_NO2))\ndel bb\ncmaq_CO=list(np.concatenate(cc).flat) \nprint(len(cmaq_CO))\ndel cc\n\ncmaq_PM25_CO=list(np.concatenate(ee).flat)\n\ndel ee\nCO_emi=list(np.concatenate(ff).flat) \nprint(len(CO_emi))\ndel ff\n\n#NO_emi=list(np.concatenate(hh).flat) \n#del hh\nPRSFC=list(np.concatenate(ii).flat) \ndel ii\nPBL=list(np.concatenate(jj).flat) \ndel jj\nTEMP2=list(np.concatenate(kk).flat) \ndel kk\nWSPD10=list(np.concatenate(ll).flat) \ndel ll\nWDIR10=list(np.concatenate(mm).flat)\ndel mm\n\nRGRND=list(np.concatenate(oo1).flat) \ndel oo1\n#RN=list(np.concatenate(pp).flat)\n#del pp\n#RC=list(np.concatenate(qq).flat)\n#del qq\nCFRAC=list(np.concatenate(rr).flat)\nprint(len(CFRAC))\ndel rr\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 values if the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime 
date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH,'CMAQ12KM_O3(ppb)':cmaq_O3,'CMAQ12KM_NO2(ppb)':cmaq_NO2,'CMAQ12KM_CO(ppm)':cmaq_CO,'CMAQ_OC(ug/m3)':cmaq_PM25_CO,'CO(moles/s)':CO_emi,'PRSFC(Pa)':PRSFC,'PBL(m)':PBL,'TEMP2(K)':TEMP2,'WSPD10(m/s)':WSPD10,'WDIR10(degree)':WDIR10,'RGRND(W/m2)':RGRND,'CFRAC':CFRAC})\nprint(dat.head())\ndat.to_csv('/groups/ESS/aalnaim/cmaq/test_data.csv',index=False)\n\n\n", + "lang" : "python", + "owner" : "111111", + "confidential" : "FALSE" +},{ + "id" : "b8uv5z", + "name" : "test_data", + "description" : null, + "code" : "import pandas as pd\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\ncmaq=pd.read_csv(\"/groups/ESS/aalnaim/cmaq/test_data.csv\")\n\n# dropping unnecessary variables\ncmaq['YYYYMMDDHH'] = cmaq['YYYYMMDDHH'].map(str)\ncmaq['month'] = cmaq['YYYYMMDDHH'].str[4:6]\ncmaq['day'] = cmaq['YYYYMMDDHH'].str[6:8]\ncmaq['hours'] = cmaq['YYYYMMDDHH'].str[8:10]\n\n#new_df=cmaq.drop(['YYYYMMDDHH'],axis=1)\ncmaq.to_csv(\"/groups/ESS/aalnaim/cmaq/testing.csv\",index=False)", + "lang" : "python", + "owner" : "111111", + "confidential" : "FALSE" +},{ + "id" : "l8vlic", + "name" : "rf_prediction", + "description" : null, + "code" : "\n# Importing necessary libraries\nimport pandas as pd\nimport pickle\nfrom pathlib import Path\nfrom time import sleep\n\n# home directory\nhome = str(Path.home())\n# importing data\nfinal=pd.read_csv(\"/groups/ESS/aalnaim/cmaq/testing.csv\")\nprint(final.head())\nX = final.drop(['YYYYMMDDHH','Latitude','Longitude',],axis=1)\n# defining testing variables\n# processing test data\n\n# load the model from disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rfOLD_Jun13.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\nloaded_model = pickle.load(open(filename, 'rb'))\n\n# making prediction\npred = loaded_model.predict(X)\n\n# adding prediction values to test dataset\nfinal['prediction'] = pred.tolist()\n\nfinal = final[['Latitude', 'Longitude','YYYYMMDDHH','prediction']]\n# saving the dataset into local drive\nfinal.to_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv',index=False)", + "lang" : "python", + "owner" : "111111", + "confidential" : "FALSE" +},{ + "id" : "3asyzj", + "name" : "processing_test_netcdf", + "description" : null, + "code" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\nfrom datetime import timedelta\n# home directory\nhome = str(Path.home())\n\nbase = datetime.datetime.today() - timedelta(days=2)\ndate_list = [base - timedelta(days=x) for x in range(2)]\ndays = [date.strftime('%Y%m%d') for date in date_list]\n\n# nc file need to correspond to the same prediction date in \"/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf_Jun13.csv\"\n\nprint(\"Opening: \"+\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/COMBINE3D_ACONC_v531_gcc_AQF5X_\"+days[1]+\"_extracted.nc\")\n \ndf_cdf = xr.open_dataset(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/COMBINE3D_ACONC_v531_gcc_AQF5X_\"+days[1]+\"_extracted.nc\")\n\ndf_csv = 
pd.read_csv(\"/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv\")\ndf_csv['YYYYMMDDHH'] = df_csv['YYYYMMDDHH'].astype(str)\ndf_filt = df_csv[df_csv['YYYYMMDDHH'].str.contains(days[1]+\"|\"+days[0], case = False, regex=True)]\ndf_filt = df_filt[(df_filt['YYYYMMDDHH'] > days[1]+'11') & (df_filt['YYYYMMDDHH'] < days[0]+'12')]\n\n# Reshape \"prediction/Latitude/Longitude\" columns to (TSTEP, ROW, COL), these lines will reshape data into (24, 265, 442)\nreshaped_prediction = np.atleast_3d(df_filt['prediction']).reshape(-1, 265, 442)\n\n# Remove \"LAY\" Dimension in O3 variable already in nc file.\nreduced_dim = df_cdf['O3'].sel(LAY=1, drop=True)\n# Swap values from original nc file with new prediction data\nreduced_dim.values = reshaped_prediction\n\n# Apply changes to data variable in nc file\ndf_cdf['O3'] = (['TSTEP', 'ROW', 'COL'], reshaped_prediction)\n\ndf_cdf.to_netcdf('/groups/ESS/aalnaim/cmaq/prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_'+days[1]+'_ML_extracted.nc')\n\nprint('Saved updated netCDF file: /groups/ESS/aalnaim/cmaq/prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_'+days[1]+'_ML_extracted.nc')", + "lang" : "python", + "owner" : "111111", + "confidential" : "FALSE" +}] diff --git a/code/processing_test_data.py b/code/processing_test_data.py new file mode 100644 index 0000000..e807997 --- /dev/null +++ b/code/processing_test_data.py @@ -0,0 +1,213 @@ +import xarray as xr +import pandas as pd +import glob, os +import numpy as np +from pathlib import Path +import datetime +from datetime import timedelta +# home directory +home = str(Path.home()) + +base = datetime.datetime.today() - timedelta(days=2) +date_list = [base - timedelta(days=x) for x in range(2)] +days = [date.strftime('%Y%m%d') for date in date_list] + +aa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[] +#ff=[] +# k = time dimension - start from 12 to match with data +t = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11] +for i in days: + print(i) + # read cmaq results + # old files before 20210315 are not in diractory. must choose later date. + if int(i)>=20210315 and int(i)<=20210902: + files = glob.glob("/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/"+"COMBINE3D_ACONC_v531_gcc_AQF5X_"+i+"_extracted.nc") + else: + files = glob.glob("/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/"+"COMBINE3D_ACONC_v531_gcc_AQF5X_"+i+"_extracted.nc") + for j in files: + + df = xr.open_dataset(j) + for k in t: + # O3 variable + # O3 variable + oo=df.variables['O3'][:].values[k,0] + oo3=np.ravel(oo) + o3tp=np.transpose(oo3) + o3tp=np.round(o3tp) + aa.append(o3tp) + # NO2 + oo=df.variables['NO2'][:].values[k,0] + oo3=np.ravel(oo) + o3tp=np.transpose(oo3) + o3tp=np.round(o3tp) + bb.append(o3tp) + # CO + oo=df.variables['CO'][:].values[k,0] + oo3=np.ravel(oo) + o3tp=np.transpose(oo3) + o3tp=np.round(o3tp) + cc.append(o3tp) + + # PM25_CO + oo=df.variables['PM25_OC'][:].values[k,0] + oo3=np.ravel(oo) + o3tp=np.transpose(oo3) + o3tp=np.round(o3tp) + ee.append(o3tp) + + + # read emission results + # old files before 20210315 are not in diractory. must choose later date. 
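+    # The date windows below select the emission file naming convention in use at
+    # the time: runs up to 2021-09-02 use the *_nobeis_2016fh_16j.ncf files, while
+    # the later windows use *_cmaq_cb6ae7_2017gb_17j.ncf. A date outside every
+    # window leaves `files` still pointing at the CMAQ list from the block above,
+    # so these windows need to be kept current (see the "set todays date" note).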
+ if int(i)>=20191231 and int(i)<=20210902: + files = glob.glob("/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/"+"emis_mole_all_"+i+"_AQF5X_nobeis_2016fh_16j.ncf") + elif int(i)==20220303: + files = glob.glob("/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/"+"emis_mole_all_"+i+"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf") + +# set todays date if they don't change dataformate +# else if int(i)>=20220313 and int(i)<=int(today): + elif int(i)>=20220313 and int(i)<=20220331: + files = glob.glob("/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/"+"emis_mole_all_"+i+"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf") + for j in files: + + df = xr.open_dataset(j) + for k in t: + # CO variable + oo=df.variables['CO'][:].values[k,0] + oo3=np.ravel(oo) + o3tp=np.transpose(oo3) + o3tp=np.round(o3tp) + ff.append(o3tp) + + # NO +# oo=df.variables['NO'][:].values[k,0] +# oo3=np.ravel(oo) +# o3tp=np.transpose(oo3) +# o3tp=np.round(o3tp) +# hh.append(o3tp) + +# read mcip results +# date must be later of 20210101 + files = glob.glob("/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/"+"METCRO2D_"+i+".nc") + for j in files: + df = xr.open_dataset(j) + for k in t: + # CO variable + oo=df.variables['PRSFC'][:].values[k,0] + oo3=np.ravel(oo) + o3tp=np.transpose(oo3) + o3tp=np.round(o3tp) + ii.append(o3tp) + # NO2 + oo=df.variables['PBL'][:].values[k,0] + oo3=np.ravel(oo) + o3tp=np.transpose(oo3) + o3tp=np.round(o3tp) + jj.append(o3tp) + # NO + oo=df.variables['TEMP2'][:].values[k,0] + oo3=np.ravel(oo) + o3tp=np.transpose(oo3) + o3tp=np.round(o3tp) + kk.append(o3tp) + # NO + oo=df.variables['WSPD10'][:].values[k,0] + oo3=np.ravel(oo) + o3tp=np.transpose(oo3) + o3tp=np.round(o3tp) + ll.append(o3tp) + # NO + oo=df.variables['WDIR10'][:].values[k,0] + oo3=np.ravel(oo) + o3tp=np.transpose(oo3) + o3tp=np.round(o3tp) + mm.append(o3tp) + + # NO + oo=df.variables['RGRND'][:].values[k,0] + oo3=np.ravel(oo) + o3tp=np.transpose(oo3) + o3tp=np.round(o3tp) + oo1.append(o3tp) + + # NO2 + oo=df.variables['CFRAC'][:].values[k,0] + oo3=np.ravel(oo) + o3tp=np.transpose(oo3) + o3tp=np.round(o3tp) + rr.append(o3tp) + + +cmaq_O3=list(np.concatenate(aa).flat) +print(len(cmaq_O3)) +del aa +cmaq_NO2=list(np.concatenate(bb).flat) +print(len(cmaq_NO2)) +del bb +cmaq_CO=list(np.concatenate(cc).flat) +print(len(cmaq_CO)) +del cc + +cmaq_PM25_CO=list(np.concatenate(ee).flat) + +del ee +CO_emi=list(np.concatenate(ff).flat) +print(len(CO_emi)) +del ff + +#NO_emi=list(np.concatenate(hh).flat) +#del hh +PRSFC=list(np.concatenate(ii).flat) +del ii +PBL=list(np.concatenate(jj).flat) +del jj +TEMP2=list(np.concatenate(kk).flat) +del kk +WSPD10=list(np.concatenate(ll).flat) +del ll +WDIR10=list(np.concatenate(mm).flat) +del mm + +RGRND=list(np.concatenate(oo1).flat) +del oo1 +#RN=list(np.concatenate(pp).flat) +#del pp +#RC=list(np.concatenate(qq).flat) +#del qq +CFRAC=list(np.concatenate(rr).flat) +print(len(CFRAC)) +del rr + +## selecting lat and long +df = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D') +lat_1 = df.variables['LAT'][:].values[0,0] +lat_flt=np.ravel(lat_1) +# need to manipulate 48 values if the next day data is available +LAT=np.tile(lat_flt,len(days)*24) +print(len(LAT)) +# long +lon_1 = df.variables['LON'][:].values[0,0] +lon_flt=np.ravel(lon_1) +# need to manipulate 48 values if the next day data is available +LON=np.tile(lon_flt,len(days)*24) +print(len(LON)) +# creating dataframe + +## creatime date-time dimension +# date-time dimension for today +time0=[] +t = 
+for i in days:
+    for j in t:
+        time_0=np.full((265,442),i+j)
+        time0.append(time_0)
+YYMMDDHH=list(np.concatenate(time0).flat)
+print(len(YYMMDDHH))
+
+
+# saving variables
+dat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH,'CMAQ12KM_O3(ppb)':cmaq_O3,'CMAQ12KM_NO2(ppb)':cmaq_NO2,'CMAQ12KM_CO(ppm)':cmaq_CO,'CMAQ_OC(ug/m3)':cmaq_PM25_CO,'CO(moles/s)':CO_emi,'PRSFC(Pa)':PRSFC,'PBL(m)':PBL,'TEMP2(K)':TEMP2,'WSPD10(m/s)':WSPD10,'WDIR10(degree)':WDIR10,'RGRND(W/m2)':RGRND,'CFRAC':CFRAC})
+print(dat.head())
+dat.to_csv('/groups/ESS/aalnaim/cmaq/test_data.csv',index=False)
+
+
+
diff --git a/code/processing_test_netcdf.py b/code/processing_test_netcdf.py
new file mode 100644
index 0000000..218f588
--- /dev/null
+++ b/code/processing_test_netcdf.py
@@ -0,0 +1,39 @@
+import xarray as xr
+import pandas as pd
+import glob, os
+import numpy as np
+from pathlib import Path
+import datetime
+from datetime import timedelta
+# home directory
+home = str(Path.home())
+
+base = datetime.datetime.today() - timedelta(days=2)
+date_list = [base - timedelta(days=x) for x in range(2)]
+days = [date.strftime('%Y%m%d') for date in date_list]
+
+# The nc file needs to correspond to the same prediction date as "/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf_Jun13.csv"
+
+print("Opening: "+"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/COMBINE3D_ACONC_v531_gcc_AQF5X_"+days[1]+"_extracted.nc")
+
+df_cdf = xr.open_dataset("/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/COMBINE3D_ACONC_v531_gcc_AQF5X_"+days[1]+"_extracted.nc")
+
+df_csv = pd.read_csv("/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv")
+df_csv['YYYYMMDDHH'] = df_csv['YYYYMMDDHH'].astype(str)
+df_filt = df_csv[df_csv['YYYYMMDDHH'].str.contains(days[1]+"|"+days[0], case = False, regex=True)]
+df_filt = df_filt[(df_filt['YYYYMMDDHH'] > days[1]+'11') & (df_filt['YYYYMMDDHH'] < days[0]+'12')]
+
+# Reshape the "prediction" column to (TSTEP, ROW, COL); these lines reshape the data into (24, 265, 442)
+reshaped_prediction = np.atleast_3d(df_filt['prediction']).reshape(-1, 265, 442)
+
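+# A quick sanity check along these lines (an illustrative sketch, not part of the
+# original workflow) can catch a row-count mismatch before the reshape, assuming
+# 24 hourly steps on the 265 x 442 grid:
+#   assert len(df_filt) == 24 * 265 * 442, "unexpected number of prediction rows"
+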
+# Remove the "LAY" dimension from the O3 variable already in the nc file.
+reduced_dim = df_cdf['O3'].sel(LAY=1, drop=True)
+# Swap values from original nc file with new prediction data
+reduced_dim.values = reshaped_prediction
+
+# Apply changes to data variable in nc file
+df_cdf['O3'] = (['TSTEP', 'ROW', 'COL'], reshaped_prediction)
+
+df_cdf.to_netcdf('/groups/ESS/aalnaim/cmaq/prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_'+days[1]+'_ML_extracted.nc')
+
+print('Saved updated netCDF file: /groups/ESS/aalnaim/cmaq/prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_'+days[1]+'_ML_extracted.nc')
diff --git a/code/rf_prediction.py b/code/rf_prediction.py
new file mode 100644
index 0000000..90a4749
--- /dev/null
+++ b/code/rf_prediction.py
@@ -0,0 +1,30 @@
+
+# Importing necessary libraries
+import pandas as pd
+import pickle
+from pathlib import Path
+from time import sleep
+
+# home directory
+home = str(Path.home())
+# importing data
+final=pd.read_csv("/groups/ESS/aalnaim/cmaq/testing.csv")
+print(final.head())
+X = final.drop(['YYYYMMDDHH','Latitude','Longitude',],axis=1)
+# defining testing variables (the remaining feature columns)
+# processing test data
+
+# load the model from disk
+filename = '/groups/ESS/aalnaim/cmaq/models/rfOLD_Jun13.sav'
+#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'
+loaded_model = pickle.load(open(filename, 'rb'))
+
+# making prediction
+pred = loaded_model.predict(X)
+
+# adding prediction values to test dataset
+final['prediction'] = pred.tolist()
+
+final = final[['Latitude', 'Longitude','YYYYMMDDHH','prediction']]
+# saving the dataset into local drive
+final.to_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv',index=False)
diff --git a/code/test_data.py b/code/test_data.py
new file mode 100644
index 0000000..0424d8b
--- /dev/null
+++ b/code/test_data.py
@@ -0,0 +1,15 @@
+import pandas as pd
+from pathlib import Path
+
+# home directory
+home = str(Path.home())
+cmaq=pd.read_csv("/groups/ESS/aalnaim/cmaq/test_data.csv")
+
+# derive month, day, and hour columns from the YYYYMMDDHH string
+cmaq['YYYYMMDDHH'] = cmaq['YYYYMMDDHH'].map(str)
+cmaq['month'] = cmaq['YYYYMMDDHH'].str[4:6]
+cmaq['day'] = cmaq['YYYYMMDDHH'].str[6:8]
+cmaq['hours'] = cmaq['YYYYMMDDHH'].str[8:10]
+
+#new_df=cmaq.drop(['YYYYMMDDHH'],axis=1)
+cmaq.to_csv("/groups/ESS/aalnaim/cmaq/testing.csv",index=False)
diff --git a/history/1KzgttZ8Vy2DjouN52.json b/history/1KzgttZ8Vy2DjouN52.json
new file mode 100644
index 0000000..3dfe789
--- /dev/null
+++ b/history/1KzgttZ8Vy2DjouN52.json
@@ -0,0 +1,31 @@
+[{
+ "history_id" : "py4wx8nmbmr",
+ "history_input" : "\n# Importing necessary libraries\nimport pandas as pd\nimport pickle\nfrom pathlib import Path\nfrom time import sleep\n\n# home directory\nhome = str(Path.home())\n# importing data\nfinal=pd.read_csv(home+'/cmaq/testing.csv')\nprint(final.head())\nX = final.drop(['YYYYMMDDHH','Latitude','Longitude',],axis=1)\n# defining testing variables\n# processing test data\n\n# load the model from disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\nloaded_model = pickle.load(open(filename, 'rb'))\n\n# making prediction\npred = loaded_model.predict(X)\n\n# adding prediction values to test dataset\nfinal['prediction'] = pred.tolist()\n\nfinal = final[['Latitude', 'Longitude','YYYYMMDDHH','prediction']]\n# saving the dataset into local drive\nfinal.to_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv',index=False)",
+ "history_output" : " Latitude Longitude YYYYMMDDHH ... month day hours\n0 21.829086 -120.620790 2022052712 ... 5 27 12\n1 21.855751 -120.512500 2022052712 ... 
5 27 12\n2 21.882309 -120.404144 2022052712 ... 5 27 12\n3 21.908745 -120.295715 2022052712 ... 5 27 12\n4 21.935051 -120.187225 2022052712 ... 5 27 12\n\n[5 rows x 18 columns]\n", + "history_begin_time" : 1654535473068, + "history_end_time" : 1655255389515, + "history_notes" : null, + "history_process" : "l8vlic", + "host_id" : "p6wvf2", + "indicator" : "Stopped" +},{ + "history_id" : "wn2hbzhu2ay", + "history_input" : "echo \"#!/bin/bash\n#SBATCH --partition=gpuq # the DGX only belongs in the 'gpu' partition\n#SBATCH --qos=gpu # need to select 'gpu' QoS\n#SBATCH --job-name=cmaq-gpu\n#SBATCH --output=cmaq-gpu.%j.out\n#SBATCH --error=cmaq-gpu.%j.err\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=64 # up to 128;\n#SBATCH --gres=gpu:A100.40gb:4 # up to 8; only request what you need\n#SBATCH --mem-per-cpu=3500M # memory per CORE; total memory is 1 TB (1,000,000 MB)\n#SBATCH --export=ALL\n#SBATCH --time=0-04:00:00 # set to 1hr; please choose carefully\nset echo\numask 0027\n# to see ID and state of GPUs assigned\nnvidia-smi\n\nmodule load python\nsource /home/aalnaim/CMAQAI/bin/activate\n\ncat <>/groups/ESS/aalnaim/cmaq/rf_pyCaret.py\n# Write first python in Geoweaver# NASA GEOWEAVER\n# CMAQ-AI Model: Training Voting-XGBoost model\n\n# Importing necessary libraries\nimport pandas as pd\nimport sklearn\nfrom sklearn.ensemble import RandomForestRegressor\nfrom xgboost.sklearn import XGBRegressor\nimport pickle\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\n\n# importing data\nfinal=pd.read_csv('/groups/ESS/mislam25/processed_training/agg_data_2021_03_15_to_22_4_30.csv')\nprint(final.head())\nfinal=final.dropna()\n\n# Processing training data\nX = final.drop(['AirNOW_O3','Latitude_x','Longitude_x'],axis=1)\ny = final['AirNOW_O3']\n\nrf = RandomForestRegressor(bootstrap=True, ccp_alpha=0.0, criterion='mse',\n max_depth=None, max_features='auto', max_leaf_nodes=None,\n max_samples=None, min_impurity_decrease=0.0,\n min_samples_leaf=1,\n min_samples_split=2, min_weight_fraction_leaf=0.0,\n n_estimators=100, n_jobs=-1, oob_score=False,\n random_state=3086, verbose=0, warm_start=False)\n\n#rf.fit(X, y)\n\n# save the model to disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\n#pickle.dump(rf, open(filename, 'wb'))\nprint(\"IT WORKED\")\nEOF\npython /groups/ESS/aalnaim/cmaq/rf_pyCaret.py\" >> /groups/ESS/aalnaim/cmaq/cmaq_gpu.slurm\n\n", + "history_output" : "Running", + "history_begin_time" : 1654535458975, + "history_end_time" : 1654535464146, + "history_notes" : null, + "history_process" : "wny2dz", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "0dbnqvd4erk", + "history_input" : "#!/bin/bash\nsbatch /groups/ESS/aalnaim/cmaq/cmaq_gpu.slurm", + "history_output" : "Submitted batch job 268227\n", + "history_begin_time" : 1654535465938, + "history_end_time" : 1654535470564, + "history_notes" : null, + "history_process" : "rjm4qm", + "host_id" : "p6wvf2", + "indicator" : "Done" +}] diff --git a/history/53Tc6ThNV1FS28LSag.json b/history/53Tc6ThNV1FS28LSag.json new file mode 100644 index 0000000..8aca9ae --- /dev/null +++ b/history/53Tc6ThNV1FS28LSag.json @@ -0,0 +1,71 @@ +[{ + "history_id" : "n6lv58mihra", + "history_input" : "\n# Importing necessary libraries\nimport pandas as pd\nimport pickle\nfrom pathlib import Path\nfrom time import sleep\n\n# home directory\nhome = str(Path.home())\n# importing data\nfinal=pd.read_csv(\"/groups/ESS/aalnaim/cmaq/testing.csv\")\nprint(final.head())\nX = 
final.drop(['YYYYMMDDHH','Latitude','Longitude',],axis=1)\n# defining testing variables\n# processing test data\n\n# load the model from disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rfOLD_Jun13.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\nloaded_model = pickle.load(open(filename, 'rb'))\n\n# making prediction\npred = loaded_model.predict(X)\n\n# adding prediction values to test dataset\nfinal['prediction'] = pred.tolist()\n\nfinal = final[['Latitude', 'Longitude','YYYYMMDDHH','prediction']]\n# saving the dataset into local drive\nfinal.to_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv',index=False)", + "history_output" : "bash: python: command not found\n", + "history_begin_time" : 1656428421513, + "history_end_time" : 1656428429351, + "history_notes" : null, + "history_process" : "l8vlic", + "host_id" : "p6wvf2", + "indicator" : "Stopped" +},{ + "history_id" : "tlpe3au2vjr", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\nfrom datetime import timedelta\n# home directory\nhome = str(Path.home())\n\nbase = datetime.datetime.today() - timedelta(days=2)\ndate_list = [base - timedelta(days=x) for x in range(3)]\ndays = [date.strftime('%Y%m%d') for date in date_list]\n\n# nc file need to correspond to the same prediction date in \"/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf_Jun13.csv\"\ndf_cdf = xr.open_dataset(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/COMBINE3D_ACONC_v531_gcc_AQF5X_\"+days[2]+\"_extracted.nc\")\n\ndf_csv = pd.read_csv(\"/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv\")\ndf_csv['YYYYMMDDHH'] = df_csv['YYYYMMDDHH'].astype(str)\ndf_filt = df_csv[df_csv['YYYYMMDDHH'].str.contains(days[1]+\"|\"+days[0], case = False, regex=True)]\ndf_filt = df_filt[(df_filt['YYYYMMDDHH'] > days[1]+'11') & (df_filt['YYYYMMDDHH'] < days[0]+'12')]\n\n# Reshape \"prediction/Latitude/Longitude\" columns to (TSTEP, ROW, COL), these lines will reshape data into (24, 265, 442)\nreshaped_prediction = np.atleast_3d(df_filt['prediction']).reshape(-1, 265, 442)\n\n# Remove \"LAY\" Dimension in O3 variable already in nc file.\nreduced_dim = df_cdf['O3'].sel(LAY=1, drop=True)\n# Swap values from original nc file with new prediction data\nreduced_dim.values = reshaped_prediction\n\n# Apply changes to data variable in nc file\ndf_cdf['O3'] = (['TSTEP', 'ROW', 'COL'], reshaped_prediction)\n\ndf_cdf.to_netcdf('/groups/ESS/aalnaim/cmaq/prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_'+days[1]+'_ML_extracted.nc')\n\nprint('Saved updated netCDF file: /groups/ESS/aalnaim/cmaq/prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_'+days[1]+'_ML_extracted.nc')", + "history_output" : "bash: python: command not found\n", + "history_begin_time" : 1656428425283, + "history_end_time" : 1656428429355, + "history_notes" : null, + "history_process" : "3asyzj", + "host_id" : "p6wvf2", + "indicator" : "Stopped" +},{ + "history_id" : "gopwj98w0pu", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=$(date -d '3 day ago' '+%Y%m%d') #This needs to be auto date `date -d \"-2 day ${1}\" +%Y%m%d`\nexport stdate_post=$(date -d '3 day ago' '+%Y-%m-%d') #This needs to be auto date\nexport eddate_post=$(date -d '2 day ago' '+%Y%m%d') #This needs to be auto date\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files\"\nexport mcip_dir=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km\"\nexport 
dir_graph=\"/groups/ESS/aalnaim/cmaq/plots\"\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\n\ndate = getenv(\"YYYYMMDD_POST\")\nd1 = getenv(\"stdate_post\")\nd2 = getenv(\"eddate_post\")\n\n;print(\"Passed Date: \"+date)\n\n;aconc_dir = getenv(\"postdata_dir\")\ngrid_dir = getenv(\"mcip_dir\")\nplot_dir = getenv(\"dir_graph\")\n\ncdf_file1 = addfile(\"/groups/ESS/aalnaim/cmaq/prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_\"+date+\"_ML_extracted.nc\",\"r\")\ncdf_file= addfile(grid_dir+\"/GRIDCRO2D_\"+date+\".nc\",\"r\")\n\nptime = (/\"12\",\"13\",\"14\",\"15\",\"16\",\"17\",\"18\",\"19\",\"20\",\"21\",\"22\",\"23\",\"00\",\"01\",\"02\",\"03\",\"04\",\"05\",\"06\",\"07\",\"08\",\"09\",\"10\",\"11\"/)\n\ntime = cdf_file1->TFLAG(:,0,:)\no3 = cdf_file1->O3(:,:,:) ;ppb\n;pm25 = cdf_file1->PM25_TOT(:,0,:,:)\n\n\nnt = dimsizes(o3(:,0,0))\nny = dimsizes(o3(0,:,0))\nnx = dimsizes(o3(0,0,:))\n\nprint(nt+\" \"+ny+\" \"+nx)\nprint(max(o3))\nprint(min(o3))\nprint(avg(o3))\n\n;print(max(pm25))\n;print(min(pm25))\n;print(avg(pm25))\n\n;print(time)\n\nlat = cdf_file->LAT(0,0,:,:)\nlon = cdf_file->LON(0,0,:,:)\n\no3@lat2d = lat\no3@lon2d = lon\n\nres = True\nres@gsnMaximize = True ; maximize pot in frame\nres@gsnFrame = False ; don't advance frame\nres@gsnDraw = False\n;res@gsnSpreadColors = True\nres@lbLabelAutoStride = True\n;res@lbBoxLinesOn = False\nres@pmLabelBarHeightF = 0.1\nres@pmLabelBarWidthF = 0.5\nres@cnFillOn=True\n;res@cnMonoFillPattern=True\n;res@cnMonoLineColor=True\nres@cnLinesOn=False\n;res@pmLabelBarDisplayMode=\"never\"\nres@gsnLeftString = \"\";\nres@gsnRightString = \"\"\n\nres@mpLimitMode = \"LatLon\"\nres@mpMinLonF = -120 ;min(lon)+0.2\nres@mpMaxLonF = -70 ;max(lon)-0.2\nres@mpMinLatF = 25 ;min(lat)+0.05\nres@mpMaxLatF = 50 ;max(lat)-0.05\nres@mpDataBaseVersion = \"MediumRes\"\n;res@tiMainString = times(it)\nres@mpDataBaseVersion = \"MediumRes\"\nres@mpDataSetName = \"Earth..4\"\nres@mpAreaMaskingOn = True\nres@mpOutlineBoundarySets = \"GeophysicalAndUSStates\"\nres@mpOutlineSpecifiers=\"United States : States\"\nres@mpLandFillColor = \"white\"\nres@mpInlandWaterFillColor = \"white\"\nres@mpOceanFillColor = \"white\"\nres@mpGeophysicalLineColor = \"Black\"\nres@mpGeophysicalLineThicknessF = 1.5\n\n;res@gsnSpreadColors = True\nres@lbLabelAutoStride = True\nres@lbLabelFont = 25\nres@tiXAxisFont = 25\nres@pmTickMarkDisplayMode = \"Always\"\nres@tmXBLabelFont = 25\nres@tmXBLabelFontHeightF = 0.013\nres@tmXBLabelDeltaF = -0.5\nres@tmYLLabelFont = 25\nres@tmYLLabelFontHeightF = 0.013\nres@tmXBLabelDeltaF = -0.5\nres@tmXTLabelsOn = False\nres@tmXTLabelFont = 25\nres@tmXTLabelFontHeightF = 0.013\nres@tmYRLabelsOn = False\nres@tmYRLabelFont = 25\nres@tmYRLabelFontHeightF = 0.013\n\n\nres@mpProjection = \"LambertConformal\" ;\"CylindricalEquidistant\"\nres@mpLambertParallel1F = 33.\nres@mpLambertParallel2F = 45.\nres@mpLambertMeridianF = -98.\n\nres@cnLevelSelectionMode = \"ManualLevels\"\nres@cnMinLevelValF = 0.\nres@cnMaxLevelValF = 80\nres@cnLevelSpacingF = 4\n\nres@txFont = 
\"times-roman\"\nres@tiMainFont = \"times-roman\"\n\ndo it = 0, nt-1\n if (it .lt. 12) then\n pdate=d1\n else\n pdate=d2\n end if\n\n pname=plot_dir+\"/testPlot_\"+pdate+\"_\"+ptime(it)\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n res@tiMainString = pdate+\" \"+ptime(it)+\" UTC O~B~3~N~ Forecast (ppbV)\"\n plot = gsn_csm_contour_map(wks,o3(it,:,:),res)\n draw(plot)\n frame(wks)\n delete(wks)\n system(\"composite -geometry 100x70+900+900 /groups/ESS/aalnaim/cmaq/mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\nend do\ndelete(res)\n\nend\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\n\n# convert -delay 100 *.png 20220613_20220614.gif\nconvert -delay 100 /groups/ESS/aalnaim/cmaq/plots/testPlot*.png /groups/ESS/aalnaim/cmaq/plots/Map_$YYYYMMDD_POST_correct.gif\n\nif [ $? -eq 0 ]; then\n echo \"Generating images/gif Completed Successfully\"\n\techo \"Removing ncl file: geoweaver_plot_daily_O3.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\nelse\n echo \"Generating images/gif Failed!\"\n echo \"Removing ncl file: geoweaver_plot_daily_O3.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\nfi\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\nwarning:_NclOpenFile: cannot open file ; No such file or directory\n\n\nfatal:file (cdf_file1) isn't defined\n\nfatal:[\"Execute.c\":8637]:Execute: Error occurred at or near line 26 in file /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\n\n\nconvert: unable to open image `/groups/ESS/aalnaim/cmaq/plots/testPlot*.png': No such file or directory @ error/blob.c/OpenBlob/2881.\nconvert: no images defined `/groups/ESS/aalnaim/cmaq/plots/Map_.gif' @ error/convert.c/ConvertImageCommand/3226.\nGenerating images/gif Failed!\nRemoving ncl file: geoweaver_plot_daily_O3.ncl...\n", + "history_begin_time" : 1656428429051, + "history_end_time" : 1656428434766, + "history_notes" : null, + "history_process" : "iicy7w", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "80uk9m9po5w", + "history_input" : null, + "history_output" : null, + "history_begin_time" : null, + "history_end_time" : 1656428429367, + "history_notes" : null, + "history_process" : "is1w3m", + "host_id" : "p6wvf2", + "indicator" : "Stopped" +},{ + "history_id" : "vf16mbs2yqi", + "history_input" : null, + "history_output" : null, + "history_begin_time" : null, + "history_end_time" : 1656428429374, + "history_notes" : null, + "history_process" : "is1w3m", + "host_id" : "p6wvf2", + "indicator" : "Stopped" +},{ + "history_id" : "vwnsae06o0m", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\nfrom datetime import timedelta\n# home directory\nhome = str(Path.home())\n\nbase = datetime.datetime.today() - timedelta(days=2)\ndate_list = [base - timedelta(days=x) for x in range(2)]\ndays = [date.strftime('%Y%m%d') for date in date_list]\n\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are 
not in diractory. must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# O3 variable\n \t# O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n \n # read emission results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220303:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313 and int(i)<=20220331:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ff.append(o3tp)\n\n # NO\n# oo=df.variables['NO'][:].values[k,0]\n# oo3=np.ravel(oo)\n# o3tp=np.transpose(oo3)\n# o3tp=np.round(o3tp)\n# hh.append(o3tp) \n \n# read mcip results \n# date must be later of 20210101\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\n for j in files:\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n \t# NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n\n \t# NO2\n oo=df.variables['CFRAC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n \n \ncmaq_O3=list(np.concatenate(aa).flat) \nprint(len(cmaq_O3))\ndel aa\ncmaq_NO2=list(np.concatenate(bb).flat) \nprint(len(cmaq_NO2))\ndel bb\ncmaq_CO=list(np.concatenate(cc).flat) \nprint(len(cmaq_CO))\ndel 
cc\n\ncmaq_PM25_CO=list(np.concatenate(ee).flat)\n\ndel ee\nCO_emi=list(np.concatenate(ff).flat) \nprint(len(CO_emi))\ndel ff\n\n#NO_emi=list(np.concatenate(hh).flat) \n#del hh\nPRSFC=list(np.concatenate(ii).flat) \ndel ii\nPBL=list(np.concatenate(jj).flat) \ndel jj\nTEMP2=list(np.concatenate(kk).flat) \ndel kk\nWSPD10=list(np.concatenate(ll).flat) \ndel ll\nWDIR10=list(np.concatenate(mm).flat)\ndel mm\n\nRGRND=list(np.concatenate(oo1).flat) \ndel oo1\n#RN=list(np.concatenate(pp).flat)\n#del pp\n#RC=list(np.concatenate(qq).flat)\n#del qq\nCFRAC=list(np.concatenate(rr).flat)\nprint(len(CFRAC))\ndel rr\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 values if the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH,'CMAQ12KM_O3(ppb)':cmaq_O3,'CMAQ12KM_NO2(ppb)':cmaq_NO2,'CMAQ12KM_CO(ppm)':cmaq_CO,'CMAQ_OC(ug/m3)':cmaq_PM25_CO,'CO(moles/s)':CO_emi,'PRSFC(Pa)':PRSFC,'PBL(m)':PBL,'TEMP2(K)':TEMP2,'WSPD10(m/s)':WSPD10,'WDIR10(degree)':WDIR10,'RGRND(W/m2)':RGRND,'CFRAC':CFRAC})\nprint(dat.head())\ndat.to_csv('/groups/ESS/aalnaim/cmaq/test_data.csv',index=False)\n\n\n", + "history_output" : "bash: python: command not found\n", + "history_begin_time" : 1656428411868, + "history_end_time" : 1656428429384, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : "p6wvf2", + "indicator" : "Stopped" +},{ + "history_id" : "gs063ewii5p", + "history_input" : "import pandas as pd\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\ncmaq=pd.read_csv(\"/groups/ESS/aalnaim/cmaq/test_data.csv\")\n\n# dropping unnecessary variables\ncmaq['YYYYMMDDHH'] = cmaq['YYYYMMDDHH'].map(str)\ncmaq['month'] = cmaq['YYYYMMDDHH'].str[4:6]\ncmaq['day'] = cmaq['YYYYMMDDHH'].str[6:8]\ncmaq['hours'] = cmaq['YYYYMMDDHH'].str[8:10]\n\n#new_df=cmaq.drop(['YYYYMMDDHH'],axis=1)\ncmaq.to_csv(\"/groups/ESS/aalnaim/cmaq/testing.csv\",index=False)", + "history_output" : "bash: python: command not found\n", + "history_begin_time" : 1656428417899, + "history_end_time" : 1656428429386, + "history_notes" : null, + "history_process" : "b8uv5z", + "host_id" : "p6wvf2", + "indicator" : "Stopped" +}] diff --git a/history/5JvyyZEPJp4UIfaebB.json b/history/5JvyyZEPJp4UIfaebB.json new file mode 100644 index 0000000..7801eb4 --- /dev/null +++ b/history/5JvyyZEPJp4UIfaebB.json @@ -0,0 +1,31 @@ +[{ + "history_id" : "7wczndrdr2h", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=$(date -d '3 day ago' '+%Y%m%d') #This needs to be auto date `date -d \"-2 day ${1}\" +%Y%m%d`\nexport stdate_post=$(date -d '3 day ago' '+%Y-%m-%d') #This needs to be auto date\nexport eddate_post=$(date -d '2 day ago' '+%Y-%m-%d') #This needs to be auto date\n\nexport 
postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files\"\nexport mcip_dir=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km\"\nexport dir_graph=\"/groups/ESS/aalnaim/cmaq/plots\"\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\n\ndate = getenv(\"YYYYMMDD_POST\")\nd1 = getenv(\"stdate_post\")\nd2 = getenv(\"eddate_post\")\n\n;print(\"Passed Date: \"+date)\n\n;aconc_dir = getenv(\"postdata_dir\")\ngrid_dir = getenv(\"mcip_dir\")\nplot_dir = getenv(\"dir_graph\")\n\ncdf_file1 = addfile(\"/groups/ESS/aalnaim/cmaq/prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_\"+date+\"_ML_extracted.nc\",\"r\")\ncdf_file= addfile(grid_dir+\"/GRIDCRO2D_\"+date+\".nc\",\"r\")\n\nptime = (/\"12\",\"13\",\"14\",\"15\",\"16\",\"17\",\"18\",\"19\",\"20\",\"21\",\"22\",\"23\",\"00\",\"01\",\"02\",\"03\",\"04\",\"05\",\"06\",\"07\",\"08\",\"09\",\"10\",\"11\"/)\n\ntime = cdf_file1->TFLAG(:,0,:)\no3 = cdf_file1->O3(:,:,:) ;ppb\n;pm25 = cdf_file1->PM25_TOT(:,0,:,:)\n\n\nnt = dimsizes(o3(:,0,0))\nny = dimsizes(o3(0,:,0))\nnx = dimsizes(o3(0,0,:))\n\nprint(nt+\" \"+ny+\" \"+nx)\nprint(max(o3))\nprint(min(o3))\nprint(avg(o3))\n\n;print(max(pm25))\n;print(min(pm25))\n;print(avg(pm25))\n\n;print(time)\n\nlat = cdf_file->LAT(0,0,:,:)\nlon = cdf_file->LON(0,0,:,:)\n\no3@lat2d = lat\no3@lon2d = lon\n\nres = True\nres@gsnMaximize = True ; maximize pot in frame\nres@gsnFrame = False ; don't advance frame\nres@gsnDraw = False\n;res@gsnSpreadColors = True\nres@lbLabelAutoStride = True\n;res@lbBoxLinesOn = False\nres@pmLabelBarHeightF = 0.1\nres@pmLabelBarWidthF = 0.5\nres@cnFillOn=True\n;res@cnMonoFillPattern=True\n;res@cnMonoLineColor=True\nres@cnLinesOn=False\n;res@pmLabelBarDisplayMode=\"never\"\nres@gsnLeftString = \"\";\nres@gsnRightString = \"\"\n\nres@mpLimitMode = \"LatLon\"\nres@mpMinLonF = -120 ;min(lon)+0.2\nres@mpMaxLonF = -70 ;max(lon)-0.2\nres@mpMinLatF = 25 ;min(lat)+0.05\nres@mpMaxLatF = 50 ;max(lat)-0.05\nres@mpDataBaseVersion = \"MediumRes\"\n;res@tiMainString = times(it)\nres@mpDataBaseVersion = \"MediumRes\"\nres@mpDataSetName = \"Earth..4\"\nres@mpAreaMaskingOn = True\nres@mpOutlineBoundarySets = \"GeophysicalAndUSStates\"\nres@mpOutlineSpecifiers=\"United States : States\"\nres@mpLandFillColor = \"white\"\nres@mpInlandWaterFillColor = \"white\"\nres@mpOceanFillColor = \"white\"\nres@mpGeophysicalLineColor = \"Black\"\nres@mpGeophysicalLineThicknessF = 1.5\n\n;res@gsnSpreadColors = True\nres@lbLabelAutoStride = True\nres@lbLabelFont = 25\nres@tiXAxisFont = 25\nres@pmTickMarkDisplayMode = \"Always\"\nres@tmXBLabelFont = 25\nres@tmXBLabelFontHeightF = 0.013\nres@tmXBLabelDeltaF = -0.5\nres@tmYLLabelFont = 25\nres@tmYLLabelFontHeightF = 0.013\nres@tmXBLabelDeltaF = -0.5\nres@tmXTLabelsOn = False\nres@tmXTLabelFont = 25\nres@tmXTLabelFontHeightF = 0.013\nres@tmYRLabelsOn = False\nres@tmYRLabelFont = 25\nres@tmYRLabelFontHeightF = 0.013\n\n\nres@mpProjection = \"LambertConformal\" ;\"CylindricalEquidistant\"\nres@mpLambertParallel1F = 33.\nres@mpLambertParallel2F = 
45.\nres@mpLambertMeridianF = -98.\n\nres@cnLevelSelectionMode = \"ManualLevels\"\nres@cnMinLevelValF = 0.\nres@cnMaxLevelValF = 80\nres@cnLevelSpacingF = 4\n\nres@txFont = \"times-roman\"\nres@tiMainFont = \"times-roman\"\n\ndo it = 0, nt-1\n if (it .lt. 12) then\n pdate=d1\n else\n pdate=d2\n end if\n\n pname=plot_dir+\"/testPlot_\"+pdate+\"_\"+ptime(it)\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n res@tiMainString = pdate+\" \"+ptime(it)+\" UTC O~B~3~N~ Forecast (ppbV)\"\n plot = gsn_csm_contour_map(wks,o3(it,:,:),res)\n draw(plot)\n frame(wks)\n delete(wks)\n system(\"composite -geometry 100x70+900+900 /groups/ESS/aalnaim/cmaq/mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\nend do\ndelete(res)\n\nend\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\n\n# convert -delay 100 *.png 20220613_20220614.gif\nconvert -delay 100 /groups/ESS/aalnaim/cmaq/plots/testPlot*.png /groups/ESS/aalnaim/cmaq/plots/\"Map_\"$YYYYMMDD_POST.gif\n\nif [ $? -eq 0 ]; then\n echo \"Generating images/gif Completed Successfully\"\n\techo \"Removing ncl file: geoweaver_plot_daily_O3.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\nelse\n echo \"Generating images/gif Failed!\"\n echo \"Removing ncl file: geoweaver_plot_daily_O3.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\nfi\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\n(0)\t24 265 442\n(0)\t85.06\n(0)\t4.506333333333333\n(0)\t30.63602873848283\nGenerating images/gif Completed Successfully\nRemoving ncl file: geoweaver_plot_daily_O3.ncl...\n", + "history_begin_time" : 1656459810867, + "history_end_time" : 1656460094816, + "history_notes" : null, + "history_process" : "iicy7w", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "3m8h86qnfzm", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=$(date -d '3 day ago' '+%Y%m%d')\nexport stdate_post=$(date -d '3 day ago' '+%Y-%m-%d') \nexport eddate_post=$(date -d '2 day ago' '+%Y-%m-%d')\n\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files\"\nexport mcip_dir=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km\"\nexport graph_dir=\"/groups/ESS/aalnaim/cmaq/plots\"\n\nexport obs_dir_NCL=\"/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X\"\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\n\ndate = getenv(\"YYYYMMDD_POST\") \nd1 = getenv(\"stdate_post\") \nd2 = getenv(\"eddate_post\") \n\nobs_dir = getenv(\"obs_dir_NCL\")\nplot_dir = getenv(\"graph_dir\") \n\nhr=new(24,\"string\")\nhr=(/\"00\",\"01\",\"02\",\"03\",\"04\",\"05\",\"06\",\"07\",\"08\",\"09\",\"10\",\"11\",\"12\",\"13\",\"14\",\"15\",\"16\",\"17\",\"18\",\"19\",\"20\",\"21\",\"22\",\"23\"/)\n\nprint(plot_dir)\naconc_dir = 
getenv(\"postdata_dir\") \ngrid_dir = getenv(\"mcip_dir\") \n\ncdf_file1 = addfile(aconc_dir+\"/COMBINE3D_ACONC_v531_gcc_AQF5X_\"+date+\"_ML_extracted.nc\",\"r\")\ncdf_file= addfile(grid_dir+\"/GRIDCRO2D_\"+date+\".nc\",\"r\")\ncdf_file2= addfile(grid_dir+\"/METCRO2D_\"+date+\".nc\",\"r\")\n\ntime = cdf_file1->TFLAG(:,0,:)\no3 = cdf_file1->O3(:,:,:) ;ppb\nwspd10=cdf_file2->WSPD10(:,0,:,:)\nwdir10=cdf_file2->WDIR10(:,0,:,:)\n\ntemp = cdf_file2->TEMP2\n\nnt = dimsizes(o3(:,0,0))\nny = dimsizes(o3(0,:,0))\nnx = dimsizes(o3(0,0,:))\n\nprint(max(temp))\nprint(min(temp))\nprint(avg(temp))\n\n\n\nprint(nt+\" \"+ny+\" \"+nx)\nprint(max(o3))\nprint(min(o3))\nprint(avg(o3))\n\nlat = cdf_file->LAT(0,0,:,:)\nlon = cdf_file->LON(0,0,:,:)\n\no3@lat2d = lat\no3@lon2d = lon\no3@unit = \"ppbv\"\n\nUV10=wind_component(wspd10,wdir10,0)\nUV10@lat2d = lat\nUV10@lon2d = lon\n\n\nres = True\nres@gsnMaximize = True ; maximize pot in frame\nres@gsnFrame = False ; dont advance frame\nres@gsnDraw = False\nres@gsnLeftString = \"\"\nres@gsnRightString = \"\"\nres@txFont = \"times-roman\"\nres@tiMainFont = \"times-roman\"\n;res@tiMainFontHeightF = 0.02\n;res@vpWidthF = 0.7\n;res@vpHeightF = 0.7\n\n;;set map;;\nmpres = res\nmpres@mpLimitMode = \"LatLon\"\nmpres@mpDataSetName = \"Earth..4\"\nmpres@mpDataBaseVersion = \"MediumRes\"\nmpres@mpOutlineOn = True\nmpres@mpGeophysicalLineThicknessF = 1.5\nmpres@mpFillDrawOrder = \"PostDraw\"\nmpres@mpFillOn = False\nmpres@mpAreaMaskingOn = True\nmpres@mpOutlineBoundarySets = \"GeophysicalAndUSStates\"\nmpres@mpOutlineSpecifiers = \"United States:States\"\nmpres@mpProjection = \"LambertConformal\"\nmpres@mpLambertParallel1F = 33.\nmpres@mpLambertParallel2F = 45.\nmpres@mpLambertMeridianF = -98.\nmpres@mpMinLonF = -120 ;min(lon)+0.2\nmpres@mpMaxLonF = -70 ;max(lon)-0.2\nmpres@mpMinLatF = 25 ;min(lat)+0.05\nmpres@mpMaxLatF = 50 ;max(lat)-0.05\nmpres@pmTickMarkDisplayMode = \"Always\"\nmpres@mpLandFillColor = \"white\"\nmpres@mpInlandWaterFillColor = \"white\"\nmpres@mpOceanFillColor = \"white\"\nmpres@mpGeophysicalLineColor = \"Black\"\n\n;mpres@lbLabelAutoStride = True\nmpres@tiXAxisFont = 25\nmpres@pmTickMarkDisplayMode = \"Always\"\nmpres@tmXBLabelFont = 25\nmpres@tmXBLabelFontHeightF = 0.013\nmpres@tmXBLabelDeltaF = -0.5\nmpres@tmYLLabelFont = 25\nmpres@tmYLLabelFontHeightF = 0.013\nmpres@tmXBLabelDeltaF = -0.5\nmpres@tmXTLabelsOn = False\nmpres@tmXTLabelFont = 25\nmpres@tmXTLabelFontHeightF = 0.013\nmpres@tmYRLabelsOn = False\nmpres@tmYRLabelFont = 25\nmpres@tmYRLabelFontHeightF = 0.013\n\n;;set contour;;\ncnres = res\ncnres@cnFillDrawOrder = \"PreDraw\"\ncnres@cnFillOn = True\ncnres@cnLinesOn = False\ncnres@cnLineLabelsOn = False\ncnres@lbLabelFont = 25\ncnres@lbLabelFontHeightF = 0.013\ncnres@tiXAxisFont = 25\ncnres@pmLabelBarWidthF = 0.5\ncnres@pmLabelBarHeightF = 0.1\n;cnres@pmLabelBarOrthogonalPosF = -0.02\ncnres@lbLabelAutoStride = True\n\n;set vector;;\nres_vc = res\nres_vc@vcGlyphStyle = \"LineArrow\"\nres_vc@vcLineArrowThicknessF = 3\nres_vc@vcMinDistanceF = 0.03\nres_vc@vcRefLengthF = 0.03\nres_vc@vcRefAnnoOn = True\nres_vc@vcRefMagnitudeF = 16\nres_vc@vcRefAnnoString1 = \"16m/s\"\nres_vc@vcRefAnnoSide = \"Top\"\nres_vc@vcRefAnnoString2On = False\nres_vc@vcRefAnnoPerimOn = False\nres_vc@vcRefAnnoOrthogonalPosF = -0.02\nres_vc@vcRefAnnoParallelPosF = 0.999\n;res_vc@vcRefAnnoBackgroundColor = \"White\"\nres_vc@vcVectorDrawOrder = \"PostDraw\"\n\ndo it = 0, nt-1\n if (it .lt. 
12) then\n pdate=d1\n else\n pdate=d2\n end if\n\n ;print(time(it,0)+\" \"+time(it,1))\n rundate = yyyyddd_to_yyyymmdd( time(it,0) )\n runtime = hr( tointeger(time(it,1)/10000) )\n\n site = readAsciiTable(obs_dir+\"/AQF5X_Hourly_\"+rundate+runtime+\".dat\",1,\"string\",1)\n nrows = dimsizes(site)\n sitename = str_get_field(site,1,\",\")\n sitelat = stringtofloat(str_get_field(site,2,\",\"))\n sitelon = stringtofloat(str_get_field(site,3,\",\"))\n O3_obs = stringtofloat(str_get_field(site,4,\",\"))\n\n obslon = sitelon(:,0)\n obslat = sitelat(:,0)\n obsO3 = O3_obs(:,0)\n\n npts = nrows(0)\n\n obsO3@_FillValue = -999.\n\n;--- levels for dividing\n levels_O3 = ispan(0,80,4)\n\n nlevels = dimsizes(levels_O3)\n\n colors = span_color_rgba(\"WhiteBlueGreenYellowRed\",nlevels+1)\n\n num_distinct_markers = nlevels+1 ; number of distinct markers\n lat_O3 = new((/num_distinct_markers,npts/),float)\n lon_O3 = new((/num_distinct_markers,npts/),float)\n lat_O3 = -999\n lon_O3 = -999\n\n\n;\n; Group the points according to which range they fall in. At the\n; same time, create the label that we will use later in the labelbar\n;\n do i = 0, num_distinct_markers-1\n if (i.eq.0) then\n indexes_O3 = ind(obsO3(:).lt.levels_O3(0))\n end if\n if (i.eq.num_distinct_markers-1) then\n indexes_O3 = ind(obsO3(:).ge.max(levels_O3))\n end if\n if (i.gt.0.and.i.lt.num_distinct_markers-1) then\n indexes_O3 = ind(obsO3(:).ge.levels_O3(i-1).and.obsO3(:).lt.levels_O3(i))\n end if\n\n;\n; Now that we have the set of indexes whose values fall within\n; the given range, take the corresponding lat/lon values and store\n; them, so later we can color this set of markers with the appropriate\n; color.\n;\n if (.not.any(ismissing(indexes_O3))) then\n npts_range_O3 = dimsizes(indexes_O3) ; # of points in this range.\n\n lat_O3(i,0:npts_range_O3-1) = obslat(indexes_O3)\n lon_O3(i,0:npts_range_O3-1) = obslon(indexes_O3)\n ;print(\"O3: \"+npts_range_O3)\n end if\n\n\n delete(indexes_O3) ; Necessary b/c \"indexes\" may be a different\n end do\n\n lat_O3@_FillValue = -999\n lon_O3@_FillValue = -999\n\n gsres = True\n gsres@gsMarkerIndex = 16 ; Use filled dots for markers.\n\n hollowres = True\n hollowres@gsMarkerIndex = 4\n hollowres@gsMarkerColor = \"black\"\n hollowres@gsMarkerSizeF = 0.008\n\n;;;;;;;;; Plot Ozone\n pname=plot_dir+\"/OBS-FORECAST_O3_\"+rundate+runtime\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n pmid_O3 = new(num_distinct_markers,graphic)\n hollow_O3 = new(num_distinct_markers,graphic)\n\n cnres@tiMainString = pdate+\" \"+runtime+\" UTC O~B~3~N~ (ppbV)\"\n cnres@cnLevelSelectionMode = \"ManualLevels\"\n cnres@cnMinLevelValF = 0.\n cnres@cnMaxLevelValF = 80\n cnres@cnLevelSpacingF = 4\n\n ;plot = gsn_csm_contour_map(wks,o3(it,:,:),res)\n map = gsn_csm_map(wks,mpres)\n contour = gsn_csm_contour(wks,o3(it,:,:),cnres)\n vector = gsn_csm_vector(wks,UV10(0,it,:,:),UV10(1,it,:,:),res_vc)\n overlay(map,contour)\n overlay(map,vector)\n\n pmid = new(num_distinct_markers,graphic)\n hollow = new(num_distinct_markers,graphic)\n do i = 0, num_distinct_markers-1\n if (.not.ismissing(lat_O3(i,0)))\n gsres@gsMarkerColor = colors(i,:)\n gsres@gsMarkerSizeF = 0.008\n gsres@gsMarkerThicknessF = 1\n pmid(i) = gsn_add_polymarker(wks,vector,lon_O3(i,:),lat_O3(i,:),gsres)\n hollow(i) = gsn_add_polymarker(wks,vector,lon_O3(i,:),lat_O3(i,:),hollowres)\n end if\n end do\n\n draw(map)\n frame(wks)\n delete(wks)\n delete(pmid_O3)\n delete(hollow_O3)\n system(\"composite -geometry 100x70+900+900 
/groups/ESS/aalnaim/cmaq/mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\n\n\n delete(pmid)\n delete(hollow)\n delete(site)\n delete(sitename)\n delete(sitelat)\n delete(sitelon)\n delete(O3_obs)\n delete(obslon)\n delete(obslat)\n delete(obsO3)\n delete([/lon_O3,lat_O3/])\n\nend do\ndelete(res)\n\n;/\n\nend\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\n\nconvert -delay 100 /groups/ESS/aalnaim/cmaq/plots/OBS*.png /groups/ESS/aalnaim/cmaq/plots/\"Airnow_\"$YYYYMMDD_POST.gif\n\nif [ $? -eq 0 ]; then\n echo \"Generating AirNow images/gif Completed Successfully\"\n\techo \"Removing ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nelse\n echo \"Generating AirNow images/gif Failed!\"\n echo \"Removing ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nfi\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\n\n\nVariable: plot_dir\nType: string\nTotal Size: 8 bytes\n 1 values\nNumber of Dimensions: 1\nDimensions and sizes:\t[1]\nCoordinates: \n(0)\t/groups/ESS/aalnaim/cmaq/plots\n(0)\t316.8002\n(0)\t262.6003\n(0)\t293.9359\n(0)\t24 265 442\n(0)\t85.06\n(0)\t4.506333333333333\n(0)\t30.63602873848283\nfatal:asciiread: Unable to open input file (/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X/AQF5X_Hourly_2022062512.dat)\n\nfatal:[\"Execute.c\":8637]:Execute: Error occurred at or near line 9684 in file /opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\n\n\nfatal:[\"Execute.c\":8637]:Execute: Error occurred at or near line 9766 in file /opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\n\n\nfatal:[\"Execute.c\":8637]:Execute: Error occurred at or near line 159 in file /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\n\n\nconvert: unable to open image `/groups/ESS/aalnaim/cmaq/plots/OBS*.png': No such file or directory @ error/blob.c/OpenBlob/2881.\nconvert: no images defined `/groups/ESS/aalnaim/cmaq/plots/Airnow_20220625.gif' @ error/convert.c/ConvertImageCommand/3226.\nGenerating AirNow images/gif Failed!\nRemoving ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\n", + "history_begin_time" : 1656459811268, + "history_end_time" : 1656459817777, + "history_notes" : null, + "history_process" : "is1w3m", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "zo92h4o0v63", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=$(date -d '3 day ago' '+%Y%m%d')\nexport wfname=\"/groups/ESS/aalnaim/cmaq/results/geoweaver_evalution_\"$YYYYMMDD_POST\"_results.txt\"\n\nexport obs_dir_NCL=\"/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X\"\nexport ofname=\"/AQF5X_Hourly_\"\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files/\"\n\nexport mfname=\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"$YYYYMMDD_POST\"_ML_extracted.nc\"\n\nexport grid_fname=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/GRIDCRO2D_\"$YYYYMMDD_POST\".nc\" #This needs to be auto date\n\nexport dx=12000\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\n\nload 
\"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\nsdate=getenv(\"YYYYMMDD_POST\")\nwfname=getenv(\"wfname\")\nobs_dir=getenv(\"obs_dir_NCL\")\nofname=getenv(\"ofname\")\nmod_dir=getenv(\"postdata_dir\")\nmfname=getenv(\"mfname\")\ndkm=tofloat(getenv(\"dx\"))\ngrid_fname=(getenv(\"grid_fname\"))\n\nmaxdist=dkm/90000.0*1.414\nmaxarea=0.25\nthd=70\n\n;-----read model lat lon------\n;read lat lon\nf1 = addfile(grid_fname,\"r\")\nmlat = f1->LAT(0,0,:,:)\nmlon = f1->LON(0,0,:,:)\ndelete(f1)\nmlat1d = ndtooned(mlat)\nmlon1d = ndtooned(mlon)\ndelete([/mlat,mlon/])\n\n;-----read cmaq results-----\nf2 = addfile(mod_dir+mfname,\"r\")\nmO3 = f2->O3(:,:,:) ;ppb\n\n\nnt = dimsizes(mO3(:,0,0))\nny = dimsizes(mO3(0,:,0))\nnx = dimsizes(mO3(0,0,:))\n\nm8O3 = new((/17,ny,nx/),\"double\")\nm8maxO3 = new((/ny,nx/),\"double\")\n\ndo ih=0,16\n m8O3(ih,:,:)=dim_avg_n(mO3(ih:ih+7,:,:),0)\nend do\nm8maxO3 = dim_max_n(m8O3,0) ;type double\nmO31d_d=ndtooned(m8maxO3) ; type double\nmO31d=tofloat(mO31d_d)\n\ndelete([/f2,mO3,m8O3,m8maxO3/])\n\n;-----read obs-----\nsyyyy1=str_get_cols(sdate,0,3)\nsmm1=str_get_cols(sdate,4,5)\nsdd1=str_get_cols(sdate,6,7)\n\nymd=jul2greg(greg2jul(tointeger(syyyy1),tointeger(smm1),tointeger(sdd1),-1)+1)\nsyyyy2=tostring_with_format(ymd(0),\"%0.4i\")\nsmm2=tostring_with_format(ymd(1),\"%0.2i\")\nsdd2=tostring_with_format(ymd(2),\"%0.2i\")\n\ntolat=(/-999.0/) ;set the first data to 0\ntolon=tolat\ntoO3=tolat\n\ndo ih=12,35\n if (ih.lt.24) then\n shh=tostring_with_format(ih,\"%0.2i\")\n syyyy=syyyy1\n smm=smm1\n sdd=sdd1\n else\n shh=tostring_with_format(ih-24,\"%0.2i\")\n syyyy=syyyy2\n smm=smm2\n sdd=sdd2\n end if\n data=asciiread(obs_dir+ofname+syyyy+smm+sdd+shh+\".dat\",-1,\"string\")\n xx=array_append_record(tolat,stringtofloat(str_get_field(data(1::), 2,\",\")),0)\n yy=array_append_record(tolon,stringtofloat(str_get_field(data(1::), 3,\",\")),0)\n zz=array_append_record(toO3,stringtofloat(str_get_field(data(1::), 4,\",\")),0)\n delete([/tolat,tolon,toO3/])\n tolat=xx\n tolon=yy\n toO3=zz\n delete([/xx,yy,zz/])\n delete(data)\nend do\n\ntoO3@_FillValue = -999.0\n\n;-----calculate max ave 8 hour o3-----\noflag=tolat*0+1\naa=ind((oflag.gt.0).and.(toO3.ge.0))\nii=0\nprint(\"8h start\")\nif (any(ismissing(aa))) then\n iflag=0\nelse\n iflag=1\n olat=(/tolat(aa(0))/)\n olon=(/tolon(aa(0))/)\n oO3=(/-999.0/)\n o8O3 = new(17,\"float\")\n o8O3 = -999.0\nend if\ndelete(aa)\ndo while (iflag.gt.0)\n aa=ind((tolat.eq.olat(ii)).and.(tolon.eq.olon(ii)).and.(toO3.ge.0))\n oflag(aa)=0\n if (dimsizes(aa).eq.24) then ; calculate 24 h, so calculate 8hr ozone here\n do ih = 0, 16\n o8O3(ih) = avg(toO3(aa(ih:ih+7)))\n end do\n oO3(ii)=max(o8O3)\n end if\n o8O3 = -999.0\n delete(aa)\n aa=ind((oflag.gt.0).and.(toO3.ge.0))\n if (any(ismissing(aa))) then\n iflag=0\n else\n xx=array_append_record(olat,(/tolat(aa(0))/),0)\n yy=array_append_record(olon,(/tolon(aa(0))/),0)\n zz=array_append_record(oO3,(/-999.0/),0)\n delete([/olat,olon,oO3/])\n olat=xx\n olon=yy\n oO3=zz\n delete([/xx,yy,zz/])\n ii=ii+1\n end if\n delete(aa)\nend do\nprint(\"obs 8hour max 
end\")\naa=ind(oO3.ge.0)\nnobs=dimsizes(aa)\nolat24=olat(aa)\nolon24=olon(aa)\noO324=oO3(aa)\nprint(\"TYPE of oO324: \"+typeof(oO324))\ndelete([/aa,olat,olon,oO3/])\nmO324=oO324*0-999.0\nprint(\"TYPE of mO324: \"+typeof(mO324))\nprint(\"TYPE of mO31d: \"+typeof(mO31d))\nareaa=oO324*0-999.0\nareab=areaa\naread=areaa\n\n;-----find model point-----\ndo in=0,nobs-1\n dis=sqrt((mlat1d-olat24(in))^2+(mlon1d-olon24(in))^2)\n aa=minind(dis)\n ;print(in+\" \"+aa)\n if (dis(aa).lt.maxdist) then\n mO324(in)=mO31d(aa)\n cc=ind((mlat1d.ge.(olat24(in)-maxarea)).and.(mlat1d.le.(olat24(in)+maxarea)).and.\\\n (mlon1d.ge.(olon24(in)-maxarea)).and.(mlon1d.le.(olon24(in)+maxarea)))\n areaa(in)=0\n areab(in)=0\n if (oO324(in).ge.thd) then\n aread(in)=0\n if (max(mO31d(cc)).ge.thd) then\n areab(in)=1\n else\n aread(in)=1\n end if\n else\n bb=ind((olat24.ge.(olat24(in)-maxarea)).and.(olat24.le.(olat24(in)+maxarea)).and.\\\n (olon24.ge.(olon24(in)-maxarea)).and.(olon24.le.(olon24(in)+maxarea)))\n if (max(mO31d(aa)).ge.thd) then\n if (max(oO324(bb)).ge.thd) then\n areaa(in)=0\n else\n areaa(in)=1\n end if\n else\n areaa(in)=0\n end if\n delete(bb)\n end if\n delete(cc)\n end if\n delete(aa)\nend do\n\n;-----cal rmse corr nme nmb me mb-----\ntt=ind((mO324.ge.0).and.(oO324.ge.0))\n\nif (any(ismissing(tt))) then\n rmse=-999.0\n corr=-999.0\n nmb=-999.0\n nme=-999.0\n me=-999.0\n mb=-999.0\nelse\n rmse=dim_rmsd_n(oO324(tt),mO324(tt),0)\n corr=esccr(oO324(tt),mO324(tt),0)\n nmb=sum((mO324(tt)-oO324(tt)))/sum(oO324(tt))\n nme=sum(abs(oO324(tt)-mO324(tt)))/sum(oO324(tt))\n me=avg(abs(oO324(tt)-mO324(tt)))\n mb=avg((mO324(tt)-oO324(tt)))\nend if\n;-----cal ah afar-----\naa=ind((areaa+areab).gt.0)\nbb=ind((aread+areab).gt.0)\nif (any(ismissing(aa))) then\n afar=0.\nelse\n afar=tofloat(sum(areaa(aa)))/tofloat(sum(areab(aa))+sum(areaa(aa)))*100\nend if\ndelete(aa)\nif (any(ismissing(bb))) then\n ah=-999.0\nelse\n ah=tofloat(sum(areab(bb)))/tofloat(sum(areab(bb))+sum(aread(bb)))*100\nend if\ndelete(bb)\nwrite_table(wfname,\"a\",[/sdate,dimsizes(tt),avg(oO324(tt)),avg(mO324(tt)),rmse,corr,nmb,nme,mb,me,ah,afar/],\\\n \"%s,%i,%f,%f,%f,%f,%f,%f,%f,%f,%f,%f\")\ndelete(tt)\nend\n\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\n\nif [ $? 
-eq 0 ]; then\n echo \"Evaluation Completed Successfully\"\n\techo \"Removing ncl file: geoweaver_eva_daily_O3.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\nelse\n echo \"Evaluation Failed!\"\nfi\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\nfatal:asciiread: Unable to open input file (/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X/AQF5X_Hourly_2022062512.dat)\n\nfatal:[\"Execute.c\":8637]:Execute: Error occurred at or near line 81 in file /groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\n\n\nEvaluation Completed Successfully\nRemoving ncl file: geoweaver_eva_daily_O3.ncl...\n", + "history_begin_time" : 1656460094843, + "history_end_time" : 1656460099774, + "history_notes" : null, + "history_process" : "fsk7f2", + "host_id" : "p6wvf2", + "indicator" : "Done" +}] diff --git a/history/6JBbBd5kUuBwJdfUAF.json b/history/6JBbBd5kUuBwJdfUAF.json new file mode 100644 index 0000000..b78983b --- /dev/null +++ b/history/6JBbBd5kUuBwJdfUAF.json @@ -0,0 +1,41 @@ +[{ + "history_id" : "p72xum4kath", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\nfrom datetime import timedelta\n# home directory\nhome = str(Path.home())\n\nbase = datetime.datetime.today() - timedelta(days=2)\ndate_list = [base - timedelta(days=x) for x in range(3)]\ndays = [date.strftime('%Y%m%d') for date in date_list]\n\n# nc file need to correspond to the same prediction date in \"/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf_Jun13.csv\"\ndf_cdf = xr.open_dataset(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/COMBINE3D_ACONC_v531_gcc_AQF5X_\"+days[2]+\"_extracted.nc\")\n\ndf_csv = pd.read_csv(\"/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv\")\ndf_csv['YYYYMMDDHH'] = df_csv['YYYYMMDDHH'].astype(str)\ndf_filt = df_csv[df_csv['YYYYMMDDHH'].str.contains(days[1]+\"|\"+days[0], case = False, regex=True)]\ndf_filt = df_filt[(df_filt['YYYYMMDDHH'] > days[1]+'11') & (df_filt['YYYYMMDDHH'] < days[0]+'12')]\n\n# Reshape \"prediction/Latitude/Longitude\" columns to (TSTEP, ROW, COL), these lines will reshape data into (24, 265, 442)\nreshaped_prediction = np.atleast_3d(df_filt['prediction']).reshape(-1, 265, 442)\n\n# Remove \"LAY\" Dimension in O3 variable already in nc file.\nreduced_dim = df_cdf['O3'].sel(LAY=1, drop=True)\n# Swap values from original nc file with new prediction data\nreduced_dim.values = reshaped_prediction\n\n# Apply changes to data variable in nc file\ndf_cdf['O3'] = (['TSTEP', 'ROW', 'COL'], reshaped_prediction)\n\ndf_cdf.to_netcdf('/groups/ESS/aalnaim/cmaq/prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_'+days[1]+'_ML_extracted.nc')\n\nprint('Saved updated netCDF file: /groups/ESS/aalnaim/cmaq/prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_'+days[1]+'_ML_extracted.nc')", + "history_output" : "Saved updated netCDF file: /groups/ESS/aalnaim/cmaq/prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_20220625_ML_extracted.nc\n", + "history_begin_time" : 1656453929138, + "history_end_time" : 1656454194243, + "history_notes" : null, + "history_process" : "3asyzj", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "71vxoc5s3s6", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport 
YYYYMMDD_POST=$(date -d '3 day ago' '+%Y%m%d') #This needs to be auto date `date -d \"-2 day ${1}\" +%Y%m%d`\nexport stdate_post=$(date -d '3 day ago' '+%Y-%m-%d') #This needs to be auto date\nexport eddate_post=$(date -d '2 day ago' '+%Y%m%d') #This needs to be auto date\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files\"\nexport mcip_dir=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km\"\nexport dir_graph=\"/groups/ESS/aalnaim/cmaq/plots\"\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\n\ndate = getenv(\"YYYYMMDD_POST\")\nd1 = getenv(\"stdate_post\")\nd2 = getenv(\"eddate_post\")\n\n;print(\"Passed Date: \"+date)\n\n;aconc_dir = getenv(\"postdata_dir\")\ngrid_dir = getenv(\"mcip_dir\")\nplot_dir = getenv(\"dir_graph\")\n\ncdf_file1 = addfile(\"/groups/ESS/aalnaim/cmaq/prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_\"+date+\"_ML_extracted.nc\",\"r\")\ncdf_file= addfile(grid_dir+\"/GRIDCRO2D_\"+date+\".nc\",\"r\")\n\nptime = (/\"12\",\"13\",\"14\",\"15\",\"16\",\"17\",\"18\",\"19\",\"20\",\"21\",\"22\",\"23\",\"00\",\"01\",\"02\",\"03\",\"04\",\"05\",\"06\",\"07\",\"08\",\"09\",\"10\",\"11\"/)\n\ntime = cdf_file1->TFLAG(:,0,:)\no3 = cdf_file1->O3(:,:,:) ;ppb\n;pm25 = cdf_file1->PM25_TOT(:,0,:,:)\n\n\nnt = dimsizes(o3(:,0,0))\nny = dimsizes(o3(0,:,0))\nnx = dimsizes(o3(0,0,:))\n\nprint(nt+\" \"+ny+\" \"+nx)\nprint(max(o3))\nprint(min(o3))\nprint(avg(o3))\n\n;print(max(pm25))\n;print(min(pm25))\n;print(avg(pm25))\n\n;print(time)\n\nlat = cdf_file->LAT(0,0,:,:)\nlon = cdf_file->LON(0,0,:,:)\n\no3@lat2d = lat\no3@lon2d = lon\n\nres = True\nres@gsnMaximize = True ; maximize pot in frame\nres@gsnFrame = False ; don't advance frame\nres@gsnDraw = False\n;res@gsnSpreadColors = True\nres@lbLabelAutoStride = True\n;res@lbBoxLinesOn = False\nres@pmLabelBarHeightF = 0.1\nres@pmLabelBarWidthF = 0.5\nres@cnFillOn=True\n;res@cnMonoFillPattern=True\n;res@cnMonoLineColor=True\nres@cnLinesOn=False\n;res@pmLabelBarDisplayMode=\"never\"\nres@gsnLeftString = \"\";\nres@gsnRightString = \"\"\n\nres@mpLimitMode = \"LatLon\"\nres@mpMinLonF = -120 ;min(lon)+0.2\nres@mpMaxLonF = -70 ;max(lon)-0.2\nres@mpMinLatF = 25 ;min(lat)+0.05\nres@mpMaxLatF = 50 ;max(lat)-0.05\nres@mpDataBaseVersion = \"MediumRes\"\n;res@tiMainString = times(it)\nres@mpDataBaseVersion = \"MediumRes\"\nres@mpDataSetName = \"Earth..4\"\nres@mpAreaMaskingOn = True\nres@mpOutlineBoundarySets = \"GeophysicalAndUSStates\"\nres@mpOutlineSpecifiers=\"United States : States\"\nres@mpLandFillColor = \"white\"\nres@mpInlandWaterFillColor = \"white\"\nres@mpOceanFillColor = \"white\"\nres@mpGeophysicalLineColor = \"Black\"\nres@mpGeophysicalLineThicknessF = 1.5\n\n;res@gsnSpreadColors = True\nres@lbLabelAutoStride = True\nres@lbLabelFont = 25\nres@tiXAxisFont = 25\nres@pmTickMarkDisplayMode = \"Always\"\nres@tmXBLabelFont = 25\nres@tmXBLabelFontHeightF = 0.013\nres@tmXBLabelDeltaF = -0.5\nres@tmYLLabelFont = 25\nres@tmYLLabelFontHeightF = 0.013\nres@tmXBLabelDeltaF = -0.5\nres@tmXTLabelsOn = 
False\nres@tmXTLabelFont = 25\nres@tmXTLabelFontHeightF = 0.013\nres@tmYRLabelsOn = False\nres@tmYRLabelFont = 25\nres@tmYRLabelFontHeightF = 0.013\n\n\nres@mpProjection = \"LambertConformal\" ;\"CylindricalEquidistant\"\nres@mpLambertParallel1F = 33.\nres@mpLambertParallel2F = 45.\nres@mpLambertMeridianF = -98.\n\nres@cnLevelSelectionMode = \"ManualLevels\"\nres@cnMinLevelValF = 0.\nres@cnMaxLevelValF = 80\nres@cnLevelSpacingF = 4\n\nres@txFont = \"times-roman\"\nres@tiMainFont = \"times-roman\"\n\ndo it = 0, nt-1\n if (it .lt. 12) then\n pdate=d1\n else\n pdate=d2\n end if\n\n pname=plot_dir+\"/testPlot_\"+pdate+\"_\"+ptime(it)\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n res@tiMainString = pdate+\" \"+ptime(it)+\" UTC O~B~3~N~ Forecast (ppbV)\"\n plot = gsn_csm_contour_map(wks,o3(it,:,:),res)\n draw(plot)\n frame(wks)\n delete(wks)\n system(\"composite -geometry 100x70+900+900 /groups/ESS/aalnaim/cmaq/mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\nend do\ndelete(res)\n\nend\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\n\n# convert -delay 100 *.png 20220613_20220614.gif\nconvert -delay 100 /groups/ESS/aalnaim/cmaq/plots/testPlot*.png /groups/ESS/aalnaim/cmaq/plots/Map_$YYYYMMDD_POST_correct.gif\n\nif [ $? -eq 0 ]; then\n echo \"Generating images/gif Completed Successfully\"\n\techo \"Removing ncl file: geoweaver_plot_daily_O3.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\nelse\n echo \"Generating images/gif Failed!\"\n echo \"Removing ncl file: geoweaver_plot_daily_O3.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\nfi\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\n(0)\t24 265 442\n(0)\t85.06\n(0)\t4.506333333333333\n(0)\t30.63602873848283\n(0)\tgsn_open_wks: Error: directory '/groups/ESS/aalnaim/cmaq/plots/' does not exist or lacks write permissions.\nconvert: unable to open image `/groups/ESS/aalnaim/cmaq/plots/testPlot*.png': No such file or directory @ error/blob.c/OpenBlob/2881.\nconvert: no images defined `/groups/ESS/aalnaim/cmaq/plots/Map_.gif' @ error/convert.c/ConvertImageCommand/3226.\nGenerating images/gif Failed!\nRemoving ncl file: geoweaver_plot_daily_O3.ncl...\n", + "history_begin_time" : 1656454195167, + "history_end_time" : 1656454202119, + "history_notes" : null, + "history_process" : "iicy7w", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "qp3u62b701h", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=$(date -d '3 day ago' '+%Y%m%d')\nexport stdate_post=$(date -d '3 day ago' '+%Y-%m-%d') \nexport eddate_post=$(date -d '2 day ago' '+%Y%m%d')\n\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files\"\nexport mcip_dir=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km\"\nexport graph_dir=\"/groups/ESS/aalnaim/cmaq/plots\"\n\nexport obs_dir_NCL=\"/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X\"\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload 
\"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\n\ndate = getenv(\"YYYYMMDD_POST\") \nd1 = getenv(\"stdate_post\") \nd2 = getenv(\"eddate_post\") \n\nobs_dir = getenv(\"obs_dir_NCL\")\nplot_dir = getenv(\"graph_dir\") \n\nhr=new(24,\"string\")\nhr=(/\"00\",\"01\",\"02\",\"03\",\"04\",\"05\",\"06\",\"07\",\"08\",\"09\",\"10\",\"11\",\"12\",\"13\",\"14\",\"15\",\"16\",\"17\",\"18\",\"19\",\"20\",\"21\",\"22\",\"23\"/)\n\nprint(plot_dir)\naconc_dir = getenv(\"postdata_dir\") \ngrid_dir = getenv(\"mcip_dir\") \n\ncdf_file1 = addfile(aconc_dir+\"/COMBINE3D_ACONC_v531_gcc_AQF5X_\"+date+\"_ML_extracted.nc\",\"r\")\ncdf_file= addfile(grid_dir+\"/GRIDCRO2D_\"+date+\".nc\",\"r\")\ncdf_file2= addfile(grid_dir+\"/METCRO2D_\"+date+\".nc\",\"r\")\n\ntime = cdf_file1->TFLAG(:,0,:)\no3 = cdf_file1->O3(:,:,:) ;ppb\nwspd10=cdf_file2->WSPD10(:,0,:,:)\nwdir10=cdf_file2->WDIR10(:,0,:,:)\n\ntemp = cdf_file2->TEMP2\n\nnt = dimsizes(o3(:,0,0))\nny = dimsizes(o3(0,:,0))\nnx = dimsizes(o3(0,0,:))\n\nprint(max(temp))\nprint(min(temp))\nprint(avg(temp))\n\n\n\nprint(nt+\" \"+ny+\" \"+nx)\nprint(max(o3))\nprint(min(o3))\nprint(avg(o3))\n\nlat = cdf_file->LAT(0,0,:,:)\nlon = cdf_file->LON(0,0,:,:)\n\no3@lat2d = lat\no3@lon2d = lon\no3@unit = \"ppbv\"\n\nUV10=wind_component(wspd10,wdir10,0)\nUV10@lat2d = lat\nUV10@lon2d = lon\n\n\nres = True\nres@gsnMaximize = True ; maximize pot in frame\nres@gsnFrame = False ; dont advance frame\nres@gsnDraw = False\nres@gsnLeftString = \"\"\nres@gsnRightString = \"\"\nres@txFont = \"times-roman\"\nres@tiMainFont = \"times-roman\"\n;res@tiMainFontHeightF = 0.02\n;res@vpWidthF = 0.7\n;res@vpHeightF = 0.7\n\n;;set map;;\nmpres = res\nmpres@mpLimitMode = \"LatLon\"\nmpres@mpDataSetName = \"Earth..4\"\nmpres@mpDataBaseVersion = \"MediumRes\"\nmpres@mpOutlineOn = True\nmpres@mpGeophysicalLineThicknessF = 1.5\nmpres@mpFillDrawOrder = \"PostDraw\"\nmpres@mpFillOn = False\nmpres@mpAreaMaskingOn = True\nmpres@mpOutlineBoundarySets = \"GeophysicalAndUSStates\"\nmpres@mpOutlineSpecifiers = \"United States:States\"\nmpres@mpProjection = \"LambertConformal\"\nmpres@mpLambertParallel1F = 33.\nmpres@mpLambertParallel2F = 45.\nmpres@mpLambertMeridianF = -98.\nmpres@mpMinLonF = -120 ;min(lon)+0.2\nmpres@mpMaxLonF = -70 ;max(lon)-0.2\nmpres@mpMinLatF = 25 ;min(lat)+0.05\nmpres@mpMaxLatF = 50 ;max(lat)-0.05\nmpres@pmTickMarkDisplayMode = \"Always\"\nmpres@mpLandFillColor = \"white\"\nmpres@mpInlandWaterFillColor = \"white\"\nmpres@mpOceanFillColor = \"white\"\nmpres@mpGeophysicalLineColor = \"Black\"\n\n;mpres@lbLabelAutoStride = True\nmpres@tiXAxisFont = 25\nmpres@pmTickMarkDisplayMode = \"Always\"\nmpres@tmXBLabelFont = 25\nmpres@tmXBLabelFontHeightF = 0.013\nmpres@tmXBLabelDeltaF = -0.5\nmpres@tmYLLabelFont = 25\nmpres@tmYLLabelFontHeightF = 0.013\nmpres@tmXBLabelDeltaF = -0.5\nmpres@tmXTLabelsOn = False\nmpres@tmXTLabelFont = 25\nmpres@tmXTLabelFontHeightF = 0.013\nmpres@tmYRLabelsOn = False\nmpres@tmYRLabelFont = 25\nmpres@tmYRLabelFontHeightF = 0.013\n\n;;set contour;;\ncnres = res\ncnres@cnFillDrawOrder = \"PreDraw\"\ncnres@cnFillOn = True\ncnres@cnLinesOn = False\ncnres@cnLineLabelsOn = False\ncnres@lbLabelFont = 25\ncnres@lbLabelFontHeightF = 0.013\ncnres@tiXAxisFont = 25\ncnres@pmLabelBarWidthF = 
0.5\ncnres@pmLabelBarHeightF = 0.1\n;cnres@pmLabelBarOrthogonalPosF = -0.02\ncnres@lbLabelAutoStride = True\n\n;set vector;;\nres_vc = res\nres_vc@vcGlyphStyle = \"LineArrow\"\nres_vc@vcLineArrowThicknessF = 3\nres_vc@vcMinDistanceF = 0.03\nres_vc@vcRefLengthF = 0.03\nres_vc@vcRefAnnoOn = True\nres_vc@vcRefMagnitudeF = 16\nres_vc@vcRefAnnoString1 = \"16m/s\"\nres_vc@vcRefAnnoSide = \"Top\"\nres_vc@vcRefAnnoString2On = False\nres_vc@vcRefAnnoPerimOn = False\nres_vc@vcRefAnnoOrthogonalPosF = -0.02\nres_vc@vcRefAnnoParallelPosF = 0.999\n;res_vc@vcRefAnnoBackgroundColor = \"White\"\nres_vc@vcVectorDrawOrder = \"PostDraw\"\n\ndo it = 0, nt-1\n if (it .lt. 12) then\n pdate=d1\n else\n pdate=d2\n end if\n\n ;print(time(it,0)+\" \"+time(it,1))\n rundate = yyyyddd_to_yyyymmdd( time(it,0) )\n runtime = hr( tointeger(time(it,1)/10000) )\n\n site = readAsciiTable(obs_dir+\"/AQF5X_Hourly_\"+rundate+runtime+\".dat\",1,\"string\",1)\n nrows = dimsizes(site)\n sitename = str_get_field(site,1,\",\")\n sitelat = stringtofloat(str_get_field(site,2,\",\"))\n sitelon = stringtofloat(str_get_field(site,3,\",\"))\n O3_obs = stringtofloat(str_get_field(site,4,\",\"))\n\n obslon = sitelon(:,0)\n obslat = sitelat(:,0)\n obsO3 = O3_obs(:,0)\n\n npts = nrows(0)\n\n obsO3@_FillValue = -999.\n\n;--- levels for dividing\n levels_O3 = ispan(0,80,4)\n\n nlevels = dimsizes(levels_O3)\n\n colors = span_color_rgba(\"WhiteBlueGreenYellowRed\",nlevels+1)\n\n num_distinct_markers = nlevels+1 ; number of distinct markers\n lat_O3 = new((/num_distinct_markers,npts/),float)\n lon_O3 = new((/num_distinct_markers,npts/),float)\n lat_O3 = -999\n lon_O3 = -999\n\n\n;\n; Group the points according to which range they fall in. At the\n; same time, create the label that we will use later in the labelbar\n;\n do i = 0, num_distinct_markers-1\n if (i.eq.0) then\n indexes_O3 = ind(obsO3(:).lt.levels_O3(0))\n end if\n if (i.eq.num_distinct_markers-1) then\n indexes_O3 = ind(obsO3(:).ge.max(levels_O3))\n end if\n if (i.gt.0.and.i.lt.num_distinct_markers-1) then\n indexes_O3 = ind(obsO3(:).ge.levels_O3(i-1).and.obsO3(:).lt.levels_O3(i))\n end if\n\n;\n; Now that we have the set of indexes whose values fall within\n; the given range, take the corresponding lat/lon values and store\n; them, so later we can color this set of markers with the appropriate\n; color.\n;\n if (.not.any(ismissing(indexes_O3))) then\n npts_range_O3 = dimsizes(indexes_O3) ; # of points in this range.\n\n lat_O3(i,0:npts_range_O3-1) = obslat(indexes_O3)\n lon_O3(i,0:npts_range_O3-1) = obslon(indexes_O3)\n ;print(\"O3: \"+npts_range_O3)\n end if\n\n\n delete(indexes_O3) ; Necessary b/c \"indexes\" may be a different\n end do\n\n lat_O3@_FillValue = -999\n lon_O3@_FillValue = -999\n\n gsres = True\n gsres@gsMarkerIndex = 16 ; Use filled dots for markers.\n\n hollowres = True\n hollowres@gsMarkerIndex = 4\n hollowres@gsMarkerColor = \"black\"\n hollowres@gsMarkerSizeF = 0.008\n\n;;;;;;;;; Plot Ozone\n pname=plot_dir+\"/OBS-FORECAST_O3_\"+rundate+runtime\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n pmid_O3 = new(num_distinct_markers,graphic)\n hollow_O3 = new(num_distinct_markers,graphic)\n\n cnres@tiMainString = pdate+\" \"+runtime+\" UTC O~B~3~N~ (ppbV)\"\n cnres@cnLevelSelectionMode = \"ManualLevels\"\n cnres@cnMinLevelValF = 0.\n cnres@cnMaxLevelValF = 80\n cnres@cnLevelSpacingF = 4\n\n ;plot = gsn_csm_contour_map(wks,o3(it,:,:),res)\n map = gsn_csm_map(wks,mpres)\n contour = gsn_csm_contour(wks,o3(it,:,:),cnres)\n vector 
= gsn_csm_vector(wks,UV10(0,it,:,:),UV10(1,it,:,:),res_vc)\n overlay(map,contour)\n overlay(map,vector)\n\n pmid = new(num_distinct_markers,graphic)\n hollow = new(num_distinct_markers,graphic)\n do i = 0, num_distinct_markers-1\n if (.not.ismissing(lat_O3(i,0)))\n gsres@gsMarkerColor = colors(i,:)\n gsres@gsMarkerSizeF = 0.008\n gsres@gsMarkerThicknessF = 1\n pmid(i) = gsn_add_polymarker(wks,vector,lon_O3(i,:),lat_O3(i,:),gsres)\n hollow(i) = gsn_add_polymarker(wks,vector,lon_O3(i,:),lat_O3(i,:),hollowres)\n end if\n end do\n\n draw(map)\n frame(wks)\n delete(wks)\n delete(pmid_O3)\n delete(hollow_O3)\n system(\"composite -geometry 100x70+900+900 /groups/ESS/aalnaim/cmaq/mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\n\n\n delete(pmid)\n delete(hollow)\n delete(site)\n delete(sitename)\n delete(sitelat)\n delete(sitelon)\n delete(O3_obs)\n delete(obslon)\n delete(obslat)\n delete(obsO3)\n delete([/lon_O3,lat_O3/])\n\nend do\ndelete(res)\n\n;/\n\nend\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\n\nconvert -delay 100 /groups/ESS/aalnaim/cmaq/plots/OBS*.png /groups/ESS/aalnaim/cmaq/plots/Airnow_$YYYYMMDD_POST_correct.gif\n\nif [ $? -eq 0 ]; then\n echo \"Generating AirNow images/gif Completed Successfully\"\n\techo \"Removing ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nelse\n echo \"Generating AirNow images/gif Failed!\"\n echo \"Removing ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nfi\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\n\n\nVariable: plot_dir\nType: string\nTotal Size: 8 bytes\n 1 values\nNumber of Dimensions: 1\nDimensions and sizes:\t[1]\nCoordinates: \n(0)\t/groups/ESS/aalnaim/cmaq/plots\n(0)\t316.8002\n(0)\t262.6003\n(0)\t293.9359\n(0)\t24 265 442\n(0)\t85.06\n(0)\t4.506333333333333\n(0)\t30.63602873848283\n(0)\tgsn_open_wks: Error: directory '/groups/ESS/aalnaim/cmaq/plots/' does not exist or lacks write permissions.\nconvert: unable to open image `/groups/ESS/aalnaim/cmaq/plots/OBS*.png': No such file or directory @ error/blob.c/OpenBlob/2881.\nconvert: no images defined `/groups/ESS/aalnaim/cmaq/plots/Airnow_.gif' @ error/convert.c/ConvertImageCommand/3226.\nGenerating AirNow images/gif Failed!\nRemoving ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\n", + "history_begin_time" : 1656454202290, + "history_end_time" : 1656454210067, + "history_notes" : null, + "history_process" : "is1w3m", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "1mafcnesnf9", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=$(date -d '3 day ago' '+%Y%m%d')\nexport stdate_post=$(date -d '3 day ago' '+%Y-%m-%d') \nexport eddate_post=$(date -d '2 day ago' '+%Y%m%d')\n\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files\"\nexport mcip_dir=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km\"\nexport graph_dir=\"/groups/ESS/aalnaim/cmaq/plots\"\n\nexport obs_dir_NCL=\"/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X\"\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nload 
\"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\n\ndate = getenv(\"YYYYMMDD_POST\") \nd1 = getenv(\"stdate_post\") \nd2 = getenv(\"eddate_post\") \n\nobs_dir = getenv(\"obs_dir_NCL\")\nplot_dir = getenv(\"graph_dir\") \n\nhr=new(24,\"string\")\nhr=(/\"00\",\"01\",\"02\",\"03\",\"04\",\"05\",\"06\",\"07\",\"08\",\"09\",\"10\",\"11\",\"12\",\"13\",\"14\",\"15\",\"16\",\"17\",\"18\",\"19\",\"20\",\"21\",\"22\",\"23\"/)\n\nprint(plot_dir)\naconc_dir = getenv(\"postdata_dir\") \ngrid_dir = getenv(\"mcip_dir\") \n\ncdf_file1 = addfile(aconc_dir+\"/COMBINE3D_ACONC_v531_gcc_AQF5X_\"+date+\"_ML_extracted.nc\",\"r\")\ncdf_file= addfile(grid_dir+\"/GRIDCRO2D_\"+date+\".nc\",\"r\")\ncdf_file2= addfile(grid_dir+\"/METCRO2D_\"+date+\".nc\",\"r\")\n\ntime = cdf_file1->TFLAG(:,0,:)\no3 = cdf_file1->O3(:,:,:) ;ppb\nwspd10=cdf_file2->WSPD10(:,0,:,:)\nwdir10=cdf_file2->WDIR10(:,0,:,:)\n\ntemp = cdf_file2->TEMP2\n\nnt = dimsizes(o3(:,0,0))\nny = dimsizes(o3(0,:,0))\nnx = dimsizes(o3(0,0,:))\n\nprint(max(temp))\nprint(min(temp))\nprint(avg(temp))\n\n\n\nprint(nt+\" \"+ny+\" \"+nx)\nprint(max(o3))\nprint(min(o3))\nprint(avg(o3))\n\nlat = cdf_file->LAT(0,0,:,:)\nlon = cdf_file->LON(0,0,:,:)\n\no3@lat2d = lat\no3@lon2d = lon\no3@unit = \"ppbv\"\n\nUV10=wind_component(wspd10,wdir10,0)\nUV10@lat2d = lat\nUV10@lon2d = lon\n\n\nres = True\nres@gsnMaximize = True ; maximize pot in frame\nres@gsnFrame = False ; dont advance frame\nres@gsnDraw = False\nres@gsnLeftString = \"\"\nres@gsnRightString = \"\"\nres@txFont = \"times-roman\"\nres@tiMainFont = \"times-roman\"\n;res@tiMainFontHeightF = 0.02\n;res@vpWidthF = 0.7\n;res@vpHeightF = 0.7\n\n;;set map;;\nmpres = res\nmpres@mpLimitMode = \"LatLon\"\nmpres@mpDataSetName = \"Earth..4\"\nmpres@mpDataBaseVersion = \"MediumRes\"\nmpres@mpOutlineOn = True\nmpres@mpGeophysicalLineThicknessF = 1.5\nmpres@mpFillDrawOrder = \"PostDraw\"\nmpres@mpFillOn = False\nmpres@mpAreaMaskingOn = True\nmpres@mpOutlineBoundarySets = \"GeophysicalAndUSStates\"\nmpres@mpOutlineSpecifiers = \"United States:States\"\nmpres@mpProjection = \"LambertConformal\"\nmpres@mpLambertParallel1F = 33.\nmpres@mpLambertParallel2F = 45.\nmpres@mpLambertMeridianF = -98.\nmpres@mpMinLonF = -120 ;min(lon)+0.2\nmpres@mpMaxLonF = -70 ;max(lon)-0.2\nmpres@mpMinLatF = 25 ;min(lat)+0.05\nmpres@mpMaxLatF = 50 ;max(lat)-0.05\nmpres@pmTickMarkDisplayMode = \"Always\"\nmpres@mpLandFillColor = \"white\"\nmpres@mpInlandWaterFillColor = \"white\"\nmpres@mpOceanFillColor = \"white\"\nmpres@mpGeophysicalLineColor = \"Black\"\n\n;mpres@lbLabelAutoStride = True\nmpres@tiXAxisFont = 25\nmpres@pmTickMarkDisplayMode = \"Always\"\nmpres@tmXBLabelFont = 25\nmpres@tmXBLabelFontHeightF = 0.013\nmpres@tmXBLabelDeltaF = -0.5\nmpres@tmYLLabelFont = 25\nmpres@tmYLLabelFontHeightF = 0.013\nmpres@tmXBLabelDeltaF = -0.5\nmpres@tmXTLabelsOn = False\nmpres@tmXTLabelFont = 25\nmpres@tmXTLabelFontHeightF = 0.013\nmpres@tmYRLabelsOn = False\nmpres@tmYRLabelFont = 25\nmpres@tmYRLabelFontHeightF = 0.013\n\n;;set contour;;\ncnres = res\ncnres@cnFillDrawOrder = \"PreDraw\"\ncnres@cnFillOn = True\ncnres@cnLinesOn = 
False\ncnres@cnLineLabelsOn = False\ncnres@lbLabelFont = 25\ncnres@lbLabelFontHeightF = 0.013\ncnres@tiXAxisFont = 25\ncnres@pmLabelBarWidthF = 0.5\ncnres@pmLabelBarHeightF = 0.1\n;cnres@pmLabelBarOrthogonalPosF = -0.02\ncnres@lbLabelAutoStride = True\n\n;set vector;;\nres_vc = res\nres_vc@vcGlyphStyle = \"LineArrow\"\nres_vc@vcLineArrowThicknessF = 3\nres_vc@vcMinDistanceF = 0.03\nres_vc@vcRefLengthF = 0.03\nres_vc@vcRefAnnoOn = True\nres_vc@vcRefMagnitudeF = 16\nres_vc@vcRefAnnoString1 = \"16m/s\"\nres_vc@vcRefAnnoSide = \"Top\"\nres_vc@vcRefAnnoString2On = False\nres_vc@vcRefAnnoPerimOn = False\nres_vc@vcRefAnnoOrthogonalPosF = -0.02\nres_vc@vcRefAnnoParallelPosF = 0.999\n;res_vc@vcRefAnnoBackgroundColor = \"White\"\nres_vc@vcVectorDrawOrder = \"PostDraw\"\n\ndo it = 0, nt-1\n if (it .lt. 12) then\n pdate=d1\n else\n pdate=d2\n end if\n\n ;print(time(it,0)+\" \"+time(it,1))\n rundate = yyyyddd_to_yyyymmdd( time(it,0) )\n runtime = hr( tointeger(time(it,1)/10000) )\n\n site = readAsciiTable(obs_dir+\"/AQF5X_Hourly_\"+rundate+runtime+\".dat\",1,\"string\",1)\n nrows = dimsizes(site)\n sitename = str_get_field(site,1,\",\")\n sitelat = stringtofloat(str_get_field(site,2,\",\"))\n sitelon = stringtofloat(str_get_field(site,3,\",\"))\n O3_obs = stringtofloat(str_get_field(site,4,\",\"))\n\n obslon = sitelon(:,0)\n obslat = sitelat(:,0)\n obsO3 = O3_obs(:,0)\n\n npts = nrows(0)\n\n obsO3@_FillValue = -999.\n\n;--- levels for dividing\n levels_O3 = ispan(0,80,4)\n\n nlevels = dimsizes(levels_O3)\n\n colors = span_color_rgba(\"WhiteBlueGreenYellowRed\",nlevels+1)\n\n num_distinct_markers = nlevels+1 ; number of distinct markers\n lat_O3 = new((/num_distinct_markers,npts/),float)\n lon_O3 = new((/num_distinct_markers,npts/),float)\n lat_O3 = -999\n lon_O3 = -999\n\n\n;\n; Group the points according to which range they fall in. 
At the\n; same time, create the label that we will use later in the labelbar\n;\n do i = 0, num_distinct_markers-1\n if (i.eq.0) then\n indexes_O3 = ind(obsO3(:).lt.levels_O3(0))\n end if\n if (i.eq.num_distinct_markers-1) then\n indexes_O3 = ind(obsO3(:).ge.max(levels_O3))\n end if\n if (i.gt.0.and.i.lt.num_distinct_markers-1) then\n indexes_O3 = ind(obsO3(:).ge.levels_O3(i-1).and.obsO3(:).lt.levels_O3(i))\n end if\n\n;\n; Now that we have the set of indexes whose values fall within\n; the given range, take the corresponding lat/lon values and store\n; them, so later we can color this set of markers with the appropriate\n; color.\n;\n if (.not.any(ismissing(indexes_O3))) then\n npts_range_O3 = dimsizes(indexes_O3) ; # of points in this range.\n\n lat_O3(i,0:npts_range_O3-1) = obslat(indexes_O3)\n lon_O3(i,0:npts_range_O3-1) = obslon(indexes_O3)\n ;print(\"O3: \"+npts_range_O3)\n end if\n\n\n delete(indexes_O3) ; Necessary b/c \"indexes\" may be a different\n end do\n\n lat_O3@_FillValue = -999\n lon_O3@_FillValue = -999\n\n gsres = True\n gsres@gsMarkerIndex = 16 ; Use filled dots for markers.\n\n hollowres = True\n hollowres@gsMarkerIndex = 4\n hollowres@gsMarkerColor = \"black\"\n hollowres@gsMarkerSizeF = 0.008\n\n;;;;;;;;; Plot Ozone\n pname=plot_dir+\"/OBS-FORECAST_O3_\"+rundate+runtime\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n pmid_O3 = new(num_distinct_markers,graphic)\n hollow_O3 = new(num_distinct_markers,graphic)\n\n cnres@tiMainString = pdate+\" \"+runtime+\" UTC O~B~3~N~ (ppbV)\"\n cnres@cnLevelSelectionMode = \"ManualLevels\"\n cnres@cnMinLevelValF = 0.\n cnres@cnMaxLevelValF = 80\n cnres@cnLevelSpacingF = 4\n\n ;plot = gsn_csm_contour_map(wks,o3(it,:,:),res)\n map = gsn_csm_map(wks,mpres)\n contour = gsn_csm_contour(wks,o3(it,:,:),cnres)\n vector = gsn_csm_vector(wks,UV10(0,it,:,:),UV10(1,it,:,:),res_vc)\n overlay(map,contour)\n overlay(map,vector)\n\n pmid = new(num_distinct_markers,graphic)\n hollow = new(num_distinct_markers,graphic)\n do i = 0, num_distinct_markers-1\n if (.not.ismissing(lat_O3(i,0)))\n gsres@gsMarkerColor = colors(i,:)\n gsres@gsMarkerSizeF = 0.008\n gsres@gsMarkerThicknessF = 1\n pmid(i) = gsn_add_polymarker(wks,vector,lon_O3(i,:),lat_O3(i,:),gsres)\n hollow(i) = gsn_add_polymarker(wks,vector,lon_O3(i,:),lat_O3(i,:),hollowres)\n end if\n end do\n\n draw(map)\n frame(wks)\n delete(wks)\n delete(pmid_O3)\n delete(hollow_O3)\n system(\"composite -geometry 100x70+900+900 /groups/ESS/aalnaim/cmaq/mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\n\n\n delete(pmid)\n delete(hollow)\n delete(site)\n delete(sitename)\n delete(sitelat)\n delete(sitelon)\n delete(O3_obs)\n delete(obslon)\n delete(obslat)\n delete(obsO3)\n delete([/lon_O3,lat_O3/])\n\nend do\ndelete(res)\n\n;/\n\nend\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\n\nconvert -delay 100 /groups/ESS/aalnaim/cmaq/plots/OBS*.png /groups/ESS/aalnaim/cmaq/plots/Airnow_$YYYYMMDD_POST_correct.gif\n\nif [ $? 
-eq 0 ]; then\n echo \"Generating AirNow images/gif Completed Successfully\"\n\techo \"Removing ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nelse\n echo \"Generating AirNow images/gif Failed!\"\n echo \"Removing ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nfi\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\n\n\nVariable: plot_dir\nType: string\nTotal Size: 8 bytes\n 1 values\nNumber of Dimensions: 1\nDimensions and sizes:\t[1]\nCoordinates: \n(0)\t/groups/ESS/aalnaim/cmaq/plots\n(0)\t316.8002\n(0)\t262.6003\n(0)\t293.9359\n(0)\t24 265 442\n(0)\t85.06\n(0)\t4.506333333333333\n(0)\t30.63602873848283\n(0)\tgsn_open_wks: Error: directory '/groups/ESS/aalnaim/cmaq/plots/' does not exist or lacks write permissions.\nconvert: unable to open image `/groups/ESS/aalnaim/cmaq/plots/OBS*.png': No such file or directory @ error/blob.c/OpenBlob/2881.\nconvert: no images defined `/groups/ESS/aalnaim/cmaq/plots/Airnow_.gif' @ error/convert.c/ConvertImageCommand/3226.\nGenerating AirNow images/gif Failed!\nRemoving ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\n", + "history_begin_time" : 1656454210163, + "history_end_time" : 1656454214584, + "history_notes" : null, + "history_process" : "is1w3m", + "host_id" : "p6wvf2", + "indicator" : "Done" +}] diff --git a/history/DGAiNyR7m5GpH9X3EZ.json b/history/DGAiNyR7m5GpH9X3EZ.json new file mode 100644 index 0000000..a5d9055 --- /dev/null +++ b/history/DGAiNyR7m5GpH9X3EZ.json @@ -0,0 +1,21 @@ +[{ + "history_id" : "n6gawnwlgjz", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 25\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(home + \"/cmaq/prediction_maps/\"):\n for file in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\n\n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave('/groups/ESS/aalnaim/cmaq/prediction.mp4', images, fps=10)\n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n 
img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave(\"/groups/ESS/aalnaim/cmaq/predctionAirNow.mp4\", images, fps=10)\n\n# create .gif from .mp4 using FFmpeg\nos.system('ffmpeg -i /groups/ESS/aalnaim/cmaq/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/prediction.mp4')\n\n\n# create .gif from .mp4 using FFmpeg\nos.system('ffmpeg -i /groups/ESS/aalnaim/cmaq/predctionAirNow.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/predctionAirNow.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/predctionAirNow.mp4')\n", + "history_output" : "/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052600.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052601.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052602.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052603.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052604.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052605.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052606.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052607.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052608.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052609.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052610.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052611.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052612.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052613.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052614.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052615.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052616.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052617.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052618.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052619.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052620.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052621.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052622.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052623.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052700.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052701.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052702.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052703.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052704.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052705.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052706.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052707.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052708.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052709.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052710.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052711.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052712.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052713.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052714.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052715.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052716.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052717.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052718.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052719.ti
f\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052720.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052721.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052722.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052723.tif\nIMAGEIO FFMPEG_WRITER WARNING: input image is not divisible by macro_block_size=16, resizing from (2000, 1800) to (2000, 1808) to ensure video compatibility with most codecs and players. To prevent resizing, make your input image divisible by the macro_block_size or set the macro_block_size to 1 (risking incompatibility).\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052700.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052701.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052702.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052703.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052704.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052705.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052706.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052707.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052708.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052709.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052710.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052711.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052712.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052713.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052714.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052715.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052716.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052717.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052718.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052719.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052720.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052721.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052722.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052723.tif\nIMAGEIO FFMPEG_WRITER WARNING: input image is not divisible by macro_block_size=16, resizing from (2000, 1800) to (2000, 1808) to ensure video compatibility with most codecs and players. 
To prevent resizing, make your input image divisible by the macro_block_size or set the macro_block_size to 1 (risking incompatibility).\nsh: ffmpeg: command not found\nsh: ffmpeg: command not found\n", + "history_begin_time" : 1654542716355, + "history_end_time" : 1654542762828, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "9eib9x5vep4", + "history_input" : "#!/bin/bash\nmodule load ffmpeg", + "history_output" : "Running", + "history_begin_time" : 1654542710848, + "history_end_time" : 1654542714378, + "history_notes" : null, + "history_process" : "ah91af", + "host_id" : "p6wvf2", + "indicator" : "Done" +}] diff --git a/history/JrA7u9H3hPw3UK3Emy.json b/history/JrA7u9H3hPw3UK3Emy.json new file mode 100644 index 0000000..0ca6256 --- /dev/null +++ b/history/JrA7u9H3hPw3UK3Emy.json @@ -0,0 +1,51 @@ +[{ + "history_id" : "gxs47n3fu63", + "history_input" : "\n# Importing necessary libraries\nimport pandas as pd\nimport pickle\nfrom pathlib import Path\nfrom time import sleep\n\n# home directory\nhome = str(Path.home())\n# importing data\nfinal=pd.read_csv(\"/groups/ESS/aalnaim/cmaq/testing.csv\")\nprint(final.head())\nX = final.drop(['YYYYMMDDHH','Latitude','Longitude',],axis=1)\n# defining testing variables\n# processing test data\n\n# load the model from disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rfOLD_Jun13.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\nloaded_model = pickle.load(open(filename, 'rb'))\n\n# making prediction\npred = loaded_model.predict(X)\n\n# adding prediction values to test dataset\nfinal['prediction'] = pred.tolist()\n\nfinal = final[['Latitude', 'Longitude','YYYYMMDDHH','prediction']]\n# saving the dataset into local drive\nfinal.to_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv',index=False)", + "history_output" : " Latitude Longitude YYYYMMDDHH ... month day hours\n0 21.829086 -120.620790 2022062612 ... 6 26 12\n1 21.855751 -120.512500 2022062612 ... 6 26 12\n2 21.882309 -120.404144 2022062612 ... 6 26 12\n3 21.908745 -120.295715 2022062612 ... 6 26 12\n4 21.935051 -120.187225 2022062612 ... 
6 26 12\n\n[5 rows x 18 columns]\n", + "history_begin_time" : 1656451082067, + "history_end_time" : 1656452485876, + "history_notes" : null, + "history_process" : "l8vlic", + "host_id" : "p6wvf2", + "indicator" : "Stopped" +},{ + "history_id" : "tyaio8q7nov", + "history_input" : null, + "history_output" : null, + "history_begin_time" : null, + "history_end_time" : 1656452485883, + "history_notes" : null, + "history_process" : "3asyzj", + "host_id" : "p6wvf2", + "indicator" : "Stopped" +},{ + "history_id" : "b52wrb6e5b0", + "history_input" : null, + "history_output" : null, + "history_begin_time" : null, + "history_end_time" : 1656452485892, + "history_notes" : null, + "history_process" : "iicy7w", + "host_id" : "p6wvf2", + "indicator" : "Stopped" +},{ + "history_id" : "7sp4kvy7u51", + "history_input" : null, + "history_output" : null, + "history_begin_time" : null, + "history_end_time" : 1656452485898, + "history_notes" : null, + "history_process" : "is1w3m", + "host_id" : "p6wvf2", + "indicator" : "Stopped" +},{ + "history_id" : "mmqmr29btkk", + "history_input" : null, + "history_output" : null, + "history_begin_time" : null, + "history_end_time" : 1656452485903, + "history_notes" : null, + "history_process" : "is1w3m", + "host_id" : "p6wvf2", + "indicator" : "Stopped" +}] diff --git a/history/PTOgwZ3PypIaSRU7OQ.json b/history/PTOgwZ3PypIaSRU7OQ.json new file mode 100644 index 0000000..76a94fe --- /dev/null +++ b/history/PTOgwZ3PypIaSRU7OQ.json @@ -0,0 +1,31 @@ +[{ + "history_id" : "gbutef3h8ad", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=$(date -d '3 day ago' '+%Y%m%d') #This needs to be auto date `date -d \"-2 day ${1}\" +%Y%m%d`\nexport stdate_post=$(date -d '3 day ago' '+%Y-%m-%d') #This needs to be auto date\nexport eddate_post=$(date -d '2 day ago' '+%Y-%m-%d') #This needs to be auto date\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files\"\nexport mcip_dir=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km\"\nexport dir_graph=\"/groups/ESS/aalnaim/cmaq/plots\"\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\n\ndate = getenv(\"YYYYMMDD_POST\")\nd1 = getenv(\"stdate_post\")\nd2 = getenv(\"eddate_post\")\n\n;print(\"Passed Date: \"+date)\n\n;aconc_dir = getenv(\"postdata_dir\")\ngrid_dir = getenv(\"mcip_dir\")\nplot_dir = getenv(\"dir_graph\")\n\ncdf_file1 = addfile(\"/groups/ESS/aalnaim/cmaq/prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_\"+date+\"_ML_extracted.nc\",\"r\")\ncdf_file= addfile(grid_dir+\"/GRIDCRO2D_\"+date+\".nc\",\"r\")\n\nptime = (/\"12\",\"13\",\"14\",\"15\",\"16\",\"17\",\"18\",\"19\",\"20\",\"21\",\"22\",\"23\",\"00\",\"01\",\"02\",\"03\",\"04\",\"05\",\"06\",\"07\",\"08\",\"09\",\"10\",\"11\"/)\n\ntime = cdf_file1->TFLAG(:,0,:)\no3 = cdf_file1->O3(:,:,:) ;ppb\n;pm25 = cdf_file1->PM25_TOT(:,0,:,:)\n\n\nnt = dimsizes(o3(:,0,0))\nny = dimsizes(o3(0,:,0))\nnx = dimsizes(o3(0,0,:))\n\nprint(nt+\" \"+ny+\" 
\"+nx)\nprint(max(o3))\nprint(min(o3))\nprint(avg(o3))\n\n;print(max(pm25))\n;print(min(pm25))\n;print(avg(pm25))\n\n;print(time)\n\nlat = cdf_file->LAT(0,0,:,:)\nlon = cdf_file->LON(0,0,:,:)\n\no3@lat2d = lat\no3@lon2d = lon\n\nres = True\nres@gsnMaximize = True ; maximize pot in frame\nres@gsnFrame = False ; don't advance frame\nres@gsnDraw = False\n;res@gsnSpreadColors = True\nres@lbLabelAutoStride = True\n;res@lbBoxLinesOn = False\nres@pmLabelBarHeightF = 0.1\nres@pmLabelBarWidthF = 0.5\nres@cnFillOn=True\n;res@cnMonoFillPattern=True\n;res@cnMonoLineColor=True\nres@cnLinesOn=False\n;res@pmLabelBarDisplayMode=\"never\"\nres@gsnLeftString = \"\";\nres@gsnRightString = \"\"\n\nres@mpLimitMode = \"LatLon\"\nres@mpMinLonF = -120 ;min(lon)+0.2\nres@mpMaxLonF = -70 ;max(lon)-0.2\nres@mpMinLatF = 25 ;min(lat)+0.05\nres@mpMaxLatF = 50 ;max(lat)-0.05\nres@mpDataBaseVersion = \"MediumRes\"\n;res@tiMainString = times(it)\nres@mpDataBaseVersion = \"MediumRes\"\nres@mpDataSetName = \"Earth..4\"\nres@mpAreaMaskingOn = True\nres@mpOutlineBoundarySets = \"GeophysicalAndUSStates\"\nres@mpOutlineSpecifiers=\"United States : States\"\nres@mpLandFillColor = \"white\"\nres@mpInlandWaterFillColor = \"white\"\nres@mpOceanFillColor = \"white\"\nres@mpGeophysicalLineColor = \"Black\"\nres@mpGeophysicalLineThicknessF = 1.5\n\n;res@gsnSpreadColors = True\nres@lbLabelAutoStride = True\nres@lbLabelFont = 25\nres@tiXAxisFont = 25\nres@pmTickMarkDisplayMode = \"Always\"\nres@tmXBLabelFont = 25\nres@tmXBLabelFontHeightF = 0.013\nres@tmXBLabelDeltaF = -0.5\nres@tmYLLabelFont = 25\nres@tmYLLabelFontHeightF = 0.013\nres@tmXBLabelDeltaF = -0.5\nres@tmXTLabelsOn = False\nres@tmXTLabelFont = 25\nres@tmXTLabelFontHeightF = 0.013\nres@tmYRLabelsOn = False\nres@tmYRLabelFont = 25\nres@tmYRLabelFontHeightF = 0.013\n\n\nres@mpProjection = \"LambertConformal\" ;\"CylindricalEquidistant\"\nres@mpLambertParallel1F = 33.\nres@mpLambertParallel2F = 45.\nres@mpLambertMeridianF = -98.\n\nres@cnLevelSelectionMode = \"ManualLevels\"\nres@cnMinLevelValF = 0.\nres@cnMaxLevelValF = 80\nres@cnLevelSpacingF = 4\n\nres@txFont = \"times-roman\"\nres@tiMainFont = \"times-roman\"\n\ndo it = 0, nt-1\n if (it .lt. 12) then\n pdate=d1\n else\n pdate=d2\n end if\n\n pname=plot_dir+\"/testPlot_\"+pdate+\"_\"+ptime(it)\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n res@tiMainString = pdate+\" \"+ptime(it)+\" UTC O~B~3~N~ Forecast (ppbV)\"\n plot = gsn_csm_contour_map(wks,o3(it,:,:),res)\n draw(plot)\n frame(wks)\n delete(wks)\n system(\"composite -geometry 100x70+900+900 /groups/ESS/aalnaim/cmaq/mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\nend do\ndelete(res)\n\nend\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\n\n# convert -delay 100 *.png 20220613_20220614.gif\nconvert -delay 100 /groups/ESS/aalnaim/cmaq/plots/testPlot*.png /groups/ESS/aalnaim/cmaq/plots/\"Map_\"$YYYYMMDD_POST.gif\n\nif [ $? 
-eq 0 ]; then\n echo \"Generating images/gif Completed Successfully\"\n\techo \"Removing ncl file: geoweaver_plot_daily_O3.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\nelse\n echo \"Generating images/gif Failed!\"\n echo \"Removing ncl file: geoweaver_plot_daily_O3.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\nfi\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\n(0)\t24 265 442\n(0)\t85.06\n(0)\t4.506333333333333\n(0)\t30.63602873848283\nGenerating images/gif Completed Successfully\nRemoving ncl file: geoweaver_plot_daily_O3.ncl...\n", + "history_begin_time" : 1656455485471, + "history_end_time" : 1656455761971, + "history_notes" : null, + "history_process" : "iicy7w", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "7gulavmvp4p", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=$(date -d '3 day ago' '+%Y%m%d')\nexport stdate_post=$(date -d '3 day ago' '+%Y-%m-%d') \nexport eddate_post=$(date -d '2 day ago' '+%Y-%m-%d')\n\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files\"\nexport mcip_dir=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km\"\nexport graph_dir=\"/groups/ESS/aalnaim/cmaq/plots\"\n\nexport obs_dir_NCL=\"/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X\"\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\n\ndate = getenv(\"YYYYMMDD_POST\") \nd1 = getenv(\"stdate_post\") \nd2 = getenv(\"eddate_post\") \n\nobs_dir = getenv(\"obs_dir_NCL\")\nplot_dir = getenv(\"graph_dir\") \n\nhr=new(24,\"string\")\nhr=(/\"00\",\"01\",\"02\",\"03\",\"04\",\"05\",\"06\",\"07\",\"08\",\"09\",\"10\",\"11\",\"12\",\"13\",\"14\",\"15\",\"16\",\"17\",\"18\",\"19\",\"20\",\"21\",\"22\",\"23\"/)\n\nprint(plot_dir)\naconc_dir = getenv(\"postdata_dir\") \ngrid_dir = getenv(\"mcip_dir\") \n\ncdf_file1 = addfile(aconc_dir+\"/COMBINE3D_ACONC_v531_gcc_AQF5X_\"+date+\"_ML_extracted.nc\",\"r\")\ncdf_file= addfile(grid_dir+\"/GRIDCRO2D_\"+date+\".nc\",\"r\")\ncdf_file2= addfile(grid_dir+\"/METCRO2D_\"+date+\".nc\",\"r\")\n\ntime = cdf_file1->TFLAG(:,0,:)\no3 = cdf_file1->O3(:,:,:) ;ppb\nwspd10=cdf_file2->WSPD10(:,0,:,:)\nwdir10=cdf_file2->WDIR10(:,0,:,:)\n\ntemp = cdf_file2->TEMP2\n\nnt = dimsizes(o3(:,0,0))\nny = dimsizes(o3(0,:,0))\nnx = dimsizes(o3(0,0,:))\n\nprint(max(temp))\nprint(min(temp))\nprint(avg(temp))\n\n\n\nprint(nt+\" \"+ny+\" \"+nx)\nprint(max(o3))\nprint(min(o3))\nprint(avg(o3))\n\nlat = cdf_file->LAT(0,0,:,:)\nlon = cdf_file->LON(0,0,:,:)\n\no3@lat2d = lat\no3@lon2d = lon\no3@unit = \"ppbv\"\n\nUV10=wind_component(wspd10,wdir10,0)\nUV10@lat2d = lat\nUV10@lon2d = lon\n\n\nres = True\nres@gsnMaximize = True ; maximize pot in frame\nres@gsnFrame = False ; dont advance frame\nres@gsnDraw = False\nres@gsnLeftString = 
\"\"\nres@gsnRightString = \"\"\nres@txFont = \"times-roman\"\nres@tiMainFont = \"times-roman\"\n;res@tiMainFontHeightF = 0.02\n;res@vpWidthF = 0.7\n;res@vpHeightF = 0.7\n\n;;set map;;\nmpres = res\nmpres@mpLimitMode = \"LatLon\"\nmpres@mpDataSetName = \"Earth..4\"\nmpres@mpDataBaseVersion = \"MediumRes\"\nmpres@mpOutlineOn = True\nmpres@mpGeophysicalLineThicknessF = 1.5\nmpres@mpFillDrawOrder = \"PostDraw\"\nmpres@mpFillOn = False\nmpres@mpAreaMaskingOn = True\nmpres@mpOutlineBoundarySets = \"GeophysicalAndUSStates\"\nmpres@mpOutlineSpecifiers = \"United States:States\"\nmpres@mpProjection = \"LambertConformal\"\nmpres@mpLambertParallel1F = 33.\nmpres@mpLambertParallel2F = 45.\nmpres@mpLambertMeridianF = -98.\nmpres@mpMinLonF = -120 ;min(lon)+0.2\nmpres@mpMaxLonF = -70 ;max(lon)-0.2\nmpres@mpMinLatF = 25 ;min(lat)+0.05\nmpres@mpMaxLatF = 50 ;max(lat)-0.05\nmpres@pmTickMarkDisplayMode = \"Always\"\nmpres@mpLandFillColor = \"white\"\nmpres@mpInlandWaterFillColor = \"white\"\nmpres@mpOceanFillColor = \"white\"\nmpres@mpGeophysicalLineColor = \"Black\"\n\n;mpres@lbLabelAutoStride = True\nmpres@tiXAxisFont = 25\nmpres@pmTickMarkDisplayMode = \"Always\"\nmpres@tmXBLabelFont = 25\nmpres@tmXBLabelFontHeightF = 0.013\nmpres@tmXBLabelDeltaF = -0.5\nmpres@tmYLLabelFont = 25\nmpres@tmYLLabelFontHeightF = 0.013\nmpres@tmXBLabelDeltaF = -0.5\nmpres@tmXTLabelsOn = False\nmpres@tmXTLabelFont = 25\nmpres@tmXTLabelFontHeightF = 0.013\nmpres@tmYRLabelsOn = False\nmpres@tmYRLabelFont = 25\nmpres@tmYRLabelFontHeightF = 0.013\n\n;;set contour;;\ncnres = res\ncnres@cnFillDrawOrder = \"PreDraw\"\ncnres@cnFillOn = True\ncnres@cnLinesOn = False\ncnres@cnLineLabelsOn = False\ncnres@lbLabelFont = 25\ncnres@lbLabelFontHeightF = 0.013\ncnres@tiXAxisFont = 25\ncnres@pmLabelBarWidthF = 0.5\ncnres@pmLabelBarHeightF = 0.1\n;cnres@pmLabelBarOrthogonalPosF = -0.02\ncnres@lbLabelAutoStride = True\n\n;set vector;;\nres_vc = res\nres_vc@vcGlyphStyle = \"LineArrow\"\nres_vc@vcLineArrowThicknessF = 3\nres_vc@vcMinDistanceF = 0.03\nres_vc@vcRefLengthF = 0.03\nres_vc@vcRefAnnoOn = True\nres_vc@vcRefMagnitudeF = 16\nres_vc@vcRefAnnoString1 = \"16m/s\"\nres_vc@vcRefAnnoSide = \"Top\"\nres_vc@vcRefAnnoString2On = False\nres_vc@vcRefAnnoPerimOn = False\nres_vc@vcRefAnnoOrthogonalPosF = -0.02\nres_vc@vcRefAnnoParallelPosF = 0.999\n;res_vc@vcRefAnnoBackgroundColor = \"White\"\nres_vc@vcVectorDrawOrder = \"PostDraw\"\n\ndo it = 0, nt-1\n if (it .lt. 12) then\n pdate=d1\n else\n pdate=d2\n end if\n\n ;print(time(it,0)+\" \"+time(it,1))\n rundate = yyyyddd_to_yyyymmdd( time(it,0) )\n runtime = hr( tointeger(time(it,1)/10000) )\n\n site = readAsciiTable(obs_dir+\"/AQF5X_Hourly_\"+rundate+runtime+\".dat\",1,\"string\",1)\n nrows = dimsizes(site)\n sitename = str_get_field(site,1,\",\")\n sitelat = stringtofloat(str_get_field(site,2,\",\"))\n sitelon = stringtofloat(str_get_field(site,3,\",\"))\n O3_obs = stringtofloat(str_get_field(site,4,\",\"))\n\n obslon = sitelon(:,0)\n obslat = sitelat(:,0)\n obsO3 = O3_obs(:,0)\n\n npts = nrows(0)\n\n obsO3@_FillValue = -999.\n\n;--- levels for dividing\n levels_O3 = ispan(0,80,4)\n\n nlevels = dimsizes(levels_O3)\n\n colors = span_color_rgba(\"WhiteBlueGreenYellowRed\",nlevels+1)\n\n num_distinct_markers = nlevels+1 ; number of distinct markers\n lat_O3 = new((/num_distinct_markers,npts/),float)\n lon_O3 = new((/num_distinct_markers,npts/),float)\n lat_O3 = -999\n lon_O3 = -999\n\n\n;\n; Group the points according to which range they fall in. 
At the\n; same time, create the label that we will use later in the labelbar\n;\n do i = 0, num_distinct_markers-1\n if (i.eq.0) then\n indexes_O3 = ind(obsO3(:).lt.levels_O3(0))\n end if\n if (i.eq.num_distinct_markers-1) then\n indexes_O3 = ind(obsO3(:).ge.max(levels_O3))\n end if\n if (i.gt.0.and.i.lt.num_distinct_markers-1) then\n indexes_O3 = ind(obsO3(:).ge.levels_O3(i-1).and.obsO3(:).lt.levels_O3(i))\n end if\n\n;\n; Now that we have the set of indexes whose values fall within\n; the given range, take the corresponding lat/lon values and store\n; them, so later we can color this set of markers with the appropriate\n; color.\n;\n if (.not.any(ismissing(indexes_O3))) then\n npts_range_O3 = dimsizes(indexes_O3) ; # of points in this range.\n\n lat_O3(i,0:npts_range_O3-1) = obslat(indexes_O3)\n lon_O3(i,0:npts_range_O3-1) = obslon(indexes_O3)\n ;print(\"O3: \"+npts_range_O3)\n end if\n\n\n delete(indexes_O3) ; Necessary b/c \"indexes\" may be a different\n end do\n\n lat_O3@_FillValue = -999\n lon_O3@_FillValue = -999\n\n gsres = True\n gsres@gsMarkerIndex = 16 ; Use filled dots for markers.\n\n hollowres = True\n hollowres@gsMarkerIndex = 4\n hollowres@gsMarkerColor = \"black\"\n hollowres@gsMarkerSizeF = 0.008\n\n;;;;;;;;; Plot Ozone\n pname=plot_dir+\"/OBS-FORECAST_O3_\"+rundate+runtime\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n pmid_O3 = new(num_distinct_markers,graphic)\n hollow_O3 = new(num_distinct_markers,graphic)\n\n cnres@tiMainString = pdate+\" \"+runtime+\" UTC O~B~3~N~ (ppbV)\"\n cnres@cnLevelSelectionMode = \"ManualLevels\"\n cnres@cnMinLevelValF = 0.\n cnres@cnMaxLevelValF = 80\n cnres@cnLevelSpacingF = 4\n\n ;plot = gsn_csm_contour_map(wks,o3(it,:,:),res)\n map = gsn_csm_map(wks,mpres)\n contour = gsn_csm_contour(wks,o3(it,:,:),cnres)\n vector = gsn_csm_vector(wks,UV10(0,it,:,:),UV10(1,it,:,:),res_vc)\n overlay(map,contour)\n overlay(map,vector)\n\n pmid = new(num_distinct_markers,graphic)\n hollow = new(num_distinct_markers,graphic)\n do i = 0, num_distinct_markers-1\n if (.not.ismissing(lat_O3(i,0)))\n gsres@gsMarkerColor = colors(i,:)\n gsres@gsMarkerSizeF = 0.008\n gsres@gsMarkerThicknessF = 1\n pmid(i) = gsn_add_polymarker(wks,vector,lon_O3(i,:),lat_O3(i,:),gsres)\n hollow(i) = gsn_add_polymarker(wks,vector,lon_O3(i,:),lat_O3(i,:),hollowres)\n end if\n end do\n\n draw(map)\n frame(wks)\n delete(wks)\n delete(pmid_O3)\n delete(hollow_O3)\n system(\"composite -geometry 100x70+900+900 /groups/ESS/aalnaim/cmaq/mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\n\n\n delete(pmid)\n delete(hollow)\n delete(site)\n delete(sitename)\n delete(sitelat)\n delete(sitelon)\n delete(O3_obs)\n delete(obslon)\n delete(obslat)\n delete(obsO3)\n delete([/lon_O3,lat_O3/])\n\nend do\ndelete(res)\n\n;/\n\nend\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\n\nconvert -delay 100 /groups/ESS/aalnaim/cmaq/plots/OBS*.png /groups/ESS/aalnaim/cmaq/plots/\"Airnow_\"$YYYYMMDD_POST.gif\n\nif [ $? 
-eq 0 ]; then\n echo \"Generating AirNow images/gif Completed Successfully\"\n\techo \"Removing ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nelse\n echo \"Generating AirNow images/gif Failed!\"\n echo \"Removing ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nfi\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\n\n\nVariable: plot_dir\nType: string\nTotal Size: 8 bytes\n 1 values\nNumber of Dimensions: 1\nDimensions and sizes:\t[1]\nCoordinates: \n(0)\t/groups/ESS/aalnaim/cmaq/plots\n(0)\t316.8002\n(0)\t262.6003\n(0)\t293.9359\n(0)\t24 265 442\n(0)\t85.06\n(0)\t4.506333333333333\n(0)\t30.63602873848283\nGenerating AirNow images/gif Completed Successfully\nRemoving ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\n", + "history_begin_time" : 1656455485201, + "history_end_time" : 1656455769632, + "history_notes" : null, + "history_process" : "is1w3m", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "1om9blf3b9v", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=$(date -d '3 day ago' '+%Y%m%d')\nexport wfname=\"/groups/ESS/aalnaim/cmaq/results/geoweaver_evalution_\"$YYYYMMDD_POST\"_results.txt\"\n\nexport obs_dir_NCL=\"/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X\"\nexport ofname=\"/AQF5X_Hourly_\"\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files/\"\n\nexport mfname=\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"$YYYYMMDD_POST\"_ML_extracted.nc\"\n\nexport grid_fname=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/GRIDCRO2D_\"$YYYYMMDD_POST\".nc\" #This needs to be auto date\n\nexport dx=12000\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\n\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\nsdate=getenv(\"YYYYMMDD_POST\")\nwfname=getenv(\"wfname\")\nobs_dir=getenv(\"obs_dir_NCL\")\nofname=getenv(\"ofname\")\nmod_dir=getenv(\"postdata_dir\")\nmfname=getenv(\"mfname\")\ndkm=tofloat(getenv(\"dx\"))\ngrid_fname=(getenv(\"grid_fname\"))\n\nmaxdist=dkm/90000.0*1.414\nmaxarea=0.25\nthd=70\n\n;-----read model lat lon------\n;read lat lon\nf1 = addfile(grid_fname,\"r\")\nmlat = f1->LAT(0,0,:,:)\nmlon = f1->LON(0,0,:,:)\ndelete(f1)\nmlat1d = ndtooned(mlat)\nmlon1d = ndtooned(mlon)\ndelete([/mlat,mlon/])\n\n;-----read cmaq results-----\nf2 = addfile(mod_dir+mfname,\"r\")\nmO3 = f2->O3(:,:,:) ;ppb\n\n\nnt = dimsizes(mO3(:,0,0))\nny = dimsizes(mO3(0,:,0))\nnx = dimsizes(mO3(0,0,:))\n\nm8O3 = new((/17,ny,nx/),\"double\")\nm8maxO3 = new((/ny,nx/),\"double\")\n\ndo ih=0,16\n m8O3(ih,:,:)=dim_avg_n(mO3(ih:ih+7,:,:),0)\nend do\nm8maxO3 = dim_max_n(m8O3,0) ;type double\nmO31d_d=ndtooned(m8maxO3) ; type double\nmO31d=tofloat(mO31d_d)\n\ndelete([/f2,mO3,m8O3,m8maxO3/])\n\n;-----read 
obs-----\nsyyyy1=str_get_cols(sdate,0,3)\nsmm1=str_get_cols(sdate,4,5)\nsdd1=str_get_cols(sdate,6,7)\n\nymd=jul2greg(greg2jul(tointeger(syyyy1),tointeger(smm1),tointeger(sdd1),-1)+1)\nsyyyy2=tostring_with_format(ymd(0),\"%0.4i\")\nsmm2=tostring_with_format(ymd(1),\"%0.2i\")\nsdd2=tostring_with_format(ymd(2),\"%0.2i\")\n\ntolat=(/-999.0/) ;set the first data to 0\ntolon=tolat\ntoO3=tolat\n\ndo ih=12,35\n if (ih.lt.24) then\n shh=tostring_with_format(ih,\"%0.2i\")\n syyyy=syyyy1\n smm=smm1\n sdd=sdd1\n else\n shh=tostring_with_format(ih-24,\"%0.2i\")\n syyyy=syyyy2\n smm=smm2\n sdd=sdd2\n end if\n data=asciiread(obs_dir+ofname+syyyy+smm+sdd+shh+\".dat\",-1,\"string\")\n xx=array_append_record(tolat,stringtofloat(str_get_field(data(1::), 2,\",\")),0)\n yy=array_append_record(tolon,stringtofloat(str_get_field(data(1::), 3,\",\")),0)\n zz=array_append_record(toO3,stringtofloat(str_get_field(data(1::), 4,\",\")),0)\n delete([/tolat,tolon,toO3/])\n tolat=xx\n tolon=yy\n toO3=zz\n delete([/xx,yy,zz/])\n delete(data)\nend do\n\ntoO3@_FillValue = -999.0\n\n;-----calculate max ave 8 hour o3-----\noflag=tolat*0+1\naa=ind((oflag.gt.0).and.(toO3.ge.0))\nii=0\nprint(\"8h start\")\nif (any(ismissing(aa))) then\n iflag=0\nelse\n iflag=1\n olat=(/tolat(aa(0))/)\n olon=(/tolon(aa(0))/)\n oO3=(/-999.0/)\n o8O3 = new(17,\"float\")\n o8O3 = -999.0\nend if\ndelete(aa)\ndo while (iflag.gt.0)\n aa=ind((tolat.eq.olat(ii)).and.(tolon.eq.olon(ii)).and.(toO3.ge.0))\n oflag(aa)=0\n if (dimsizes(aa).eq.24) then ; calculate 24 h, so calculate 8hr ozone here\n do ih = 0, 16\n o8O3(ih) = avg(toO3(aa(ih:ih+7)))\n end do\n oO3(ii)=max(o8O3)\n end if\n o8O3 = -999.0\n delete(aa)\n aa=ind((oflag.gt.0).and.(toO3.ge.0))\n if (any(ismissing(aa))) then\n iflag=0\n else\n xx=array_append_record(olat,(/tolat(aa(0))/),0)\n yy=array_append_record(olon,(/tolon(aa(0))/),0)\n zz=array_append_record(oO3,(/-999.0/),0)\n delete([/olat,olon,oO3/])\n olat=xx\n olon=yy\n oO3=zz\n delete([/xx,yy,zz/])\n ii=ii+1\n end if\n delete(aa)\nend do\nprint(\"obs 8hour max end\")\naa=ind(oO3.ge.0)\nnobs=dimsizes(aa)\nolat24=olat(aa)\nolon24=olon(aa)\noO324=oO3(aa)\nprint(\"TYPE of oO324: \"+typeof(oO324))\ndelete([/aa,olat,olon,oO3/])\nmO324=oO324*0-999.0\nprint(\"TYPE of mO324: \"+typeof(mO324))\nprint(\"TYPE of mO31d: \"+typeof(mO31d))\nareaa=oO324*0-999.0\nareab=areaa\naread=areaa\n\n;-----find model point-----\ndo in=0,nobs-1\n dis=sqrt((mlat1d-olat24(in))^2+(mlon1d-olon24(in))^2)\n aa=minind(dis)\n ;print(in+\" \"+aa)\n if (dis(aa).lt.maxdist) then\n mO324(in)=mO31d(aa)\n cc=ind((mlat1d.ge.(olat24(in)-maxarea)).and.(mlat1d.le.(olat24(in)+maxarea)).and.\\\n (mlon1d.ge.(olon24(in)-maxarea)).and.(mlon1d.le.(olon24(in)+maxarea)))\n areaa(in)=0\n areab(in)=0\n if (oO324(in).ge.thd) then\n aread(in)=0\n if (max(mO31d(cc)).ge.thd) then\n areab(in)=1\n else\n aread(in)=1\n end if\n else\n bb=ind((olat24.ge.(olat24(in)-maxarea)).and.(olat24.le.(olat24(in)+maxarea)).and.\\\n (olon24.ge.(olon24(in)-maxarea)).and.(olon24.le.(olon24(in)+maxarea)))\n if (max(mO31d(aa)).ge.thd) then\n if (max(oO324(bb)).ge.thd) then\n areaa(in)=0\n else\n areaa(in)=1\n end if\n else\n areaa(in)=0\n end if\n delete(bb)\n end if\n delete(cc)\n end if\n delete(aa)\nend do\n\n;-----cal rmse corr nme nmb me mb-----\ntt=ind((mO324.ge.0).and.(oO324.ge.0))\n\nif (any(ismissing(tt))) then\n rmse=-999.0\n corr=-999.0\n nmb=-999.0\n nme=-999.0\n me=-999.0\n mb=-999.0\nelse\n rmse=dim_rmsd_n(oO324(tt),mO324(tt),0)\n corr=esccr(oO324(tt),mO324(tt),0)\n 
nmb=sum((mO324(tt)-oO324(tt)))/sum(oO324(tt))\n nme=sum(abs(oO324(tt)-mO324(tt)))/sum(oO324(tt))\n me=avg(abs(oO324(tt)-mO324(tt)))\n mb=avg((mO324(tt)-oO324(tt)))\nend if\n;-----cal ah afar-----\naa=ind((areaa+areab).gt.0)\nbb=ind((aread+areab).gt.0)\nif (any(ismissing(aa))) then\n afar=0.\nelse\n afar=tofloat(sum(areaa(aa)))/tofloat(sum(areab(aa))+sum(areaa(aa)))*100\nend if\ndelete(aa)\nif (any(ismissing(bb))) then\n ah=-999.0\nelse\n ah=tofloat(sum(areab(bb)))/tofloat(sum(areab(bb))+sum(aread(bb)))*100\nend if\ndelete(bb)\nwrite_table(wfname,\"a\",[/sdate,dimsizes(tt),avg(oO324(tt)),avg(mO324(tt)),rmse,corr,nmb,nme,mb,me,ah,afar/],\\\n \"%s,%i,%f,%f,%f,%f,%f,%f,%f,%f,%f,%f\")\ndelete(tt)\nend\n\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\n\nif [ $? -eq 0 ]; then\n echo \"Evaluation Completed Successfully\"\n\techo \"Removing ncl file: geoweaver_eva_daily_O3.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\nelse\n echo \"Evaluation Failed!\"\nfi\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\nfatal:asciiread: Unable to open input file (/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X/AQF5X_Hourly_2022062512.dat)\n\nfatal:[\"Execute.c\":8637]:Execute: Error occurred at or near line 81 in file /groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\n\n\nEvaluation Completed Successfully\nRemoving ncl file: geoweaver_eva_daily_O3.ncl...\n", + "history_begin_time" : 1656455770012, + "history_end_time" : 1656455774247, + "history_notes" : null, + "history_process" : "fsk7f2", + "host_id" : "p6wvf2", + "indicator" : "Done" +}] diff --git a/history/YorzGVBJ7a4NxCUzbh.json b/history/YorzGVBJ7a4NxCUzbh.json new file mode 100644 index 0000000..0a7bc9f --- /dev/null +++ b/history/YorzGVBJ7a4NxCUzbh.json @@ -0,0 +1,71 @@ +[{ + "history_id" : "qaas9oll99a", + "history_input" : "\n# Importing necessary libraries\nimport pandas as pd\nimport pickle\nfrom pathlib import Path\nfrom time import sleep\n\n# home directory\nhome = str(Path.home())\n# importing data\nfinal=pd.read_csv(\"/groups/ESS/aalnaim/cmaq/testing.csv\")\nprint(final.head())\nX = final.drop(['YYYYMMDDHH','Latitude','Longitude',],axis=1)\n# defining testing variables\n# processing test data\n\n# load the model from disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rfOLD_Jun13.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\nloaded_model = pickle.load(open(filename, 'rb'))\n\n# making prediction\npred = loaded_model.predict(X)\n\n# adding prediction values to test dataset\nfinal['prediction'] = pred.tolist()\n\nfinal = final[['Latitude', 'Longitude','YYYYMMDDHH','prediction']]\n# saving the dataset into local drive\nfinal.to_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv',index=False)", + "history_output" : " Latitude Longitude YYYYMMDDHH ... month day hours\n0 21.829086 -120.620790 2022062612 ... 6 26 12\n1 21.855751 -120.512500 2022062612 ... 6 26 12\n2 21.882309 -120.404144 2022062612 ... 6 26 12\n3 21.908745 -120.295715 2022062612 ... 6 26 12\n4 21.935051 -120.187225 2022062612 ... 
6 26 12\n\n[5 rows x 18 columns]\n", + "history_begin_time" : 1656428855032, + "history_end_time" : 1656450968023, + "history_notes" : null, + "history_process" : "l8vlic", + "host_id" : "p6wvf2", + "indicator" : "Stopped" +},{ + "history_id" : "d4w6nlhuaxi", + "history_input" : null, + "history_output" : null, + "history_begin_time" : null, + "history_end_time" : 1656450968039, + "history_notes" : null, + "history_process" : "3asyzj", + "host_id" : "p6wvf2", + "indicator" : "Stopped" +},{ + "history_id" : "99qp90f5zc2", + "history_input" : null, + "history_output" : null, + "history_begin_time" : null, + "history_end_time" : 1656450968050, + "history_notes" : null, + "history_process" : "iicy7w", + "host_id" : "p6wvf2", + "indicator" : "Stopped" +},{ + "history_id" : "71qdytphclv", + "history_input" : null, + "history_output" : null, + "history_begin_time" : null, + "history_end_time" : 1656450968055, + "history_notes" : null, + "history_process" : "is1w3m", + "host_id" : "p6wvf2", + "indicator" : "Stopped" +},{ + "history_id" : "qc0f1msqrxb", + "history_input" : null, + "history_output" : null, + "history_begin_time" : null, + "history_end_time" : 1656450968059, + "history_notes" : null, + "history_process" : "is1w3m", + "host_id" : "p6wvf2", + "indicator" : "Stopped" +},{ + "history_id" : "orxddctze9j", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\nfrom datetime import timedelta\n# home directory\nhome = str(Path.home())\n\nbase = datetime.datetime.today() - timedelta(days=2)\ndate_list = [base - timedelta(days=x) for x in range(2)]\ndays = [date.strftime('%Y%m%d') for date in date_list]\n\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# O3 variable\n \t# O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n \n # read emission results\n # old files before 20210315 are not in diractory. 
must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220303:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313 and int(i)<=20220331:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ff.append(o3tp)\n\n # NO\n# oo=df.variables['NO'][:].values[k,0]\n# oo3=np.ravel(oo)\n# o3tp=np.transpose(oo3)\n# o3tp=np.round(o3tp)\n# hh.append(o3tp) \n \n# read mcip results \n# date must be later of 20210101\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\n for j in files:\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n \t# NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n\n \t# NO2\n oo=df.variables['CFRAC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n \n \ncmaq_O3=list(np.concatenate(aa).flat) \nprint(len(cmaq_O3))\ndel aa\ncmaq_NO2=list(np.concatenate(bb).flat) \nprint(len(cmaq_NO2))\ndel bb\ncmaq_CO=list(np.concatenate(cc).flat) \nprint(len(cmaq_CO))\ndel cc\n\ncmaq_PM25_CO=list(np.concatenate(ee).flat)\n\ndel ee\nCO_emi=list(np.concatenate(ff).flat) \nprint(len(CO_emi))\ndel ff\n\n#NO_emi=list(np.concatenate(hh).flat) \n#del hh\nPRSFC=list(np.concatenate(ii).flat) \ndel ii\nPBL=list(np.concatenate(jj).flat) \ndel jj\nTEMP2=list(np.concatenate(kk).flat) \ndel kk\nWSPD10=list(np.concatenate(ll).flat) \ndel ll\nWDIR10=list(np.concatenate(mm).flat)\ndel mm\n\nRGRND=list(np.concatenate(oo1).flat) \ndel oo1\n#RN=list(np.concatenate(pp).flat)\n#del pp\n#RC=list(np.concatenate(qq).flat)\n#del qq\nCFRAC=list(np.concatenate(rr).flat)\nprint(len(CFRAC))\ndel rr\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 values if the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime 
date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH,'CMAQ12KM_O3(ppb)':cmaq_O3,'CMAQ12KM_NO2(ppb)':cmaq_NO2,'CMAQ12KM_CO(ppm)':cmaq_CO,'CMAQ_OC(ug/m3)':cmaq_PM25_CO,'CO(moles/s)':CO_emi,'PRSFC(Pa)':PRSFC,'PBL(m)':PBL,'TEMP2(K)':TEMP2,'WSPD10(m/s)':WSPD10,'WDIR10(degree)':WDIR10,'RGRND(W/m2)':RGRND,'CFRAC':CFRAC})\nprint(dat.head())\ndat.to_csv('/groups/ESS/aalnaim/cmaq/test_data.csv',index=False)\n\n\n", + "history_output" : "20220626\n20220625\n5622240\n5622240\n5622240\n5622240\n5622240\n5622240\n5622240\n5622240\n Latitude Longitude YYYYMMDDHH ... WDIR10(degree) RGRND(W/m2) CFRAC\n0 21.829086 -120.620789 2022062612 ... 9.0 592.0 0.0\n1 21.855751 -120.512497 2022062612 ... 9.0 590.0 0.0\n2 21.882309 -120.404144 2022062612 ... 9.0 589.0 0.0\n3 21.908745 -120.295715 2022062612 ... 9.0 587.0 0.0\n4 21.935051 -120.187225 2022062612 ... 8.0 585.0 0.0\n\n[5 rows x 15 columns]\n", + "history_begin_time" : 1656428452856, + "history_end_time" : 1656450968066, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : "p6wvf2", + "indicator" : "Stopped" +},{ + "history_id" : "h2sxykdhd89", + "history_input" : "import pandas as pd\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\ncmaq=pd.read_csv(\"/groups/ESS/aalnaim/cmaq/test_data.csv\")\n\n# dropping unnecessary variables\ncmaq['YYYYMMDDHH'] = cmaq['YYYYMMDDHH'].map(str)\ncmaq['month'] = cmaq['YYYYMMDDHH'].str[4:6]\ncmaq['day'] = cmaq['YYYYMMDDHH'].str[6:8]\ncmaq['hours'] = cmaq['YYYYMMDDHH'].str[8:10]\n\n#new_df=cmaq.drop(['YYYYMMDDHH'],axis=1)\ncmaq.to_csv(\"/groups/ESS/aalnaim/cmaq/testing.csv\",index=False)", + "history_output" : "Running", + "history_begin_time" : 1656428784905, + "history_end_time" : 1656450968069, + "history_notes" : null, + "history_process" : "b8uv5z", + "host_id" : "p6wvf2", + "indicator" : "Stopped" +}] diff --git a/history/m06UzLLG9xtjIwz4E4.json b/history/m06UzLLG9xtjIwz4E4.json new file mode 100644 index 0000000..ac2d72d --- /dev/null +++ b/history/m06UzLLG9xtjIwz4E4.json @@ -0,0 +1,21 @@ +[{ + "history_id" : "5thy8s0bg8x", + "history_input" : "\n# Importing necessary libraries\nimport pandas as pd\nimport pickle\nfrom pathlib import Path\nfrom time import sleep\n\n# home directory\nhome = str(Path.home())\n# importing data\nfinal=pd.read_csv(home+'/cmaq/testing.csv')\nprint(final.head())\nX = final.drop(['YYYYMMDDHH','Latitude','Longitude',],axis=1)\n# defining testing variables\n# processing test data\n\n# load the model from disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\nloaded_model = pickle.load(open(filename, 'rb'))\n\n# making prediction\npred = loaded_model.predict(X)\n\n# adding prediction values to test dataset\nfinal['prediction'] = pred.tolist()\n\nfinal = final[['Latitude', 'Longitude','YYYYMMDDHH','prediction']]\n# saving the dataset into local drive\nfinal.to_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv',index=False)", + "history_output" : " Latitude Longitude YYYYMMDDHH ... month day hours\n0 21.829086 -120.620790 2022052712 ... 5 27 12\n1 21.855751 -120.512500 2022052712 ... 
5 27 12\n2 21.882309 -120.404144 2022052712 ... 5 27 12\n3 21.908745 -120.295715 2022052712 ... 5 27 12\n4 21.935051 -120.187225 2022052712 ... 5 27 12\n\n[5 rows x 18 columns]\n", + "history_begin_time" : 1654493107576, + "history_end_time" : 1654500507415, + "history_notes" : null, + "history_process" : "l8vlic", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "2x6aznc7yzz", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 25\nos.system('module load ffmpeg') # Uncomment if running on HOPPER\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(home + \"/cmaq/prediction_maps/\"):\n for file in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\n\n\nfor t in time_:\n \n df = dfs[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = gdf.set_crs(\"EPSG:4326\")\n #gdf = gdf.to_crs(\"+proj=lcc +lat_1=2 +lat_2=33.000 +lat_0=45.000 +lon_0=-97.000 +x_0=-97.000 +y_0=40.000 +datum=NAD83 +units=m +no_defs\")\n\n gdf.plot(column='prediction', legend=False, figsize=(20, 18),\n cmap=cmap, vmin=0,vmax=80 )\n #cb = plt.colorbar(sm, ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n plt.title(plotTitle, fontdict={'fontsize': 35})\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_\" + str(t) + \".tif\")\n\n \nfor t_Airnow, t in zip(time_Airnow, time_):\n \n df = dfs[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = gdf.set_crs(\"EPSG:4326\")\n \n \n predDf = gdf.plot(column='prediction', legend=False, figsize=(20, 18),\n cmap=cmap, vmin=0,vmax=84 )\n \n df_Airnow = dfs_Airnow[t_Airnow]\n # Add individual AirNow stations in US with color representation.\n \n gdfAirnow = gpd.GeoDataFrame(df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n\n gdfAirnow = gdfAirnow.set_crs(\"EPSG:4326\")\n gdfAirnow.plot(ax=predDf, column='AirNOW_O3', marker='o', markersize=65, cmap=cmap, figsize=(20, 15), linewidths=1, edgecolors=\"black\")\n \n \n dateObj = 
datetime.strptime(str(t), \"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n plt.title(\"AirNow Stations: \"+plotTitle, fontdict={'fontsize': 35})\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_\" + str(t) + \".tif\")\n \nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave(\"/groups/ESS/aalnaim/cmaq/prediction.mp4', images, fps=10)\n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Airnow_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave(\"/groups/ESS/aalnaim/cmaq/predctionAirNow.mp4\", images, fps=10)\n\n# create .gif from .mp4 using FFmpeg\nos.system('ffmpeg -i /groups/ESS/aalnaim/cmaq/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/prediction.mp4')\n\n\n# create .gif from .mp4 using FFmpeg\nos.system('ffmpeg -i /groups/ESS/aalnaim/cmaq/predctionAirNow.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/predctionAirNow.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/predctionAirNow.mp4')\n", + "history_output" : " File \"prediction_maps.py\", line 110\n imageio.mimsave(\"/groups/ESS/aalnaim/cmaq/prediction.mp4', images, fps=10)\n ^\nSyntaxError: EOL while scanning string literal\n", + "history_begin_time" : 1654500507944, + "history_end_time" : 1654500512174, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : "p6wvf2", + "indicator" : "Failed" +}] diff --git a/history/process_3asyzj.json b/history/process_3asyzj.json new file mode 100644 index 0000000..4cc48be --- /dev/null +++ b/history/process_3asyzj.json @@ -0,0 +1,141 @@ +[{ + "history_id" : "zl22tXflXGc0", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\nfrom datetime import timedelta\n# home directory\nhome = str(Path.home())\n\nbase = datetime.datetime.today() - timedelta(days=2)\ndate_list = [base - timedelta(days=x) for x in range(2)]\ndays = [date.strftime('%Y%m%d') for date in date_list]\n\n# nc file need to correspond to the same prediction date in \"/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf_Jun13.csv\"\n\nprint(\"Opening: \"+\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/COMBINE3D_ACONC_v531_gcc_AQF5X_\"+days[1]+\"_extracted.nc\")\n \ndf_cdf = xr.open_dataset(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/COMBINE3D_ACONC_v531_gcc_AQF5X_\"+days[1]+\"_extracted.nc\")\n\ndf_csv = pd.read_csv(\"/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv\")\ndf_csv['YYYYMMDDHH'] = df_csv['YYYYMMDDHH'].astype(str)\ndf_filt = df_csv[df_csv['YYYYMMDDHH'].str.contains(days[1]+\"|\"+days[0], case = False, regex=True)]\ndf_filt = df_filt[(df_filt['YYYYMMDDHH'] > days[1]+'11') & (df_filt['YYYYMMDDHH'] < days[0]+'12')]\n\n# Reshape \"prediction/Latitude/Longitude\" columns to (TSTEP, ROW, COL), these lines will reshape data into (24, 265, 442)\nreshaped_prediction = np.atleast_3d(df_filt['prediction']).reshape(-1, 265, 442)\n\n# Remove \"LAY\" 
Dimension in O3 variable already in nc file.\nreduced_dim = df_cdf['O3'].sel(LAY=1, drop=True)\n# Swap values from original nc file with new prediction data\nreduced_dim.values = reshaped_prediction\n\n# Apply changes to data variable in nc file\ndf_cdf['O3'] = (['TSTEP', 'ROW', 'COL'], reshaped_prediction)\n\ndf_cdf.to_netcdf('/groups/ESS/aalnaim/cmaq/prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_'+days[1]+'_ML_extracted.nc')\n\nprint('Saved updated netCDF file: /groups/ESS/aalnaim/cmaq/prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_'+days[1]+'_ML_extracted.nc')", + "history_output" : "Opening: /groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/COMBINE3D_ACONC_v531_gcc_AQF5X_20220625_extracted.nc\nSaved updated netCDF file: /groups/ESS/aalnaim/cmaq/prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_20220625_ML_extracted.nc\n", + "history_begin_time" : 1656459498325, + "history_end_time" : null, + "history_notes" : null, + "history_process" : "3asyzj", + "host_id" : null, + "indicator" : "Running" +},{ + "history_id" : "kdBDR4zuGRKz", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\nfrom datetime import timedelta\n# home directory\nhome = str(Path.home())\n\nbase = datetime.datetime.today() - timedelta(days=2)\ndate_list = [base - timedelta(days=x) for x in range(2)]\ndays = [date.strftime('%Y%m%d') for date in date_list]\n\n# nc file need to correspond to the same prediction date in \"/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf_Jun13.csv\"\n\nprint(\"Opening: \"+\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/COMBINE3D_ACONC_v531_gcc_AQF5X_\"+days[1]+\"_extracted.nc\"_\n \ndf_cdf = xr.open_dataset(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/COMBINE3D_ACONC_v531_gcc_AQF5X_\"+days[1]+\"_extracted.nc\")\n\ndf_csv = pd.read_csv(\"/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv\")\ndf_csv['YYYYMMDDHH'] = df_csv['YYYYMMDDHH'].astype(str)\ndf_filt = df_csv[df_csv['YYYYMMDDHH'].str.contains(days[1]+\"|\"+days[0], case = False, regex=True)]\ndf_filt = df_filt[(df_filt['YYYYMMDDHH'] > days[1]+'11') & (df_filt['YYYYMMDDHH'] < days[0]+'12')]\n\n# Reshape \"prediction/Latitude/Longitude\" columns to (TSTEP, ROW, COL), these lines will reshape data into (24, 265, 442)\nreshaped_prediction = np.atleast_3d(df_filt['prediction']).reshape(-1, 265, 442)\n\n# Remove \"LAY\" Dimension in O3 variable already in nc file.\nreduced_dim = df_cdf['O3'].sel(LAY=1, drop=True)\n# Swap values from original nc file with new prediction data\nreduced_dim.values = reshaped_prediction\n\n# Apply changes to data variable in nc file\ndf_cdf['O3'] = (['TSTEP', 'ROW', 'COL'], reshaped_prediction)\n\ndf_cdf.to_netcdf('/groups/ESS/aalnaim/cmaq/prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_'+days[1]+'_ML_extracted.nc')\n\nprint('Saved updated netCDF file: /groups/ESS/aalnaim/cmaq/prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_'+days[1]+'_ML_extracted.nc')", + "history_output" : " File \"processing_test_netcdf.py\", line 17\n print(\"Opening: \"+\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/COMBINE3D_ACONC_v531_gcc_AQF5X_\"+days[1]+\"_extracted.nc\"_\n ^\nSyntaxError: invalid syntax\n", + "history_begin_time" : 1656459474602, + "history_end_time" : null, + "history_notes" : null, + "history_process" : "3asyzj", + "host_id" : null, + "indicator" : "Running" +},{ + "history_id" : "p72xum4kath", + "history_input" : 
"import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\nfrom datetime import timedelta\n# home directory\nhome = str(Path.home())\n\nbase = datetime.datetime.today() - timedelta(days=2)\ndate_list = [base - timedelta(days=x) for x in range(3)]\ndays = [date.strftime('%Y%m%d') for date in date_list]\n\n# nc file need to correspond to the same prediction date in \"/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf_Jun13.csv\"\ndf_cdf = xr.open_dataset(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/COMBINE3D_ACONC_v531_gcc_AQF5X_\"+days[2]+\"_extracted.nc\")\n\ndf_csv = pd.read_csv(\"/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv\")\ndf_csv['YYYYMMDDHH'] = df_csv['YYYYMMDDHH'].astype(str)\ndf_filt = df_csv[df_csv['YYYYMMDDHH'].str.contains(days[1]+\"|\"+days[0], case = False, regex=True)]\ndf_filt = df_filt[(df_filt['YYYYMMDDHH'] > days[1]+'11') & (df_filt['YYYYMMDDHH'] < days[0]+'12')]\n\n# Reshape \"prediction/Latitude/Longitude\" columns to (TSTEP, ROW, COL), these lines will reshape data into (24, 265, 442)\nreshaped_prediction = np.atleast_3d(df_filt['prediction']).reshape(-1, 265, 442)\n\n# Remove \"LAY\" Dimension in O3 variable already in nc file.\nreduced_dim = df_cdf['O3'].sel(LAY=1, drop=True)\n# Swap values from original nc file with new prediction data\nreduced_dim.values = reshaped_prediction\n\n# Apply changes to data variable in nc file\ndf_cdf['O3'] = (['TSTEP', 'ROW', 'COL'], reshaped_prediction)\n\ndf_cdf.to_netcdf('/groups/ESS/aalnaim/cmaq/prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_'+days[1]+'_ML_extracted.nc')\n\nprint('Saved updated netCDF file: /groups/ESS/aalnaim/cmaq/prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_'+days[1]+'_ML_extracted.nc')", + "history_output" : "Saved updated netCDF file: /groups/ESS/aalnaim/cmaq/prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_20220625_ML_extracted.nc\n", + "history_begin_time" : 1656453929138, + "history_end_time" : 1656454194243, + "history_notes" : null, + "history_process" : "3asyzj", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "tlpe3au2vjr", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\nfrom datetime import timedelta\n# home directory\nhome = str(Path.home())\n\nbase = datetime.datetime.today() - timedelta(days=2)\ndate_list = [base - timedelta(days=x) for x in range(3)]\ndays = [date.strftime('%Y%m%d') for date in date_list]\n\n# nc file need to correspond to the same prediction date in \"/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf_Jun13.csv\"\ndf_cdf = xr.open_dataset(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/COMBINE3D_ACONC_v531_gcc_AQF5X_\"+days[2]+\"_extracted.nc\")\n\ndf_csv = pd.read_csv(\"/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv\")\ndf_csv['YYYYMMDDHH'] = df_csv['YYYYMMDDHH'].astype(str)\ndf_filt = df_csv[df_csv['YYYYMMDDHH'].str.contains(days[1]+\"|\"+days[0], case = False, regex=True)]\ndf_filt = df_filt[(df_filt['YYYYMMDDHH'] > days[1]+'11') & (df_filt['YYYYMMDDHH'] < days[0]+'12')]\n\n# Reshape \"prediction/Latitude/Longitude\" columns to (TSTEP, ROW, COL), these lines will reshape data into (24, 265, 442)\nreshaped_prediction = np.atleast_3d(df_filt['prediction']).reshape(-1, 265, 442)\n\n# Remove \"LAY\" Dimension in O3 variable already in nc file.\nreduced_dim = df_cdf['O3'].sel(LAY=1, drop=True)\n# Swap values 
from original nc file with new prediction data\nreduced_dim.values = reshaped_prediction\n\n# Apply changes to data variable in nc file\ndf_cdf['O3'] = (['TSTEP', 'ROW', 'COL'], reshaped_prediction)\n\ndf_cdf.to_netcdf('/groups/ESS/aalnaim/cmaq/prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_'+days[1]+'_ML_extracted.nc')\n\nprint('Saved updated netCDF file: /groups/ESS/aalnaim/cmaq/prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_'+days[1]+'_ML_extracted.nc')", + "history_output" : "bash: python: command not found\n", + "history_begin_time" : 1656428425283, + "history_end_time" : 1656428429355, + "history_notes" : null, + "history_process" : "3asyzj", + "host_id" : "p6wvf2", + "indicator" : "Stopped" +},{ + "history_id" : "60hZgMCFuH2u", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\nfrom datetime import timedelta\n# home directory\nhome = str(Path.home())\n\n# nc file need to correspond to the same prediction date in \"/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf_Jun13.csv\"\ndf_cdf = xr.open_dataset(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/COMBINE3D_ACONC_v531_gcc_AQF5X_20220612_extracted.nc\")\n\ndf_csv = pd.read_csv(\"/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv\")\ndf_csv['YYYYMMDDHH'] = df_csv['YYYYMMDDHH'].astype(str)\ndf_filt = df_csv[df_csv['YYYYMMDDHH'].str.contains(\"20220613|20220614\", case = False, regex=True)]\ndf_filt = df_filt[(df_filt['YYYYMMDDHH'] > '2022061311') & (df_filt['YYYYMMDDHH'] < '2022061412')]\n\n# Reshape \"prediction/Latitude/Longitude\" columns to (TSTEP, ROW, COL), these lines will reshape data into (24, 265, 442)\nreshaped_prediction = np.atleast_3d(df_filt['prediction']).reshape(-1, 265, 442)\n\n# Remove \"LAY\" Dimension in O3 variable already in nc file.\nreduced_dim = df_cdf['O3'].sel(LAY=1, drop=True)\n# Swap values from original nc file with new prediction data\nreduced_dim.values = reshaped_prediction\n\n# Apply changes to data variable in nc file\ndf_cdf['O3'] = (['TSTEP', 'ROW', 'COL'], reshaped_prediction)\n\ndf_cdf.to_netcdf('/groups/ESS/aalnaim/cmaq/prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_20220612_ML_extracted.nc')", + "history_output" : "Running", + "history_begin_time" : 1655501956099, + "history_end_time" : 1655503046217, + "history_notes" : null, + "history_process" : "3asyzj", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "PoORHAfZg0Ux", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\nfrom datetime import timedelta\n# home directory\nhome = str(Path.home())\n\n# nc file need to correspond to the same prediction date in \"/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf_Jun13.csv\"\ndf_cdf = xr.open_dataset(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/COMBINE3D_ACONC_v531_gcc_AQF5X_20220612_extracted.nc\")\n\ndf_csv = pd.read_csv(\"/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv\")\ndf_csv['YYYYMMDDHH'] = df_csv['YYYYMMDDHH'].astype(str)\ndf_filt = df_csv[df_csv['YYYYMMDDHH'].str.contains(\"20220613|20220614\", case = False, regex=True)]\ndf_filt = df_filt[(df_filt['YYYYMMDDHH'] > '2022061311') & (df_filt['YYYYMMDDHH'] < '2022061412')]\n\n# Reshape \"prediction/Latitude/Longitude\" columns to (TSTEP, ROW, COL), these lines will reshape data into (24, 265, 442)\nreshaped_prediction = 
np.atleast_3d(df_filt['prediction']).reshape(-1, 265, 442)\n\n# Remove \"LAY\" Dimension in O3 variable already in nc file.\nreduced_dim = df_cdf['O3'].sel(LAY=1, drop=True)\n# Swap values from original nc file with new prediction data\nreduced_dim.values = reshaped_prediction\n\n# Apply changes to data variable in nc file\ndf_cdf['O3'] = (['TSTEP', 'ROW', 'COL'], reshaped_prediction)\n\ndf_cdf.to_netcdf('/groups/ESS/aalnaim/cmaq/prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_20220612_ML_extracted.nc')", + "history_output" : null, + "history_begin_time" : 1655501841903, + "history_end_time" : null, + "history_notes" : null, + "history_process" : "3asyzj", + "host_id" : null, + "indicator" : null +},{ + "history_id" : "WICvybCO3non", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\nfrom datetime import timedelta\n# home directory\nhome = str(Path.home())\n\n# nc file need to correspond to the same prediction date in \"/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf_Jun13.csv\"\ndf_cdf = xr.open_dataset(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/COMBINE3D_ACONC_v531_gcc_AQF5X_20220612_extracted.nc\")\n\ndf_csv = pd.read_csv(\"/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv\")\ndf_csv['YYYYMMDDHH'] = df_csv['YYYYMMDDHH'].astype(str)\ndf_filt = df_csv[df_csv['YYYYMMDDHH'].str.contains(\"20220613|20220614\", case = False, regex=True)]\ndf_filt = df_filt[(df_filt['YYYYMMDDHH'] > '2022061312') & (df_filt['YYYYMMDDHH'] < '2022061412')]\n\n# Reshape \"prediction/Latitude/Longitude\" columns to (TSTEP, ROW, COL), these lines will reshape data into (24, 265, 442)\nreshaped_prediction = np.atleast_3d(df_filt['prediction']).reshape(-1, 265, 442)\n\n# Remove \"LAY\" Dimension in O3 variable already in nc file.\nreduced_dim = df_cdf['O3'].sel(LAY=1, drop=True)\n# Swap values from original nc file with new prediction data\nreduced_dim.values = reshaped_prediction\n\n# Apply changes to data variable in nc file\ndf_cdf['O3'] = (['TSTEP', 'ROW', 'COL'], reshaped_prediction)\n\ndf_cdf.to_netcdf('/groups/ESS/aalnaim/cmaq/prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_20220612_ML_extracted.nc')", + "history_output" : "", + "history_begin_time" : 1655496063976, + "history_end_time" : 1655501871999, + "history_notes" : null, + "history_process" : "3asyzj", + "host_id" : null, + "indicator" : "Failed" +},{ + "history_id" : "9wdqy7efw4qC", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\nfrom datetime import timedelta\n# home directory\nhome = str(Path.home())\n\n# nc file need to correspond to the same prediction date in \"/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf_Jun13.csv\"\ndf_cdf = xr.open_dataset(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/COMBINE3D_ACONC_v531_gcc_AQF5X_20220612_extracted.nc\")\n\ndf_csv = pd.read_csv(\"/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv\")\ndf_csv['YYYYMMDDHH'] = df_csv['YYYYMMDDHH'].astype(str)\ndf_filt = df_csv[df_csv['YYYYMMDDHH'].str.contains(\"20220613|20220614\", case = False, regex=True)]\ndf_filt = df_filt[(df_filt['YYYYMMDDHH'] > '2022061312') & (df_filt['YYYYMMDDHH'] < '2022061412')]\n\n# Reshape \"prediction/Latitude/Longitude\" columns to (TSTEP, ROW, COL), these lines will reshape data into (24, 265, 442)\nreshaped_prediction = np.atleast_3d(df_filt['prediction']).reshape(-1, 
265, 442)\n\n# Remove \"LAY\" Dimension in O3 variable already in nc file.\nreduced_dim = df_cdf['O3'].sel(LAY=1, drop=True)\n# Swap values from original nc file with new prediction data\nreduced_dim.values = reshaped_prediction\n\n# Apply changes to data variable in nc file\ndf_cdf['O3'] = (['TSTEP', 'ROW', 'COL'], reshaped_prediction)\n\ndf_cdf.to_netcdf('/groups/ESS/aalnaim/cmaq/prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_20220612_ML_extracted.nc')", + "history_output" : null, + "history_begin_time" : 1655495993831, + "history_end_time" : null, + "history_notes" : null, + "history_process" : "3asyzj", + "host_id" : null, + "indicator" : null +},{ + "history_id" : "beeVZaInXI6r", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\nfrom datetime import timedelta\n# home directory\nhome = str(Path.home())\n\n# nc file need to correspond to the same prediction date in \"/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf_Jun13.csv\"\ndf_cdf = xr.open_dataset(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/COMBINE3D_ACONC_v531_gcc_AQF5X_20220612_extracted.nc\")\n\ndf_csv = pd.read_csv(\"/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv\")\ndf_csv['YYYYMMDDHH'] = df_csv['YYYYMMDDHH'].astype(str)\ndf_filt = df_csv[df_csv['YYYYMMDDHH'].str.contains(\"20220613|20220614\", case = False, regex=True)]\ndf_filt = df_filt[(df_filt['YYYYMMDDHH'] > '2022061312') & (df_filt['YYYYMMDDHH'] < '2022061412')]\n\n# Reshape \"prediction/Latitude/Longitude\" columns to (TSTEP, ROW, COL), these lines will reshape data into (24, 265, 442)\nreshaped_prediction = np.atleast_3d(df_filt['prediction']).reshape(-1, 265, 442)\n\n# Remove \"LAY\" Dimension in O3 variable already in nc file.\nreduced_dim = df_cdf['O3'].sel(LAY=1, drop=True)\n# Swap values from original nc file with new prediction data\nreduced_dim.values = reshaped_prediction\n\n# Apply changes to data variable in nc file\ndf_cdf['O3'] = (['TSTEP', 'ROW', 'COL'], reshaped_prediction)\n\ndf_cdf.to_netcdf('/groups/ESS/aalnaim/cmaq/prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_20220612_ML_extracted.nc')", + "history_output" : "", + "history_begin_time" : 1655494545852, + "history_end_time" : 1655496023888, + "history_notes" : null, + "history_process" : "3asyzj", + "host_id" : null, + "indicator" : "Failed" +},{ + "history_id" : "INqHz0JSeptf", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\nfrom datetime import timedelta\n# home directory\nhome = str(Path.home())\n\n# nc file need to correspond to the same prediction date in \"/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf_Jun13.csv\"\ndf_cdf = xr.open_dataset(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/COMBINE3D_ACONC_v531_gcc_AQF5X_20220612_extracted.nc\")\n\ndf_csv = pd.read_csv(\"/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv\")\ndf_csv['YYYYMMDDHH'] = df_csv['YYYYMMDDHH'].astype(str)\ndf_filt = df_csv[df_csv['YYYYMMDDHH'].str.contains(\"20220613|20220614\", case = False, regex=True)]\ndf_filt = df_filt[(df_filt['YYYYMMDDHH'] > '2022061312') & (df_filt['YYYYMMDDHH'] < '2022061412')]\n\n# Reshape \"prediction/Latitude/Longitude\" columns to (TSTEP, ROW, COL), these lines will reshape data into (24, 265, 442)\nreshaped_prediction = np.atleast_3d(df_filt['prediction']).reshape(-1, 265, 442)\n\n# Remove \"LAY\" Dimension in O3 
variable already in nc file.\nreduced_dim = df_cdf['O3'].sel(LAY=1, drop=True)\n# Swap values from original nc file with new prediction data\nreduced_dim.values = reshaped_prediction\n\n# Apply changes to data variable in nc file\ndf_cdf['O3'] = (['TSTEP', 'ROW', 'COL'], reshaped_prediction)\n\ndf_cdf.to_netcdf('/groups/ESS/aalnaim/cmaq/prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_20220612_ML_extracted.nc')", + "history_output" : "Running", + "history_begin_time" : 1655493655846, + "history_end_time" : 1655494421371, + "history_notes" : null, + "history_process" : "3asyzj", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "3nKofAxIaXVd", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\nfrom datetime import timedelta\n# home directory\nhome = str(Path.home())\n\n# nc file need to correspond to the same prediction date in \"/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf_Jun13.csv\"\ndf_cdf = xr.open_dataset(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/COMBINE3D_ACONC_v531_gcc_AQF5X_20220612_extracted.nc\")\n\ndf_csv = pd.read_csv(\"/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf_Jun13.csv\")\ndf_csv['YYYYMMDDHH'] = df_csv['YYYYMMDDHH'].astype(str)\ndf_filt = df_csv[df_csv['YYYYMMDDHH'].str.contains(\"20220613\")]\n\n# Reshape \"prediction/Latitude/Longitude\" columns to (TSTEP, ROW, COL), these lines will reshape data into (24, 265, 442)\nreshaped_prediction = np.atleast_3d(df_filt['prediction']).reshape(-1, 265, 442)\nreshaped_lat = np.atleast_3d(df_filt['Latitude']).reshape(-1, 265, 442)\nreshaped_lon = np.atleast_3d(df_filt['Longitude']).reshape(-1, 265, 442)\n\n# Remove \"LAY\" Dimension in O3 variable already in nc file.\nreduced_dim = df_cdf['O3'].sel(LAY=1, drop=True)\n# Swap values from original nc file with new prediction data\nreduced_dim.values = reshaped_prediction\n\n# Apply changes to data variable in nc file\ndf_cdf['O3'] = (['TSTEP', 'ROW', 'COL'], reshaped_prediction)\ndf_cdf['LAT'] = (['TSTEP', 'ROW', 'COL'], reshaped_lat)\ndf_cdf['LON'] = (['TSTEP', 'ROW', 'COL'], reshaped_lon)\n\ndf_cdf.to_netcdf('/groups/ESS/aalnaim/cmaq/prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_20220612_extracted.nc')", + "history_output" : "Running", + "history_begin_time" : 1655406353871, + "history_end_time" : 1655493635343, + "history_notes" : null, + "history_process" : "3asyzj", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "f2HBptDwAyBk", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\nfrom datetime import timedelta\n# home directory\nhome = str(Path.home())\n\n# nc file need to corespond to the same prediction date in \"/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf_Jun13.csv\"\ndf_cdf = xr.open_dataset(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/COMBINE3D_ACONC_v531_gcc_AQF5X_20220612_extracted.nc\")\n\ndf_csv = pd.read_csv(\"/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf_Jun13.csv\")\ndf_csv['YYYYMMDDHH'] = df_csv['YYYYMMDDHH'].astype(str)\ndf_filt = df_csv[df_csv['YYYYMMDDHH'].str.contains(\"20220613\")]\n\n# Reshape \"prediction/Latitude/Longitude\" columns to (TSTEP, ROW, COL), these lines will reshape data into (24, 265, 442)\nreshaped_prediction = np.atleast_3d(df_filt['prediction']).reshape(-1, 265, 442)\nreshaped_lat = 
np.atleast_3d(df_filt['Latitude']).reshape(-1, 265, 442)\nreshaped_lon = np.atleast_3d(df_filt['Longitude']).reshape(-1, 265, 442)\n\n# Remove \"LAY\" Dimension in O3 variable already in nc file.\nreduced_dim = df_cdf['O3'].sel(LAY=1, drop=True)\n# Swap values from original nc file with new prediction data\nreduced_dim.values = reshaped_prediction\n\n# Apply changes to data variable in nc file\ndf_cdf['O3'] = (['TSTEP', 'ROW', 'COL'], reshaped_prediction)\ndf_cdf['LAT'] = (['TSTEP', 'ROW', 'COL'], reshaped_lat)\ndf_cdf['LON'] = (['TSTEP', 'ROW', 'COL'], reshaped_lon)\n\ndf_cdf.to_netcdf('/groups/ESS/aalnaim/cmaq/prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_20220612_extracted.nc')", + "history_output" : null, + "history_begin_time" : 1655406212085, + "history_end_time" : null, + "history_notes" : null, + "history_process" : "3asyzj", + "host_id" : null, + "indicator" : null +},{ + "history_id" : "d4w6nlhuaxi", + "history_input" : null, + "history_output" : null, + "history_begin_time" : null, + "history_end_time" : 1656450968039, + "history_notes" : null, + "history_process" : "3asyzj", + "host_id" : "p6wvf2", + "indicator" : "Stopped" +},{ + "history_id" : "tyaio8q7nov", + "history_input" : null, + "history_output" : null, + "history_begin_time" : null, + "history_end_time" : 1656452485883, + "history_notes" : null, + "history_process" : "3asyzj", + "host_id" : "p6wvf2", + "indicator" : "Stopped" +},] diff --git a/history/process_6oifw4.json b/history/process_6oifw4.json new file mode 100644 index 0000000..a3a516a --- /dev/null +++ b/history/process_6oifw4.json @@ -0,0 +1,981 @@ +[{ + "history_id" : "cKXAvLlgJ17G", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom matplotlib import gridspec\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 65\nmpl.rcParams['axes.linewidth'] = 2\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf_Jun13.csv')\ncmaq_actual = pd.read_csv(\"/groups/ESS/aalnaim/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(\"/groups/ESS/aalnaim/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_actual = cmaq_actual['YYYYMMDDHH'].unique()\ndfs_actual = dict(tuple(cmaq_actual.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ndef add_colorbar(fig, axes):\n \n norm = mpl.colors.Normalize(vmin=0,vmax=80)\n\n sm = plt.cm.ScalarMappable(cmap=cmap, norm=norm)\n \n cb = fig.colorbar(sm, ticks=list(range(0, 84, 4)), \tlocation='bottom', format='%.0f', boundaries=np.arange(0,84,4), \n spacing='uniform', drawedges=True, pad=0.05, ax=axes)\n\n\n cb.outline.set_linewidth(2)\n 
cb.dividers.set_color('black')\n cb.dividers.set_linewidth(2)\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n \nfig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2,2, figsize=(80, 50))\n \nadd_colorbar(fig, ax1)\nadd_colorbar(fig, ax2)\nadd_colorbar(fig, ax3)\nadd_colorbar(fig, ax4) \n \n \nfor t in time_:\n \n print(\"Plotting... \", t)\n \n # CMAQ prediction dataframe \n df = dfs[t]\n \n # AirNow observation dataframe\n df_Airnow = dfs_Airnow[t]\n\n # CMAQ actual dataframe\n df_actual = dfs_actual[t]\n \n # Setting up GeoPandas df from [CMAQ prediction dataframe] \n predictionMapData = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n predictionMapData = predictionMapData.set_crs(\"EPSG:4326\")\n\n # Setting up GeoPandas df from [AirNow observation dataframe]\n predictionAirnowData = gpd.GeoDataFrame(\n df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n predictionAirnowData = predictionAirnowData.set_crs(\"EPSG:4326\")\n \n # Setting up GeoPandas df from [CMAQ actual dataframe]\n cmaqActualData = gpd.GeoDataFrame(\n df_actual, geometry=gpd.points_from_xy(df_actual.Longitude, df_actual.Latitude))\n cmaqActualData = cmaqActualData.set_crs(\"EPSG:4326\")\n\n\n ###### Plots ROW 1 ######\n \n # Plotting CMAQ prediction Map \n predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax1)\n\n # Plotting CMAQ predictions base layer for AirNow observations \n predictionMap = predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax2)\n \n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=predictionMap, column='AirNOW_O3', \n marker='o', markersize=400, cmap=cmap, \n linewidths=3, edgecolors=\"black\")\n \n \n ###### Plots ROW 2 ######\n \n # Plotting Actual CMAQ Map \n cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax3)\n \n # Plotting Actual CMAQ base layer for AirNow observations \n actualCmaqMap = cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax4)\n \n\n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=actualCmaqMap, column='AirNOW_O3', \n marker='o', markersize=400, cmap=cmap, \n linewidths=3, edgecolors=\"black\")\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n predictionMapPlotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n AirNowPlotTitle = datetime.strftime(dateObj, \"AirNow Stations: %Y-%m-%d (Time: %-H)\")\n cmaqActualPlotTitle = datetime.strftime(dateObj, \"Actual CMAQ: %Y-%m-%d (Time: %-H)\")\n AirNowActualPlotTitle = datetime.strftime(dateObj, \"Actual AirNow Stations: %Y-%m-%d (Time: %-H)\")\n ax1.set_title(predictionMapPlotTitle)\n ax2.set_title(AirNowPlotTitle)\n ax3.set_title(cmaqActualPlotTitle)\n ax4.set_title(AirNowActualPlotTitle)\n print(\"Saving Prediction Map: \", predictionMapPlotTitle)\n \n plt.tight_layout()\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n \n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave('/groups/ESS/aalnaim/cmaq/prediction.mp4', images, fps=10)\n\n\nprint(\"Generating prediction.gif 
...\")\n# create .gif from .mp4 using FFmpeg\nos.system('module load ffmpeg; ffmpeg -i /groups/ESS/aalnaim/cmaq/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/prediction.mp4')\nprint(\"Done prediction.gif !!!\")\n\n \n\n\n", + "history_output" : "Plotting... 2022061312\nSaving Prediction Map: 2022-06-13 (Time: 12)\nPlotting... 2022061313\nSaving Prediction Map: 2022-06-13 (Time: 13)\nPlotting... 2022061314\nSaving Prediction Map: 2022-06-13 (Time: 14)\nPlotting... 2022061315\nSaving Prediction Map: 2022-06-13 (Time: 15)\nPlotting... 2022061316\nSaving Prediction Map: 2022-06-13 (Time: 16)\nPlotting... 2022061317\nSaving Prediction Map: 2022-06-13 (Time: 17)\nPlotting... 2022061318\nSaving Prediction Map: 2022-06-13 (Time: 18)\nPlotting... 2022061319\nSaving Prediction Map: 2022-06-13 (Time: 19)\nPlotting... 2022061320\n", + "history_begin_time" : 1655312270049, + "history_end_time" : 1655406242239, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Failed" +},{ + "history_id" : "kRolBJc1jNml", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom matplotlib import gridspec\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 65\nmpl.rcParams['axes.linewidth'] = 2\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\ncmaq_actual = pd.read_csv(\"/groups/ESS/aalnaim/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(\"/groups/ESS/aalnaim/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_actual = cmaq_actual['YYYYMMDDHH'].unique()\ndfs_actual = dict(tuple(cmaq_actual.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ndef add_colorbar(fig, axes):\n \n norm = mpl.colors.Normalize(vmin=0,vmax=80)\n\n sm = plt.cm.ScalarMappable(cmap=cmap, norm=norm)\n \n cb = fig.colorbar(sm, ticks=list(range(0, 84, 4)), \tlocation='bottom', format='%.0f', boundaries=np.arange(0,84,4), \n spacing='uniform', drawedges=True, pad=0.05, ax=axes)\n\n\n cb.outline.set_linewidth(2)\n cb.dividers.set_color('black')\n cb.dividers.set_linewidth(2)\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n \nfig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2,2, figsize=(80, 50))\n \n#add_colorbar(fig, ax1)\n#add_colorbar(fig, ax2)\n#add_colorbar(fig, ax3)\n#add_colorbar(fig, ax4) \n \n \nfor t in time_:\n \n print(\"Plotting... 
\", t)\n \n # CMAQ prediction dataframe \n df = dfs[t]\n \n # AirNow observation dataframe\n df_Airnow = dfs_Airnow[t]\n\n # CMAQ actual dataframe\n df_actual = dfs_actual[t]\n \n # Setting up GeoPandas df from [CMAQ prediction dataframe] \n predictionMapData = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n predictionMapData = predictionMapData.set_crs(\"EPSG:4326\")\n\n # Setting up GeoPandas df from [AirNow observation dataframe]\n predictionAirnowData = gpd.GeoDataFrame(\n df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n predictionAirnowData = predictionAirnowData.set_crs(\"EPSG:4326\")\n \n # Setting up GeoPandas df from [CMAQ actual dataframe]\n cmaqActualData = gpd.GeoDataFrame(\n df_actual, geometry=gpd.points_from_xy(df_actual.Longitude, df_actual.Latitude))\n cmaqActualData = cmaqActualData.set_crs(\"EPSG:4326\")\n\n\n ###### Plots ROW 1 ######\n \n # Plotting CMAQ prediction Map \n predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax1)\n\n # Plotting CMAQ predictions base layer for AirNow observations \n predictionMap = predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax2)\n \n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=predictionMap, column='AirNOW_O3', \n marker='o', markersize=400, cmap=cmap, \n linewidths=3, edgecolors=\"black\")\n \n \n ###### Plots ROW 2 ######\n \n # Plotting Actual CMAQ Map \n cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax3)\n \n # Plotting Actual CMAQ base layer for AirNow observations \n actualCmaqMap = cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax4)\n \n\n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=actualCmaqMap, column='AirNOW_O3', \n marker='o', markersize=400, cmap=cmap, \n linewidths=3, edgecolors=\"black\")\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n predictionMapPlotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n AirNowPlotTitle = datetime.strftime(dateObj, \"AirNow Stations: %Y-%m-%d (Time: %-H)\")\n cmaqActualPlotTitle = datetime.strftime(dateObj, \"Actual CMAQ: %Y-%m-%d (Time: %-H)\")\n AirNowActualPlotTitle = datetime.strftime(dateObj, \"Actual AirNow Stations: %Y-%m-%d (Time: %-H)\")\n ax1.set_title(predictionMapPlotTitle)\n ax2.set_title(AirNowPlotTitle)\n ax3.set_title(cmaqActualPlotTitle)\n ax4.set_title(AirNowActualPlotTitle)\n print(\"Saving Prediction Map: \", predictionMapPlotTitle)\n \n plt.tight_layout()\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n \n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave('/groups/ESS/aalnaim/cmaq/prediction.mp4', images, fps=10)\n\n\nprint(\"Generating prediction.gif ...\")\n# create .gif from .mp4 using FFmpeg\nos.system('module load ffmpeg; ffmpeg -i /groups/ESS/aalnaim/cmaq/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm 
/groups/ESS/aalnaim/cmaq/prediction.mp4')\nprint(\"Done prediction.gif !!!\")\n\n \n\n\n", + "history_output" : "Plotting... 2022061312\nSaving Prediction Map: 2022-06-13 (Time: 12)\nPlotting... 2022061313\nSaving Prediction Map: 2022-06-13 (Time: 13)\nPlotting... 2022061314\nSaving Prediction Map: 2022-06-13 (Time: 14)\nPlotting... 2022061315\nSaving Prediction Map: 2022-06-13 (Time: 15)\nPlotting... 2022061316\nSaving Prediction Map: 2022-06-13 (Time: 16)\nPlotting... 2022061317\nSaving Prediction Map: 2022-06-13 (Time: 17)\nPlotting... 2022061318\nSaving Prediction Map: 2022-06-13 (Time: 18)\n", + "history_begin_time" : 1655306883159, + "history_end_time" : 1655307558934, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Running" +},{ + "history_id" : "I46rX45OXPCl", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom matplotlib import gridspec\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 65\nmpl.rcParams['axes.linewidth'] = 2\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\ncmaq_actual = pd.read_csv(\"/groups/ESS/aalnaim/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(\"/groups/ESS/aalnaim/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_actual = cmaq_actual['YYYYMMDDHH'].unique()\ndfs_actual = dict(tuple(cmaq_actual.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ndef add_colorbar(fig, axes):\n \n norm = mpl.colors.Normalize(vmin=0,vmax=80)\n\n sm = plt.cm.ScalarMappable(cmap=cmap, norm=norm)\n \n cb = fig.colorbar(sm, ticks=list(range(0, 84, 4)), \tlocation='bottom', format='%.0f', boundaries=np.arange(0,84,4), \n spacing='uniform', drawedges=True, pad=0.05, ax=axes)\n\n\n cb.outline.set_linewidth(2)\n cb.dividers.set_color('black')\n cb.dividers.set_linewidth(2)\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n \nfig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2,2, figsize=(80, 50))\n \n#add_colorbar(fig, ax1)\n#add_colorbar(fig, ax2)\n#add_colorbar(fig, ax3)\n#add_colorbar(fig, ax4) \n \n \nfor t in time_:\n \n print(\"Plotting... 
\", t)\n \n # CMAQ prediction dataframe \n df = dfs[t]\n \n # AirNow observation dataframe\n df_Airnow = dfs_Airnow[t]\n\n # CMAQ actual dataframe\n df_actual = dfs_actual[t]\n \n # Setting up GeoPandas df from [CMAQ prediction dataframe] \n predictionMapData = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n predictionMapData = predictionMapData.set_crs(\"EPSG:4326\")\n\n # Setting up GeoPandas df from [AirNow observation dataframe]\n predictionAirnowData = gpd.GeoDataFrame(\n df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n predictionAirnowData = predictionAirnowData.set_crs(\"EPSG:4326\")\n \n # Setting up GeoPandas df from [CMAQ actual dataframe]\n cmaqActualData = gpd.GeoDataFrame(\n df_actual, geometry=gpd.points_from_xy(df_actual.Longitude, df_actual.Latitude))\n cmaqActualData = cmaqActualData.set_crs(\"EPSG:4326\")\n\n\n ###### Plots ROW 1 ######\n \n # Plotting CMAQ prediction Map \n predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax1)\n\n # Plotting CMAQ predictions base layer for AirNow observations \n predictionMap = predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax2)\n \n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=predictionMap, column='AirNOW_O3', \n marker='o', markersize=400, cmap=cmap, \n linewidths=3, edgecolors=\"black\")\n \n \n ###### Plots ROW 2 ######\n \n # Plotting Actual CMAQ Map \n cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax3)\n \n # Plotting Actual CMAQ base layer for AirNow observations \n actualCmaqMap = cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax4)\n \n\n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=actualCmaqMap, column='AirNOW_O3', \n marker='o', markersize=400, cmap=cmap, \n linewidths=3, edgecolors=\"black\")\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n predictionMapPlotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n AirNowPlotTitle = datetime.strftime(dateObj, \"AirNow Stations: %Y-%m-%d (Time: %-H)\")\n cmaqActualPlotTitle = datetime.strftime(dateObj, \"Actual CMAQ: %Y-%m-%d (Time: %-H)\")\n AirNowActualPlotTitle = datetime.strftime(dateObj, \"Actual AirNow Stations: %Y-%m-%d (Time: %-H)\")\n ax1.set_title(predictionMapPlotTitle)\n ax2.set_title(AirNowPlotTitle)\n ax3.set_title(cmaqActualPlotTitle)\n ax4.set_title(AirNowActualPlotTitle)\n print(\"Saving Prediction Map: \", predictionMapPlotTitle)\n \n plt.tight_layout()\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n \n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave('/groups/ESS/aalnaim/cmaq/prediction.mp4', images, fps=10)\n\n\nprint(\"Generating prediction.gif ...\")\n# create .gif from .mp4 using FFmpeg\nos.system('module load ffmpeg; ffmpeg -i /groups/ESS/aalnaim/cmaq/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm 
/groups/ESS/aalnaim/cmaq/prediction.mp4')\nprint(\"Done prediction.gif !!!\")\n\n \n\n\n", + "history_output" : null, + "history_begin_time" : 1655306825984, + "history_end_time" : null, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : null +},{ + "history_id" : "jDbfFXybpsOh", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom matplotlib import gridspec\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 65\nmpl.rcParams['axes.linewidth'] = 2\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\ncmaq_actual = pd.read_csv(\"/groups/ESS/aalnaim/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(\"/groups/ESS/aalnaim/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_actual = cmaq_actual['YYYYMMDDHH'].unique()\ndfs_actual = dict(tuple(cmaq_actual.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ndef add_colorbar(fig, axes):\n \n norm = mpl.colors.Normalize(vmin=0,vmax=80)\n\n sm = plt.cm.ScalarMappable(cmap=cmap, norm=norm)\n \n cb = fig.colorbar(sm, ticks=list(range(0, 84, 4)), \tlocation='bottom', format='%.0f', boundaries=np.arange(0,84,4), \n spacing='uniform', drawedges=True, pad=0.05, ax=axes)\n\n\n cb.outline.set_linewidth(2)\n cb.dividers.set_color('black')\n cb.dividers.set_linewidth(2)\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n \nfig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2,2, figsize=(80, 50))\n \nadd_colorbar(fig, ax1)\nadd_colorbar(fig, ax2)\nadd_colorbar(fig, ax3)\nadd_colorbar(fig, ax4) \n \n \nfor t in time_:\n \n print(\"Plotting... 
\", t)\n \n # CMAQ prediction dataframe \n df = dfs[t]\n \n # AirNow observation dataframe\n df_Airnow = dfs_Airnow[t]\n\n # CMAQ actual dataframe\n df_actual = dfs_actual[t]\n \n # Setting up GeoPandas df from [CMAQ prediction dataframe] \n predictionMapData = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n predictionMapData = predictionMapData.set_crs(\"EPSG:4326\")\n\n # Setting up GeoPandas df from [AirNow observation dataframe]\n predictionAirnowData = gpd.GeoDataFrame(\n df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n predictionAirnowData = predictionAirnowData.set_crs(\"EPSG:4326\")\n \n # Setting up GeoPandas df from [CMAQ actual dataframe]\n cmaqActualData = gpd.GeoDataFrame(\n df_actual, geometry=gpd.points_from_xy(df_actual.Longitude, df_actual.Latitude))\n cmaqActualData = cmaqActualData.set_crs(\"EPSG:4326\")\n\n\n ###### Plots ROW 1 ######\n \n # Plotting CMAQ prediction Map \n predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax1)\n\n # Plotting CMAQ predictions base layer for AirNow observations \n predictionMap = predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax2)\n \n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=predictionMap, column='AirNOW_O3', \n marker='o', markersize=400, cmap=cmap, \n linewidths=3, edgecolors=\"black\")\n \n \n ###### Plots ROW 2 ######\n \n # Plotting Actual CMAQ Map \n cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax3)\n \n # Plotting Actual CMAQ base layer for AirNow observations \n actualCmaqMap = cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax4)\n \n\n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=actualCmaqMap, column='AirNOW_O3', \n marker='o', markersize=400, cmap=cmap, \n linewidths=3, edgecolors=\"black\")\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n predictionMapPlotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n AirNowPlotTitle = datetime.strftime(dateObj, \"AirNow Stations: %Y-%m-%d (Time: %-H)\")\n cmaqActualPlotTitle = datetime.strftime(dateObj, \"Actual CMAQ: %Y-%m-%d (Time: %-H)\")\n AirNowActualPlotTitle = datetime.strftime(dateObj, \"Actual AirNow Stations: %Y-%m-%d (Time: %-H)\")\n ax1.set_title(predictionMapPlotTitle)\n ax2.set_title(AirNowPlotTitle)\n ax3.set_title(cmaqActualPlotTitle)\n ax4.set_title(AirNowActualPlotTitle)\n print(\"Saving Prediction Map: \", predictionMapPlotTitle)\n \n plt.tight_layout()\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n \n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave('/groups/ESS/aalnaim/cmaq/prediction.mp4', images, fps=10)\n\n\nprint(\"Generating prediction.gif ...\")\n# create .gif from .mp4 using FFmpeg\nos.system('module load ffmpeg; ffmpeg -i /groups/ESS/aalnaim/cmaq/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm 
/groups/ESS/aalnaim/cmaq/prediction.mp4')\nprint(\"Done prediction.gif !!!\")\n\n \n\n\n", + "history_output" : "Plotting... 2022061312\nSaving Prediction Map: 2022-06-13 (Time: 12)\nPlotting... 2022061313\nSaving Prediction Map: 2022-06-13 (Time: 13)\nPlotting... 2022061314\nSaving Prediction Map: 2022-06-13 (Time: 14)\nPlotting... 2022061315\nSaving Prediction Map: 2022-06-13 (Time: 15)\nPlotting... 2022061316\nSaving Prediction Map: 2022-06-13 (Time: 16)\nPlotting... 2022061317\nSaving Prediction Map: 2022-06-13 (Time: 17)\nPlotting... 2022061318\nSaving Prediction Map: 2022-06-13 (Time: 18)\nPlotting... 2022061319\nSaving Prediction Map: 2022-06-13 (Time: 19)\nPlotting... 2022061320\n", + "history_begin_time" : 1655303850689, + "history_end_time" : 1655306856026, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Failed" +},{ + "history_id" : "l3GvuKN58y4X", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom matplotlib import gridspec\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave('/groups/ESS/aalnaim/cmaq/prediction.mp4', images, fps=10)\n\n\nprint(\"Generating prediction.gif ...\")\n# create .gif from .mp4 using FFmpeg\nos.system('module load ffmpeg; ffmpeg -i /groups/ESS/aalnaim/cmaq/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/prediction.mp4')\nprint(\"Done prediction.gif !!!\")\n\n \n\n\n", + "history_output" : 
"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061100.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061101.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061102.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061103.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061104.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061105.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061106.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061107.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061108.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061109.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061110.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061111.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061112.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061113.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061114.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061115.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061116.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061117.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061118.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061119.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061120.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061121.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061122.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061123.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061200.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061201.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061202.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061203.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061204.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061205.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061206.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061207.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061208.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061209.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061210.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061211.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061212.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061213.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061214.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061215.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061216.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061217.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061218.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061219.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061220.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061221.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061222.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061223.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061300.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061301.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061302.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061303.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061304.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061305.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061306.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061307.tif\n/groups/ESS/aalnaim/cmaq/p
rediction_maps/Plots_2022061308.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061309.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061310.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061311.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061313.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061314.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061315.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061316.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061317.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061318.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061319.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061320.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061321.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061322.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061323.tif\nGenerating prediction.gif ...\nffmpeg version 4.2.2 Copyright (c) 2000-2019 the FFmpeg developers\n built with gcc 9.3.0 (GCC)\n configuration: --prefix=/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0/ffmpeg-4.2.2-ey --enable-pic --cc=/opt/sw/spack/0.16.0/lib/spack/env/gcc/gcc --cxx=/opt/sw/spack/0.16.0/lib/spack/env/gcc/g++ --enable-libxcb --enable-libxcb-shape --enable-libxcb-shm --enable-libxcb-xfixes --enable-xlib --disable-libfontconfig --disable-libfreetype --disable-libfribidi --enable-bzlib --enable-libmp3lame --enable-libopenjpeg --disable-libopus --enable-libspeex --enable-libvorbis --enable-avresample --enable-openssl --enable-shared --disable-libzmq --disable-libssh --disable-libwebp --disable-lzma --disable-libsnappy --enable-sdl2 --enable-libaom\n libavutil 56. 31.100 / 56. 31.100\n libavcodec 58. 54.100 / 58. 54.100\n libavformat 58. 29.100 / 58. 29.100\n libavdevice 58. 8.100 / 58. 8.100\n libavfilter 7. 57.100 / 7. 57.100\n libavresample 4. 0. 0 / 4. 0. 0\n libswscale 5. 5.100 / 5. 5.100\n libswresample 3. 5.100 / 3. 5.100\nInput #0, mov,mp4,m4a,3gp,3g2,mj2, from '/groups/ESS/aalnaim/cmaq/prediction.mp4':\n Metadata:\n major_brand : isom\n minor_version : 512\n compatible_brands: isomiso2avc1mp41\n encoder : Lavf58.29.100\n Duration: 00:00:07.10, start: 0.000000, bitrate: 4 kb/s\n Stream #0:0(und): Video: h264 (High) (avc1 / 0x31637661), yuv420p, 640x480, 2 kb/s, 10 fps, 10 tbr, 10240 tbn, 20 tbc (default)\n Metadata:\n handler_name : VideoHandler\nStream mapping:\n Stream #0:0 -> #0:0 (h264 (native) -> gif (native))\nPress [q] to stop, [?] 
for help\nOutput #0, gif, to '/groups/ESS/aalnaim/cmaq/prediction.gif':\n Metadata:\n major_brand : isom\n minor_version : 512\n compatible_brands: isomiso2avc1mp41\n encoder : Lavf58.29.100\n Stream #0:0(und): Video: gif, pal8, 2000x1500, q=2-31, 200 kb/s, 10 fps, 100 tbn, 10 tbc (default)\n Metadata:\n handler_name : VideoHandler\n encoder : Lavc58.54.100 gif\nframe= 0 fps=0.0 q=0.0 size= 0kB time=00:00:00.00 bitrate=N/A speed= 0x \nframe= 0 fps=0.0 q=0.0 size= 0kB time=00:00:00.00 bitrate=N/A speed= 0x \n\u001B[1;32m[Parsed_palettegen_2 @ 0x148acc0] \u001B[0m4(+1) colors generated out of 4 colors; ratio=1.000000\nframe= 3 fps=2.0 q=-0.0 size= 6kB time=00:00:00.21 bitrate= 219.9kbits/s speed=0.137x \nframe= 12 fps=5.8 q=-0.0 size= 6kB time=00:00:01.11 bitrate= 43.2kbits/s speed=0.54x \nframe= 21 fps=8.1 q=-0.0 size= 6kB time=00:00:02.01 bitrate= 24.8kbits/s speed=0.771x \nframe= 30 fps=9.5 q=-0.0 size= 6kB time=00:00:02.91 bitrate= 17.7kbits/s speed=0.922x \nframe= 39 fps= 11 q=-0.0 size= 7kB time=00:00:03.81 bitrate= 14.0kbits/s speed=1.03x \nframe= 48 fps= 11 q=-0.0 size= 7kB time=00:00:04.71 bitrate= 11.7kbits/s speed=1.11x \nframe= 57 fps= 12 q=-0.0 size= 7kB time=00:00:05.61 bitrate= 10.2kbits/s speed=1.17x \nframe= 66 fps= 12 q=-0.0 size= 7kB time=00:00:06.51 bitrate= 9.0kbits/s speed=1.22x \nframe= 70 fps= 12 q=-0.0 Lsize= 7kB time=00:00:06.91 bitrate= 8.7kbits/s speed=1.23x \nvideo:7kB audio:0kB subtitle:0kB other streams:0kB global headers:0kB muxing overhead: 0.268312%\nDone prediction.gif !!!\n", + "history_begin_time" : 1655303587784, + "history_end_time" : 1655306726671, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "vZpUSktu1naw", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom matplotlib import gridspec\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 65\nmpl.rcParams['axes.linewidth'] = 2\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\ncmaq_actual = pd.read_csv(\"/groups/ESS/aalnaim/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(\"/groups/ESS/aalnaim/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_actual = cmaq_actual['YYYYMMDDHH'].unique()\ndfs_actual = dict(tuple(cmaq_actual.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ndef add_colorbar(fig, axes):\n \n norm = mpl.colors.Normalize(vmin=0,vmax=80)\n\n sm = plt.cm.ScalarMappable(cmap=cmap, norm=norm)\n \n cb = fig.colorbar(sm, ticks=list(range(0, 84, 4)), \tlocation='bottom', format='%.0f', boundaries=np.arange(0,84,4), \n 
spacing='uniform', drawedges=True, pad=0.05, ax=axes)\n\n\n cb.outline.set_linewidth(2)\n cb.dividers.set_color('black')\n cb.dividers.set_linewidth(2)\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n \nfig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2,2, figsize=(80, 50))\n \nadd_colorbar(fig, ax1)\nadd_colorbar(fig, ax2)\nadd_colorbar(fig, ax3)\nadd_colorbar(fig, ax4) \n \n \nfor t in time_:\n \n print(\"Plotting... \", t)\n \n # CMAQ prediction dataframe \n df = dfs[t]\n \n # AirNow observation dataframe\n df_Airnow = dfs_Airnow[t]\n\n # CMAQ actual dataframe\n df_actual = dfs_actual[t]\n \n # Setting up GeoPandas df from [CMAQ prediction dataframe] \n predictionMapData = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n predictionMapData = predictionMapData.set_crs(\"EPSG:4326\")\n\n # Setting up GeoPandas df from [AirNow observation dataframe]\n predictionAirnowData = gpd.GeoDataFrame(\n df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n predictionAirnowData = predictionAirnowData.set_crs(\"EPSG:4326\")\n \n # Setting up GeoPandas df from [CMAQ actual dataframe]\n cmaqActualData = gpd.GeoDataFrame(\n df_actual, geometry=gpd.points_from_xy(df_actual.Longitude, df_actual.Latitude))\n cmaqActualData = cmaqActualData.set_crs(\"EPSG:4326\")\n\n\n ###### Plots ROW 1 ######\n \n # Plotting CMAQ prediction Map \n predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax1)\n\n # Plotting CMAQ predictions base layer for AirNow observations \n predictionMap = predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax2)\n \n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=predictionMap, column='AirNOW_O3', \n marker='o', markersize=400, cmap=cmap, \n linewidths=3, edgecolors=\"black\")\n \n \n ###### Plots ROW 2 ######\n \n # Plotting Actual CMAQ Map \n cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax3)\n \n # Plotting Actual CMAQ base layer for AirNow observations \n actualCmaqMap = cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax4)\n \n\n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=actualCmaqMap, column='AirNOW_O3', \n marker='o', markersize=400, cmap=cmap, \n linewidths=3, edgecolors=\"black\")\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n predictionMapPlotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n AirNowPlotTitle = datetime.strftime(dateObj, \"AirNow Stations: %Y-%m-%d (Time: %-H)\")\n cmaqActualPlotTitle = datetime.strftime(dateObj, \"Actual CMAQ: %Y-%m-%d (Time: %-H)\")\n AirNowActualPlotTitle = datetime.strftime(dateObj, \"Actual AirNow Stations: %Y-%m-%d (Time: %-H)\")\n ax1.set_title(predictionMapPlotTitle)\n ax2.set_title(AirNowPlotTitle)\n ax3.set_title(cmaqActualPlotTitle)\n ax4.set_title(AirNowActualPlotTitle)\n print(\"Saving Prediction Map: \", predictionMapPlotTitle)\n \n plt.tight_layout()\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n plt.close()\n \n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n 
\nimageio.mimsave('/groups/ESS/aalnaim/cmaq/prediction.mp4', images, fps=10)\n\n\nprint(\"Generating prediction.gif ...\")\n# create .gif from .mp4 using FFmpeg\nos.system('module load ffmpeg; ffmpeg -i /groups/ESS/aalnaim/cmaq/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/prediction.mp4')\nprint(\"Done prediction.gif !!!\")\n\n \n\n\n", + "history_output" : "Plotting... 2022061312\nSaving Prediction Map: 2022-06-13 (Time: 12)\nPlotting... 2022061313\nSaving Prediction Map: 2022-06-13 (Time: 13)\nPlotting... 2022061314\nSaving Prediction Map: 2022-06-13 (Time: 14)\nPlotting... 2022061315\nSaving Prediction Map: 2022-06-13 (Time: 15)\nPlotting... 2022061316\nSaving Prediction Map: 2022-06-13 (Time: 16)\nPlotting... 2022061317\nSaving Prediction Map: 2022-06-13 (Time: 17)\nPlotting... 2022061318\nSaving Prediction Map: 2022-06-13 (Time: 18)\nPlotting... 2022061319\nSaving Prediction Map: 2022-06-13 (Time: 19)\nPlotting... 2022061320\nSaving Prediction Map: 2022-06-13 (Time: 20)\nPlotting... 2022061321\nSaving Prediction Map: 2022-06-13 (Time: 21)\nPlotting... 2022061322\nSaving Prediction Map: 2022-06-13 (Time: 22)\nPlotting... 2022061323\nSaving Prediction Map: 2022-06-13 (Time: 23)\nPlotting... 2022061300\nSaving Prediction Map: 2022-06-13 (Time: 0)\nPlotting... 2022061301\nSaving Prediction Map: 2022-06-13 (Time: 1)\nPlotting... 2022061302\nSaving Prediction Map: 2022-06-13 (Time: 2)\nPlotting... 2022061303\nSaving Prediction Map: 2022-06-13 (Time: 3)\nPlotting... 2022061304\nSaving Prediction Map: 2022-06-13 (Time: 4)\nPlotting... 2022061305\nSaving Prediction Map: 2022-06-13 (Time: 5)\nPlotting... 2022061306\nSaving Prediction Map: 2022-06-13 (Time: 6)\nPlotting... 2022061307\nSaving Prediction Map: 2022-06-13 (Time: 7)\nPlotting... 2022061308\nSaving Prediction Map: 2022-06-13 (Time: 8)\nPlotting... 2022061309\nSaving Prediction Map: 2022-06-13 (Time: 9)\nPlotting... 2022061310\nSaving Prediction Map: 2022-06-13 (Time: 10)\nPlotting... 2022061311\nSaving Prediction Map: 2022-06-13 (Time: 11)\nPlotting... 2022061212\nSaving Prediction Map: 2022-06-12 (Time: 12)\nPlotting... 2022061213\nSaving Prediction Map: 2022-06-12 (Time: 13)\nPlotting... 2022061214\nSaving Prediction Map: 2022-06-12 (Time: 14)\nPlotting... 2022061215\nSaving Prediction Map: 2022-06-12 (Time: 15)\nPlotting... 2022061216\nSaving Prediction Map: 2022-06-12 (Time: 16)\nPlotting... 2022061217\nSaving Prediction Map: 2022-06-12 (Time: 17)\nPlotting... 2022061218\nSaving Prediction Map: 2022-06-12 (Time: 18)\nPlotting... 2022061219\nSaving Prediction Map: 2022-06-12 (Time: 19)\nPlotting... 2022061220\nSaving Prediction Map: 2022-06-12 (Time: 20)\nPlotting... 2022061221\nSaving Prediction Map: 2022-06-12 (Time: 21)\nPlotting... 2022061222\nSaving Prediction Map: 2022-06-12 (Time: 22)\nPlotting... 2022061223\nSaving Prediction Map: 2022-06-12 (Time: 23)\nPlotting... 2022061200\nSaving Prediction Map: 2022-06-12 (Time: 0)\nPlotting... 2022061201\nSaving Prediction Map: 2022-06-12 (Time: 1)\nPlotting... 2022061202\nSaving Prediction Map: 2022-06-12 (Time: 2)\nPlotting... 2022061203\nSaving Prediction Map: 2022-06-12 (Time: 3)\nPlotting... 2022061204\nSaving Prediction Map: 2022-06-12 (Time: 4)\nPlotting... 2022061205\nSaving Prediction Map: 2022-06-12 (Time: 5)\nPlotting... 2022061206\nSaving Prediction Map: 2022-06-12 (Time: 6)\nPlotting... 
2022061207\nSaving Prediction Map: 2022-06-12 (Time: 7)\nPlotting... 2022061208\nSaving Prediction Map: 2022-06-12 (Time: 8)\nPlotting... 2022061209\nSaving Prediction Map: 2022-06-12 (Time: 9)\nPlotting... 2022061210\nSaving Prediction Map: 2022-06-12 (Time: 10)\nPlotting... 2022061211\nSaving Prediction Map: 2022-06-12 (Time: 11)\nPlotting... 2022061112\nSaving Prediction Map: 2022-06-11 (Time: 12)\nPlotting... 2022061113\nSaving Prediction Map: 2022-06-11 (Time: 13)\nPlotting... 2022061114\nSaving Prediction Map: 2022-06-11 (Time: 14)\nPlotting... 2022061115\nSaving Prediction Map: 2022-06-11 (Time: 15)\nPlotting... 2022061116\nSaving Prediction Map: 2022-06-11 (Time: 16)\nPlotting... 2022061117\nSaving Prediction Map: 2022-06-11 (Time: 17)\nPlotting... 2022061118\nSaving Prediction Map: 2022-06-11 (Time: 18)\nPlotting... 2022061119\nSaving Prediction Map: 2022-06-11 (Time: 19)\nPlotting... 2022061120\nSaving Prediction Map: 2022-06-11 (Time: 20)\nPlotting... 2022061121\nSaving Prediction Map: 2022-06-11 (Time: 21)\nPlotting... 2022061122\nSaving Prediction Map: 2022-06-11 (Time: 22)\nPlotting... 2022061123\nSaving Prediction Map: 2022-06-11 (Time: 23)\nPlotting... 2022061100\nSaving Prediction Map: 2022-06-11 (Time: 0)\nPlotting... 2022061101\nSaving Prediction Map: 2022-06-11 (Time: 1)\nPlotting... 2022061102\nSaving Prediction Map: 2022-06-11 (Time: 2)\nPlotting... 2022061103\nSaving Prediction Map: 2022-06-11 (Time: 3)\nPlotting... 2022061104\nSaving Prediction Map: 2022-06-11 (Time: 4)\nPlotting... 2022061105\nSaving Prediction Map: 2022-06-11 (Time: 5)\nPlotting... 2022061106\nSaving Prediction Map: 2022-06-11 (Time: 6)\nPlotting... 2022061107\nSaving Prediction Map: 2022-06-11 (Time: 7)\nPlotting... 2022061108\nSaving Prediction Map: 2022-06-11 (Time: 8)\nPlotting... 2022061109\nSaving Prediction Map: 2022-06-11 (Time: 9)\nPlotting... 2022061110\nSaving Prediction Map: 2022-06-11 (Time: 10)\nPlotting... 
2022061111\nSaving Prediction Map: 2022-06-11 (Time: 11)\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061100.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061101.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061102.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061103.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061104.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061105.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061106.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061107.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061108.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061109.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061110.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061111.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061112.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061113.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061114.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061115.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061116.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061117.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061118.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061119.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061120.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061121.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061122.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061123.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061200.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061201.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061202.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061203.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061204.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061205.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061206.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061207.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061208.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061209.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061210.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061211.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061212.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061213.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061214.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061215.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061216.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061217.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061218.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061219.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061220.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061221.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061222.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061223.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061300.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061301.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061302.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061303.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061304.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061305.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061306.tif\n/groups/ESS/aalnaim/cmaq/predict
ion_maps/Plots_2022061307.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061308.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061309.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061310.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061311.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061312.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061313.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061314.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061315.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061316.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061317.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061318.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061319.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061320.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061321.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061322.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061323.tif\nTraceback (most recent call last):\n File \"prediction_maps.py\", line 163, in \n imageio.mimsave('/groups/ESS/aalnaim/cmaq/prediction.mp4', images, fps=10)\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/imageio/v2.py\", line 331, in mimwrite\n return file.write(ims, **kwargs)\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/imageio/core/legacy_plugin_wrapper.py\", line 216, in write\n writer.append_data(ndimage)\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/imageio/core/format.py\", line 589, in append_data\n return self._append_data(im, total_meta)\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/imageio/plugins/ffmpeg.py\", line 597, in _append_data\n raise ValueError(\"All images in a movie should have same size\")\nValueError: All images in a movie should have same size\n", + "history_begin_time" : 1655301328450, + "history_end_time" : 1655306726078, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "W9qIf3QsS2nC", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom matplotlib import gridspec\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 65\nmpl.rcParams['axes.linewidth'] = 2\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\ncmaq_actual = pd.read_csv(\"/groups/ESS/aalnaim/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(\"/groups/ESS/aalnaim/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_actual = cmaq_actual['YYYYMMDDHH'].unique()\ndfs_actual = 
dict(tuple(cmaq_actual.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ndef add_colorbar(fig, axes):\n \n norm = mpl.colors.Normalize(vmin=0,vmax=80)\n\n sm = plt.cm.ScalarMappable(cmap=cmap, norm=norm)\n \n cb = fig.colorbar(sm, ticks=list(range(0, 84, 4)), \tlocation='bottom', format='%.0f', boundaries=np.arange(0,84,4), \n spacing='uniform', drawedges=True, pad=0.05, ax=axes)\n\n\n cb.outline.set_linewidth(2)\n cb.dividers.set_color('black')\n cb.dividers.set_linewidth(2)\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n \nfig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2,2, figsize=(80, 50))\nplt.gca().set_aspect('equal', adjustable='datalim')\n \nadd_colorbar(fig, ax1)\nadd_colorbar(fig, ax2)\nadd_colorbar(fig, ax3)\nadd_colorbar(fig, ax4) \n \n \nfor t in time_:\n \n print(\"Plotting... \", t)\n \n # CMAQ prediction dataframe \n df = dfs[t]\n \n # AirNow observation dataframe\n df_Airnow = dfs_Airnow[t]\n\n # CMAQ actual dataframe\n df_actual = dfs_actual[t]\n \n # Setting up GeoPandas df from [CMAQ prediction dataframe] \n predictionMapData = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n predictionMapData = predictionMapData.set_crs(\"EPSG:4326\")\n\n # Setting up GeoPandas df from [AirNow observation dataframe]\n predictionAirnowData = gpd.GeoDataFrame(\n df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n predictionAirnowData = predictionAirnowData.set_crs(\"EPSG:4326\")\n \n # Setting up GeoPandas df from [CMAQ actual dataframe]\n cmaqActualData = gpd.GeoDataFrame(\n df_actual, geometry=gpd.points_from_xy(df_actual.Longitude, df_actual.Latitude))\n cmaqActualData = cmaqActualData.set_crs(\"EPSG:4326\")\n\n\n ###### Plots ROW 1 ######\n \n # Plotting CMAQ prediction Map \n predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax1)\n\n # Plotting CMAQ predictions base layer for AirNow observations \n predictionMap = predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax2)\n \n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=predictionMap, column='AirNOW_O3', \n marker='o', markersize=400, cmap=cmap, \n linewidths=3, edgecolors=\"black\")\n \n \n ###### Plots ROW 2 ######\n \n # Plotting Actual CMAQ Map \n cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax3)\n \n # Plotting Actual CMAQ base layer for AirNow observations \n actualCmaqMap = cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax4)\n \n\n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=actualCmaqMap, column='AirNOW_O3', \n marker='o', markersize=400, cmap=cmap, \n linewidths=3, edgecolors=\"black\")\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n predictionMapPlotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n AirNowPlotTitle = datetime.strftime(dateObj, \"AirNow Stations: %Y-%m-%d (Time: %-H)\")\n cmaqActualPlotTitle = datetime.strftime(dateObj, \"Actual CMAQ: %Y-%m-%d (Time: %-H)\")\n AirNowActualPlotTitle = datetime.strftime(dateObj, \"Actual AirNow Stations: %Y-%m-%d (Time: %-H)\")\n ax1.set_title(predictionMapPlotTitle)\n ax2.set_title(AirNowPlotTitle)\n ax3.set_title(cmaqActualPlotTitle)\n ax4.set_title(AirNowActualPlotTitle)\n print(\"Saving 
Prediction Map: \", predictionMapPlotTitle)\n \n plt.tight_layout()\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n \n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave('/groups/ESS/aalnaim/cmaq/prediction.mp4', images, fps=10)\n\n\nprint(\"Generating prediction.gif ...\")\n# create .gif from .mp4 using FFmpeg\nos.system('module load ffmpeg; ffmpeg -i /groups/ESS/aalnaim/cmaq/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/prediction.mp4')\nprint(\"Done prediction.gif !!!\")\n\n \n\n\n", + "history_output" : "Plotting... 2022061312\nSaving Prediction Map: 2022-06-13 (Time: 12)\nPlotting... 2022061313\nSaving Prediction Map: 2022-06-13 (Time: 13)\nPlotting... 2022061314\nSaving Prediction Map: 2022-06-13 (Time: 14)\nPlotting... 2022061315\nSaving Prediction Map: 2022-06-13 (Time: 15)\nPlotting... 2022061316\nSaving Prediction Map: 2022-06-13 (Time: 16)\nPlotting... 2022061317\nSaving Prediction Map: 2022-06-13 (Time: 17)\nPlotting... 2022061318\n", + "history_begin_time" : 1655299915664, + "history_end_time" : 1655301241322, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "0Ni4U8FbQHlf", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom matplotlib import gridspec\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 65\nmpl.rcParams['axes.linewidth'] = 2\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\ncmaq_actual = pd.read_csv(\"/groups/ESS/aalnaim/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(\"/groups/ESS/aalnaim/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_actual = cmaq_actual['YYYYMMDDHH'].unique()\ndfs_actual = dict(tuple(cmaq_actual.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\nfig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2,2, figsize=(80, 50))\nplt.gca().set_aspect('equal', adjustable='datalim')\n\n\ndef add_colorbar(fig, axes):\n \n norm = mpl.colors.Normalize(vmin=0,vmax=80)\n\n sm = plt.cm.ScalarMappable(cmap=cmap, norm=norm)\n \n cb = fig.colorbar(sm, ticks=list(range(0, 84, 4)), \tlocation='bottom', format='%.0f', boundaries=np.arange(0,84,4), \n 
spacing='uniform', drawedges=True, pad=0.05, ax=axes)\n\n\n cb.outline.set_linewidth(2)\n cb.dividers.set_color('black')\n cb.dividers.set_linewidth(2)\n\n\nfor t in time_:\n \n print(\"Plotting... \", t)\n \n # CMAQ prediction dataframe \n df = dfs[t]\n \n # AirNow observation dataframe\n df_Airnow = dfs_Airnow[t]\n\n # CMAQ actual dataframe\n df_actual = dfs_actual[t]\n \n # Setting up GeoPandas df from [CMAQ prediction dataframe] \n predictionMapData = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n predictionMapData = predictionMapData.set_crs(\"EPSG:4326\")\n\n # Setting up GeoPandas df from [AirNow observation dataframe]\n predictionAirnowData = gpd.GeoDataFrame(\n df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n predictionAirnowData = predictionAirnowData.set_crs(\"EPSG:4326\")\n \n # Setting up GeoPandas df from [CMAQ actual dataframe]\n cmaqActualData = gpd.GeoDataFrame(\n df_actual, geometry=gpd.points_from_xy(df_actual.Longitude, df_actual.Latitude))\n cmaqActualData = cmaqActualData.set_crs(\"EPSG:4326\")\n\n\n ###### Plots ROW 1 ######\n \n # Plotting CMAQ prediction Map \n predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax1)\n\n # Plotting CMAQ predictions base layer for AirNow observations \n predictionMap = predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax2)\n \n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=predictionMap, column='AirNOW_O3', \n marker='o', markersize=400, cmap=cmap, \n linewidths=3, edgecolors=\"black\")\n \n \n ###### Plots ROW 2 ######\n \n # Plotting Actual CMAQ Map \n cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax3)\n \n # Plotting Actual CMAQ base layer for AirNow observations \n actualCmaqMap = cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax4)\n \n\n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=actualCmaqMap, column='AirNOW_O3', \n marker='o', markersize=400, cmap=cmap, \n linewidths=3, edgecolors=\"black\")\n \n add_colorbar(fig, ax1)\n add_colorbar(fig, ax2)\n add_colorbar(fig, ax3)\n add_colorbar(fig, ax4) \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n predictionMapPlotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n AirNowPlotTitle = datetime.strftime(dateObj, \"AirNow Stations: %Y-%m-%d (Time: %-H)\")\n cmaqActualPlotTitle = datetime.strftime(dateObj, \"Actual CMAQ: %Y-%m-%d (Time: %-H)\")\n AirNowActualPlotTitle = datetime.strftime(dateObj, \"Actual AirNow Stations: %Y-%m-%d (Time: %-H)\")\n ax1.set_title(predictionMapPlotTitle)\n ax2.set_title(AirNowPlotTitle)\n ax3.set_title(cmaqActualPlotTitle)\n ax4.set_title(AirNowActualPlotTitle)\n print(\"Saving Prediction Map: \", predictionMapPlotTitle)\n \n plt.tight_layout()\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n \n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave('/groups/ESS/aalnaim/cmaq/prediction.mp4', images, fps=10)\n\n\nprint(\"Generating prediction.gif ...\")\n# create .gif from .mp4 
using FFmpeg\nos.system('module load ffmpeg; ffmpeg -i /groups/ESS/aalnaim/cmaq/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/prediction.mp4')\nprint(\"Done prediction.gif !!!\")\n\n \n\n\n", + "history_output" : "Plotting... 2022061312\nSaving Prediction Map: 2022-06-13 (Time: 12)\nPlotting... 2022061313\nSaving Prediction Map: 2022-06-13 (Time: 13)\nPlotting... 2022061314\nSaving Prediction Map: 2022-06-13 (Time: 14)\nPlotting... 2022061315\nSaving Prediction Map: 2022-06-13 (Time: 15)\nPlotting... 2022061316\nSaving Prediction Map: 2022-06-13 (Time: 16)\nPlotting... 2022061317\n", + "history_begin_time" : 1655280159331, + "history_end_time" : 1655301109378, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "tX0zubQfQBeZ", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom matplotlib import gridspec\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 65\nmpl.rcParams['axes.linewidth'] = 2\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\ncmaq_actual = pd.read_csv(home + \"/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_actual = cmaq_actual['YYYYMMDDHH'].unique()\ndfs_actual = dict(tuple(cmaq_actual.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\nfig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2,2, figsize=(80, 50))\nplt.gca().set_aspect('equal', adjustable='datalim')\n\n\ndef add_colorbar(fig, axes):\n \n norm = mpl.colors.Normalize(vmin=0,vmax=80)\n\n sm = plt.cm.ScalarMappable(cmap=cmap, norm=norm)\n \n cb = fig.colorbar(sm, ticks=list(range(0, 84, 4)), \tlocation='bottom', format='%.0f', boundaries=np.arange(0,84,4), \n spacing='uniform', drawedges=True, pad=0.05, ax=axes)\n\n\n cb.outline.set_linewidth(2)\n cb.dividers.set_color('black')\n cb.dividers.set_linewidth(2)\n\n\nfor t in time_:\n \n print(\"Plotting... 
\", t)\n \n # CMAQ prediction dataframe \n df = dfs[t]\n \n # AirNow observation dataframe\n df_Airnow = dfs_Airnow[t]\n\n # CMAQ actual dataframe\n df_actual = dfs_actual[t]\n \n # Setting up GeoPandas df from [CMAQ prediction dataframe] \n predictionMapData = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n predictionMapData = predictionMapData.set_crs(\"EPSG:4326\")\n\n # Setting up GeoPandas df from [AirNow observation dataframe]\n predictionAirnowData = gpd.GeoDataFrame(\n df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n predictionAirnowData = predictionAirnowData.set_crs(\"EPSG:4326\")\n \n # Setting up GeoPandas df from [CMAQ actual dataframe]\n cmaqActualData = gpd.GeoDataFrame(\n df_actual, geometry=gpd.points_from_xy(df_actual.Longitude, df_actual.Latitude))\n cmaqActualData = cmaqActualData.set_crs(\"EPSG:4326\")\n\n\n ###### Plots ROW 1 ######\n \n # Plotting CMAQ prediction Map \n predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax1)\n\n # Plotting CMAQ predictions base layer for AirNow observations \n predictionMap = predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax2)\n \n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=predictionMap, column='AirNOW_O3', \n marker='o', markersize=400, cmap=cmap, \n linewidths=3, edgecolors=\"black\")\n \n \n ###### Plots ROW 2 ######\n \n # Plotting Actual CMAQ Map \n cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax3)\n \n # Plotting Actual CMAQ base layer for AirNow observations \n actualCmaqMap = cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax4)\n \n\n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=actualCmaqMap, column='AirNOW_O3', \n marker='o', markersize=400, cmap=cmap, \n linewidths=3, edgecolors=\"black\")\n \n add_colorbar(fig, ax1)\n add_colorbar(fig, ax2)\n add_colorbar(fig, ax3)\n add_colorbar(fig, ax4) \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n predictionMapPlotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n AirNowPlotTitle = datetime.strftime(dateObj, \"AirNow Stations: %Y-%m-%d (Time: %-H)\")\n cmaqActualPlotTitle = datetime.strftime(dateObj, \"Actual CMAQ: %Y-%m-%d (Time: %-H)\")\n AirNowActualPlotTitle = datetime.strftime(dateObj, \"Actual AirNow Stations: %Y-%m-%d (Time: %-H)\")\n ax1.set_title(predictionMapPlotTitle)\n ax2.set_title(AirNowPlotTitle)\n ax3.set_title(cmaqActualPlotTitle)\n ax4.set_title(AirNowActualPlotTitle)\n print(\"Saving Prediction Map: \", predictionMapPlotTitle)\n \n plt.tight_layout()\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n \n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave('/groups/ESS/aalnaim/cmaq/prediction.mp4', images, fps=10)\n\n\nprint(\"Generating prediction.gif ...\")\n# create .gif from .mp4 using FFmpeg\nos.system('module load ffmpeg; ffmpeg -i /groups/ESS/aalnaim/cmaq/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" 
/groups/ESS/aalnaim/cmaq/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/prediction.mp4')\nprint(\"Done prediction.gif !!!\")\n\n \n\n\n", + "history_output" : "Plotting... 2022061012\nSaving Prediction Map: 2022-06-10 (Time: 12)\nPlotting... 2022061013\nSaving Prediction Map: 2022-06-10 (Time: 13)\nPlotting... 2022061014\nSaving Prediction Map: 2022-06-10 (Time: 14)\nPlotting... 2022061015\nSaving Prediction Map: 2022-06-10 (Time: 15)\nPlotting... 2022061016\nSaving Prediction Map: 2022-06-10 (Time: 16)\nPlotting... 2022061017\nSaving Prediction Map: 2022-06-10 (Time: 17)\nPlotting... 2022061018\nSaving Prediction Map: 2022-06-10 (Time: 18)\nPlotting... 2022061019\nSaving Prediction Map: 2022-06-10 (Time: 19)\nPlotting... 2022061020\n", + "history_begin_time" : 1655266820610, + "history_end_time" : 1655275674812, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "Vgg8cUeFncZL", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom matplotlib import gridspec\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 65\nmpl.rcParams['axes.linewidth'] = 2\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\ncmaq_actual = pd.read_csv(home + \"/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_actual = cmaq_actual['YYYYMMDDHH'].unique()\ndfs_actual = dict(tuple(cmaq_actual.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\nfig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2,2, figsize=(80, 50))\nplt.gca().set_aspect('equal', adjustable='datalim')\n\n\ndef add_colorbar(fig, axes):\n \n norm = mpl.colors.Normalize(vmin=0,vmax=80)\n\n sm = plt.cm.ScalarMappable(cmap=cmap, norm=norm)\n \n cb = fig.colorbar(sm, ticks=list(range(0, 84, 4)), \tlocation='bottom', format='%.0f', boundaries=np.arange(0,84,4), \n spacing='uniform', drawedges=True, pad=0.05, ax=axes)\n\n\n cb.outline.set_linewidth(2)\n cb.dividers.set_color('black')\n cb.dividers.set_linewidth(2)\n\n\ntime_ = time_[:1]\n\nfor t in time_:\n \n print(\"Plotting... 
\", t)\n \n # CMAQ prediction dataframe \n df = dfs[t]\n \n # AirNow observation dataframe\n df_Airnow = dfs_Airnow[t]\n\n # CMAQ actual dataframe\n df_actual = dfs_actual[t]\n \n # Setting up GeoPandas df from [CMAQ prediction dataframe] \n predictionMapData = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n predictionMapData = predictionMapData.set_crs(\"EPSG:4326\")\n\n # Setting up GeoPandas df from [AirNow observation dataframe]\n predictionAirnowData = gpd.GeoDataFrame(\n df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n predictionAirnowData = predictionAirnowData.set_crs(\"EPSG:4326\")\n \n # Setting up GeoPandas df from [CMAQ actual dataframe]\n cmaqActualData = gpd.GeoDataFrame(\n df_actual, geometry=gpd.points_from_xy(df_actual.Longitude, df_actual.Latitude))\n cmaqActualData = cmaqActualData.set_crs(\"EPSG:4326\")\n\n\n ###### Plots ROW 1 ######\n \n # Plotting CMAQ prediction Map \n predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax1)\n\n # Plotting CMAQ predictions base layer for AirNow observations \n predictionMap = predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax2)\n \n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=predictionMap, column='AirNOW_O3', \n marker='o', markersize=400, cmap=cmap, \n linewidths=3, edgecolors=\"black\")\n \n \n ###### Plots ROW 2 ######\n \n # Plotting Actual CMAQ Map \n cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax3)\n \n # Plotting Actual CMAQ base layer for AirNow observations \n actualCmaqMap = cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax4)\n \n\n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=actualCmaqMap, column='AirNOW_O3', \n marker='o', markersize=400, cmap=cmap, \n linewidths=3, edgecolors=\"black\")\n \n add_colorbar(fig, ax1)\n add_colorbar(fig, ax2)\n add_colorbar(fig, ax3)\n add_colorbar(fig, ax4) \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n predictionMapPlotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n AirNowPlotTitle = datetime.strftime(dateObj, \"AirNow Stations: %Y-%m-%d (Time: %-H)\")\n cmaqActualPlotTitle = datetime.strftime(dateObj, \"Actual CMAQ: %Y-%m-%d (Time: %-H)\")\n AirNowActualPlotTitle = datetime.strftime(dateObj, \"Actual AirNow Stations: %Y-%m-%d (Time: %-H)\")\n ax1.set_title(predictionMapPlotTitle)\n ax2.set_title(AirNowPlotTitle)\n ax3.set_title(cmaqActualPlotTitle)\n ax4.set_title(AirNowActualPlotTitle)\n print(\"Saving Prediction Map: \", predictionMapPlotTitle)\n \n plt.tight_layout()\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n \n\n\n\n\n \n\n\n", + "history_output" : "Plotting... 
2022061012\nSaving Prediction Map: 2022-06-10 (Time: 12)\n", + "history_begin_time" : 1655265618498, + "history_end_time" : 1655275674212, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "CvVc0GRDzePg", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom matplotlib import gridspec\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 65\nmpl.rcParams['axes.linewidth'] = 2\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\ncmaq_actual = pd.read_csv(home + \"/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_actual = cmaq_actual['YYYYMMDDHH'].unique()\ndfs_actual = dict(tuple(cmaq_actual.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\nfig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2,2, figsize=(80, 50))\nplt.gca().set_aspect('equal', adjustable='datalim')\n\n\ndef add_colorbar(fig, axes):\n \n norm = mpl.colors.Normalize(vmin=0,vmax=80)\n\n sm = plt.cm.ScalarMappable(cmap=cmap, norm=norm)\n \n cb = fig.colorbar(predictionMap, ticks=list(range(0, 84, 4)), \tlocation='bottom', format='%.0f', boundaries=np.arange(0,84,4), \n spacing='uniform', drawedges=True, pad=0.05, ax=axes)\n\n\n cb.outline.set_linewidth(2)\n cb.dividers.set_color('black')\n cb.dividers.set_linewidth(2)\n\n\ntime_ = time_[:1]\n\nfor t in time_:\n \n print(\"Plotting... 
\", t)\n \n # CMAQ prediction dataframe \n df = dfs[t]\n \n # AirNow observation dataframe\n df_Airnow = dfs_Airnow[t]\n\n # CMAQ actual dataframe\n df_actual = dfs_actual[t]\n \n # Setting up GeoPandas df from [CMAQ prediction dataframe] \n predictionMapData = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n predictionMapData = predictionMapData.set_crs(\"EPSG:4326\")\n\n # Setting up GeoPandas df from [AirNow observation dataframe]\n predictionAirnowData = gpd.GeoDataFrame(\n df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n predictionAirnowData = predictionAirnowData.set_crs(\"EPSG:4326\")\n \n # Setting up GeoPandas df from [CMAQ actual dataframe]\n cmaqActualData = gpd.GeoDataFrame(\n df_actual, geometry=gpd.points_from_xy(df_actual.Longitude, df_actual.Latitude))\n cmaqActualData = cmaqActualData.set_crs(\"EPSG:4326\")\n\n\n ###### Plots ROW 1 ######\n \n # Plotting CMAQ prediction Map \n predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax1)\n\n # Plotting CMAQ predictions base layer for AirNow observations \n predictionMap = predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax2)\n \n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=predictionMap, column='AirNOW_O3', \n marker='o', markersize=400, cmap=cmap, \n linewidths=3, edgecolors=\"black\")\n \n \n ###### Plots ROW 2 ######\n \n # Plotting Actual CMAQ Map \n cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax3)\n \n # Plotting Actual CMAQ base layer for AirNow observations \n actualCmaqMap = cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax4)\n \n\n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=actualCmaqMap, column='AirNOW_O3', \n marker='o', markersize=400, cmap=cmap, \n linewidths=3, edgecolors=\"black\")\n \n add_colorbar(fig, ax1)\n add_colorbar(fig, ax2)\n add_colorbar(fig, ax3)\n add_colorbar(fig, ax4) \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n predictionMapPlotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n AirNowPlotTitle = datetime.strftime(dateObj, \"AirNow Stations: %Y-%m-%d (Time: %-H)\")\n cmaqActualPlotTitle = datetime.strftime(dateObj, \"Actual CMAQ: %Y-%m-%d (Time: %-H)\")\n AirNowActualPlotTitle = datetime.strftime(dateObj, \"Actual AirNow Stations: %Y-%m-%d (Time: %-H)\")\n ax1.set_title(predictionMapPlotTitle)\n ax2.set_title(AirNowPlotTitle)\n ax3.set_title(cmaqActualPlotTitle)\n ax4.set_title(AirNowActualPlotTitle)\n print(\"Saving Prediction Map: \", predictionMapPlotTitle)\n \n plt.tight_layout()\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n \n\n\n\n\n \n\n\n", + "history_output" : "Plotting... 
2022061012\nTraceback (most recent call last):\n File \"prediction_maps.py\", line 129, in \n add_colorbar(fig, ax1)\n File \"prediction_maps.py\", line 57, in add_colorbar\n cb = fig.colorbar(predictionMap, ticks=list(range(0, 84, 4)), \tlocation='bottom', format='%.0f', boundaries=np.arange(0,84,4), \n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/matplotlib/figure.py\", line 1158, in colorbar\n cb = cbar.Colorbar(cax, mappable, **cb_kw)\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/matplotlib/colorbar.py\", line 394, in __init__\n if mappable.get_array() is not None:\nAttributeError: 'AxesSubplot' object has no attribute 'get_array'\n", + "history_begin_time" : 1655264966913, + "history_end_time" : 1655275673660, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "Ed5bY0ARoZYT", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom matplotlib import gridspec\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 65\nmpl.rcParams['axes.linewidth'] = 2\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\ncmaq_actual = pd.read_csv(home + \"/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_actual = cmaq_actual['YYYYMMDDHH'].unique()\ndfs_actual = dict(tuple(cmaq_actual.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\nfig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2,2, figsize=(80, 50))\nplt.gca().set_aspect('equal', adjustable='datalim')\n\n\ndef add_colorbar(fig, axes):\n \n norm = mpl.colors.Normalize(vmin=0,vmax=80)\n\n sm = plt.cm.ScalarMappable(cmap=cmap, norm=norm)\n \n cb = fig.colorbar(predictionMap, ticks=list(range(0, 84, 4)), \tlocation='bottom', format='%.0f', boundaries=np.arange(0,84,4), \n spacing='uniform', drawedges=True, pad=0.05, ax=ax)\n\n\n cb.outline.set_linewidth(2)\n cb.dividers.set_color('black')\n cb.dividers.set_linewidth(2)\n\n\ntime_ = time_[:1]\n\nfor t in time_:\n \n print(\"Plotting... 
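Note on the traceback recorded above (it recurs in several of the earlier attempts further down): `GeoDataFrame.plot()` returns a Matplotlib `Axes`, while `colorbar()` expects a mappable, hence the `'AxesSubplot' object has no attribute 'get_array'` error. The runs that completed successfully at the top of this history instead build a `ScalarMappable` and pass it to `fig.colorbar()`. The sketch below is a minimal, self-contained illustration of that working pattern (kept separate from the exported run records); the helper name `add_colorbar` and the 0-80 ppb range simply mirror the scripts in the records, and any colormap object accepted by Matplotlib can be used for `cmap`.

import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt

def add_colorbar(fig, ax, cmap, vmin=0, vmax=80):
    # colorbar() needs a mappable; GeoDataFrame.plot() only returns an Axes,
    # so build a ScalarMappable carrying the colormap and value range.
    norm = mpl.colors.Normalize(vmin=vmin, vmax=vmax)
    sm = plt.cm.ScalarMappable(cmap=cmap, norm=norm)
    sm.set_array([])  # some Matplotlib versions require an array to be set
    cb = fig.colorbar(sm, ax=ax, ticks=list(range(vmin, vmax + 4, 4)),
                      location='bottom', format='%.0f',
                      boundaries=np.arange(vmin, vmax + 4, 4),
                      spacing='uniform', drawedges=True, pad=0.05)
    cb.outline.set_linewidth(2)
    cb.dividers.set_color('black')
    cb.dividers.set_linewidth(2)
    return cb

With a 2x2 figure like the one in the scripts above, this helper would be called once per panel, e.g. add_colorbar(fig, ax1, cmap); passing ax= explicitly also avoids the Matplotlib 3.6 deprecation warning seen in one of the runs below.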
\", t)\n \n # CMAQ prediction dataframe \n df = dfs[t]\n \n # AirNow observation dataframe\n df_Airnow = dfs_Airnow[t]\n\n # CMAQ actual dataframe\n df_actual = dfs_actual[t]\n \n # Setting up GeoPandas df from [CMAQ prediction dataframe] \n predictionMapData = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n predictionMapData = predictionMapData.set_crs(\"EPSG:4326\")\n\n # Setting up GeoPandas df from [AirNow observation dataframe]\n predictionAirnowData = gpd.GeoDataFrame(\n df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n predictionAirnowData = predictionAirnowData.set_crs(\"EPSG:4326\")\n \n # Setting up GeoPandas df from [CMAQ actual dataframe]\n cmaqActualData = gpd.GeoDataFrame(\n df_actual, geometry=gpd.points_from_xy(df_actual.Longitude, df_actual.Latitude))\n cmaqActualData = cmaqActualData.set_crs(\"EPSG:4326\")\n\n\n ###### Plots ROW 1 ######\n \n # Plotting CMAQ prediction Map \n predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax1)\n\n # Plotting CMAQ predictions base layer for AirNow observations \n predictionMap = predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax2)\n \n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=predictionMap, column='AirNOW_O3', \n marker='o', markersize=400, cmap=cmap, \n linewidths=3, edgecolors=\"black\")\n \n \n ###### Plots ROW 2 ######\n \n # Plotting Actual CMAQ Map \n cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax3)\n \n # Plotting Actual CMAQ base layer for AirNow observations \n actualCmaqMap = cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax4)\n \n\n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=actualCmaqMap, column='AirNOW_O3', \n marker='o', markersize=400, cmap=cmap, \n linewidths=3, edgecolors=\"black\")\n \n add_colorbar(fig, ax1)\n add_colorbar(fig, ax2)\n add_colorbar(fig, ax3)\n add_colorbar(fig, ax4) \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n predictionMapPlotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n AirNowPlotTitle = datetime.strftime(dateObj, \"AirNow Stations: %Y-%m-%d (Time: %-H)\")\n cmaqActualPlotTitle = datetime.strftime(dateObj, \"Actual CMAQ: %Y-%m-%d (Time: %-H)\")\n AirNowActualPlotTitle = datetime.strftime(dateObj, \"Actual AirNow Stations: %Y-%m-%d (Time: %-H)\")\n ax1.set_title(predictionMapPlotTitle)\n ax2.set_title(AirNowPlotTitle)\n ax3.set_title(cmaqActualPlotTitle)\n ax4.set_title(AirNowActualPlotTitle)\n print(\"Saving Prediction Map: \", predictionMapPlotTitle)\n \n plt.tight_layout()\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n \n\n\n\n\n \n\n\n", + "history_output" : "Plotting... 
2022061012\nTraceback (most recent call last):\n File \"prediction_maps.py\", line 129, in \n add_colorbar(fig, ax1)\n File \"prediction_maps.py\", line 58, in add_colorbar\n spacing='uniform', drawedges=True, pad=0.05, ax=ax)\nNameError: name 'ax' is not defined\n", + "history_begin_time" : 1655264807538, + "history_end_time" : 1655275673214, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "gjh7gY4dgV4s", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom matplotlib import gridspec\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 65\nmpl.rcParams['axes.linewidth'] = 2\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\ncmaq_actual = pd.read_csv(home + \"/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_actual = cmaq_actual['YYYYMMDDHH'].unique()\ndfs_actual = dict(tuple(cmaq_actual.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\nfig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2,2, figsize=(80, 50))\nplt.gca().set_aspect('equal', adjustable='datalim')\n\n\ndef add_colorbar(fig):\n \n norm = mpl.colors.Normalize(vmin=0,vmax=80)\n\n sm = plt.cm.ScalarMappable(cmap=cmap, norm=norm)\n \n cb = plt.colorbar(predictionMap, ticks=list(range(0, 84, 4)), \tlocation='bottom', format='%.0f', boundaries=np.arange(0,84,4), \n spacing='uniform', drawedges=True, pad=0.05, ax=fig.get_axes())\n\n\n cb.outline.set_linewidth(2)\n cb.dividers.set_color('black')\n cb.dividers.set_linewidth(2)\n\n\ntime_ = time_[:1]\n\nfor t in time_:\n \n print(\"Plotting... 
\", t)\n \n # CMAQ prediction dataframe \n df = dfs[t]\n \n # AirNow observation dataframe\n df_Airnow = dfs_Airnow[t]\n\n # CMAQ actual dataframe\n df_actual = dfs_actual[t]\n \n # Setting up GeoPandas df from [CMAQ prediction dataframe] \n predictionMapData = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n predictionMapData = predictionMapData.set_crs(\"EPSG:4326\")\n\n # Setting up GeoPandas df from [AirNow observation dataframe]\n predictionAirnowData = gpd.GeoDataFrame(\n df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n predictionAirnowData = predictionAirnowData.set_crs(\"EPSG:4326\")\n \n # Setting up GeoPandas df from [CMAQ actual dataframe]\n cmaqActualData = gpd.GeoDataFrame(\n df_actual, geometry=gpd.points_from_xy(df_actual.Longitude, df_actual.Latitude))\n cmaqActualData = cmaqActualData.set_crs(\"EPSG:4326\")\n\n\n ###### Plots ROW 1 ######\n \n # Plotting CMAQ prediction Map \n predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax1)\n\n # Plotting CMAQ predictions base layer for AirNow observations \n predictionMap = predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax2)\n \n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=predictionMap, column='AirNOW_O3', \n marker='o', markersize=400, cmap=cmap, \n linewidths=3, edgecolors=\"black\")\n \n \n ###### Plots ROW 2 ######\n \n # Plotting Actual CMAQ Map \n cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax3)\n \n # Plotting Actual CMAQ base layer for AirNow observations \n actualCmaqMap = cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax4)\n \n\n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=actualCmaqMap, column='AirNOW_O3', \n marker='o', markersize=400, cmap=cmap, \n linewidths=3, edgecolors=\"black\")\n \n add_colorbar(fig)\n\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n predictionMapPlotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n AirNowPlotTitle = datetime.strftime(dateObj, \"AirNow Stations: %Y-%m-%d (Time: %-H)\")\n cmaqActualPlotTitle = datetime.strftime(dateObj, \"Actual CMAQ: %Y-%m-%d (Time: %-H)\")\n AirNowActualPlotTitle = datetime.strftime(dateObj, \"Actual AirNow Stations: %Y-%m-%d (Time: %-H)\")\n ax1.set_title(predictionMapPlotTitle)\n ax2.set_title(AirNowPlotTitle)\n ax3.set_title(cmaqActualPlotTitle)\n ax4.set_title(AirNowActualPlotTitle)\n print(\"Saving Prediction Map: \", predictionMapPlotTitle)\n \n plt.tight_layout()\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n \n\n\n\n\n \n\n\n", + "history_output" : "Plotting... 
2022061012\nTraceback (most recent call last):\n File \"prediction_maps.py\", line 129, in \n add_colorbar(fig)\n File \"prediction_maps.py\", line 57, in add_colorbar\n cb = plt.colorbar(predictionMap, ticks=list(range(0, 84, 4)), \tlocation='bottom', format='%.0f', boundaries=np.arange(0,84,4), \n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/matplotlib/pyplot.py\", line 2088, in colorbar\n ret = gcf().colorbar(mappable, cax=cax, ax=ax, **kw)\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/matplotlib/figure.py\", line 1158, in colorbar\n cb = cbar.Colorbar(cax, mappable, **cb_kw)\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/matplotlib/colorbar.py\", line 394, in __init__\n if mappable.get_array() is not None:\nAttributeError: 'AxesSubplot' object has no attribute 'get_array'\n", + "history_begin_time" : 1655264491332, + "history_end_time" : 1655275672628, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "vZGdebI09tZI", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom matplotlib import gridspec\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 65\nmpl.rcParams['axes.linewidth'] = 2\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\ncmaq_actual = pd.read_csv(home + \"/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_actual = cmaq_actual['YYYYMMDDHH'].unique()\ndfs_actual = dict(tuple(cmaq_actual.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\nfig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2,2, figsize=(80, 50))\nplt.gca().set_aspect('equal', adjustable='datalim')\n\n\ndef add_colorbar(cmap, axes):\n \n norm = mpl.colors.Normalize(vmin=0,vmax=80)\n\n sm = plt.cm.ScalarMappable(cmap=cmap, norm=norm)\n \n cb = plt.colorbar(predictionMap, ticks=list(range(0, 84, 4)), \tlocation='bottom', format='%.0f', boundaries=np.arange(0,84,4), \n spacing='uniform', drawedges=True, pad=0.05, ax=axes)\n\n\n cb.outline.set_linewidth(2)\n cb.dividers.set_color('black')\n cb.dividers.set_linewidth(2)\n\n\ntime_ = time_[:1]\n\nfor t in time_:\n \n print(\"Plotting... 
\", t)\n \n # CMAQ prediction dataframe \n df = dfs[t]\n \n # AirNow observation dataframe\n df_Airnow = dfs_Airnow[t]\n\n # CMAQ actual dataframe\n df_actual = dfs_actual[t]\n \n # Setting up GeoPandas df from [CMAQ prediction dataframe] \n predictionMapData = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n predictionMapData = predictionMapData.set_crs(\"EPSG:4326\")\n\n # Setting up GeoPandas df from [AirNow observation dataframe]\n predictionAirnowData = gpd.GeoDataFrame(\n df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n predictionAirnowData = predictionAirnowData.set_crs(\"EPSG:4326\")\n \n # Setting up GeoPandas df from [CMAQ actual dataframe]\n cmaqActualData = gpd.GeoDataFrame(\n df_actual, geometry=gpd.points_from_xy(df_actual.Longitude, df_actual.Latitude))\n cmaqActualData = cmaqActualData.set_crs(\"EPSG:4326\")\n\n\n ###### Plots ROW 1 ######\n \n # Plotting CMAQ prediction Map \n predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax1)\n\n # Plotting CMAQ predictions base layer for AirNow observations \n predictionMap = predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax2)\n \n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=predictionMap, column='AirNOW_O3', \n marker='o', markersize=400, cmap=cmap, \n linewidths=3, edgecolors=\"black\")\n \n \n ###### Plots ROW 2 ######\n \n # Plotting Actual CMAQ Map \n cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax3)\n \n # Plotting Actual CMAQ base layer for AirNow observations \n actualCmaqMap = cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax4)\n \n\n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=actualCmaqMap, column='AirNOW_O3', \n marker='o', markersize=400, cmap=cmap, \n linewidths=3, edgecolors=\"black\")\n \n add_colorbar(cmap, ax1)\n add_colorbar(cmap, ax2)\n add_colorbar(cmap, ax3)\n add_colorbar(cmap, ax4)\n\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n predictionMapPlotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n AirNowPlotTitle = datetime.strftime(dateObj, \"AirNow Stations: %Y-%m-%d (Time: %-H)\")\n cmaqActualPlotTitle = datetime.strftime(dateObj, \"Actual CMAQ: %Y-%m-%d (Time: %-H)\")\n AirNowActualPlotTitle = datetime.strftime(dateObj, \"Actual AirNow Stations: %Y-%m-%d (Time: %-H)\")\n ax1.set_title(predictionMapPlotTitle)\n ax2.set_title(AirNowPlotTitle)\n ax3.set_title(cmaqActualPlotTitle)\n ax4.set_title(AirNowActualPlotTitle)\n print(\"Saving Prediction Map: \", predictionMapPlotTitle)\n \n plt.tight_layout()\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n \n\n\n\n\n \n\n\n", + "history_output" : "Plotting... 
2022061012\nTraceback (most recent call last):\n File \"prediction_maps.py\", line 129, in \n add_colorbar(cmap, ax1)\n File \"prediction_maps.py\", line 57, in add_colorbar\n cb = plt.colorbar(predictionMap, ticks=list(range(0, 84, 4)), \tlocation='bottom', format='%.0f', boundaries=np.arange(0,84,4), \n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/matplotlib/pyplot.py\", line 2088, in colorbar\n ret = gcf().colorbar(mappable, cax=cax, ax=ax, **kw)\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/matplotlib/figure.py\", line 1158, in colorbar\n cb = cbar.Colorbar(cax, mappable, **cb_kw)\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/matplotlib/colorbar.py\", line 394, in __init__\n if mappable.get_array() is not None:\nAttributeError: 'AxesSubplot' object has no attribute 'get_array'\n", + "history_begin_time" : 1655264191520, + "history_end_time" : 1655275672131, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "d3fl7HmjNhpr", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom matplotlib import gridspec\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 65\nmpl.rcParams['axes.linewidth'] = 2\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\ncmaq_actual = pd.read_csv(home + \"/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_actual = cmaq_actual['YYYYMMDDHH'].unique()\ndfs_actual = dict(tuple(cmaq_actual.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\nfig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2,2, figsize=(80, 50))\nplt.gca().set_aspect('equal', adjustable='datalim')\n\n\ntime_ = time_[:1]\n\nfor t in time_:\n \n print(\"Plotting... 
\", t)\n \n # CMAQ prediction dataframe \n df = dfs[t]\n \n # AirNow observation dataframe\n df_Airnow = dfs_Airnow[t]\n\n # CMAQ actual dataframe\n df_actual = dfs_actual[t]\n \n # Setting up GeoPandas df from [CMAQ prediction dataframe] \n predictionMapData = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n predictionMapData = predictionMapData.set_crs(\"EPSG:4326\")\n\n # Setting up GeoPandas df from [AirNow observation dataframe]\n predictionAirnowData = gpd.GeoDataFrame(\n df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n predictionAirnowData = predictionAirnowData.set_crs(\"EPSG:4326\")\n \n # Setting up GeoPandas df from [CMAQ actual dataframe]\n cmaqActualData = gpd.GeoDataFrame(\n df_actual, geometry=gpd.points_from_xy(df_actual.Longitude, df_actual.Latitude))\n cmaqActualData = cmaqActualData.set_crs(\"EPSG:4326\")\n\n\n ###### Plots ROW 1 ######\n \n # Plotting CMAQ prediction Map \n predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax1)\n\n # Plotting CMAQ predictions base layer for AirNow observations \n predictionMap = predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax2)\n \n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=predictionMap, column='AirNOW_O3', \n marker='o', markersize=400, cmap=cmap, \n linewidths=3, edgecolors=\"black\")\n \n \n ###### Plots ROW 2 ######\n \n # Plotting Actual CMAQ Map \n cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax3)\n \n # Plotting Actual CMAQ base layer for AirNow observations \n actualCmaqMap = cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax4)\n \n\n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=actualCmaqMap, column='AirNOW_O3', \n marker='o', markersize=400, cmap=cmap, \n linewidths=3, edgecolors=\"black\")\n \n \n norm = mpl.colors.Normalize(vmin=0,vmax=80)\n\n sm = plt.cm.ScalarMappable(cmap=cmap, norm=norm)\n \n cb = plt.colorbar(predictionMap, ticks=list(range(0, 84, 4)), \tlocation='bottom', format='%.0f', boundaries=np.arange(0,84,4), \n spacing='uniform', drawedges=True, pad=0.05)\n\n\n cb.outline.set_linewidth(2)\n cb.dividers.set_color('black')\n cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n predictionMapPlotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n AirNowPlotTitle = datetime.strftime(dateObj, \"AirNow Stations: %Y-%m-%d (Time: %-H)\")\n cmaqActualPlotTitle = datetime.strftime(dateObj, \"Actual CMAQ: %Y-%m-%d (Time: %-H)\")\n AirNowActualPlotTitle = datetime.strftime(dateObj, \"Actual AirNow Stations: %Y-%m-%d (Time: %-H)\")\n ax1.set_title(predictionMapPlotTitle)\n ax2.set_title(AirNowPlotTitle)\n ax3.set_title(cmaqActualPlotTitle)\n ax4.set_title(AirNowActualPlotTitle)\n print(\"Saving Prediction Map: \", predictionMapPlotTitle)\n \n plt.tight_layout()\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n \n\n\n\n\n \n\n\n", + "history_output" : "Plotting... 2022061012\nprediction_maps.py:119: MatplotlibDeprecationWarning: Starting from Matplotlib 3.6, colorbar() will steal space from the mappable's axes, rather than from the current axes, to place the colorbar. 
To silence this warning, explicitly pass the 'ax' argument to colorbar().\n cb = plt.colorbar(predictionMap, ticks=list(range(0, 84, 4)), \tlocation='bottom', format='%.0f', boundaries=np.arange(0,84,4),\nTraceback (most recent call last):\n File \"prediction_maps.py\", line 119, in \n cb = plt.colorbar(predictionMap, ticks=list(range(0, 84, 4)), \tlocation='bottom', format='%.0f', boundaries=np.arange(0,84,4), \n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/matplotlib/pyplot.py\", line 2088, in colorbar\n ret = gcf().colorbar(mappable, cax=cax, ax=ax, **kw)\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/matplotlib/figure.py\", line 1158, in colorbar\n cb = cbar.Colorbar(cax, mappable, **cb_kw)\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/matplotlib/colorbar.py\", line 394, in __init__\n if mappable.get_array() is not None:\nAttributeError: 'AxesSubplot' object has no attribute 'get_array'\n", + "history_begin_time" : 1655263695117, + "history_end_time" : 1655275671464, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "TINugq9aB6Fa", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom matplotlib import gridspec\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 65\nmpl.rcParams['axes.linewidth'] = 2\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\ncmaq_actual = pd.read_csv(home + \"/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_actual = cmaq_actual['YYYYMMDDHH'].unique()\ndfs_actual = dict(tuple(cmaq_actual.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\nfig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2,2, figsize=(80, 50))\nplt.gca().set_aspect('equal', adjustable='datalim')\n\n\ntime_ = time_[:1]\n\nfor t in time_:\n \n print(\"Plotting... 
\", t)\n \n # CMAQ prediction dataframe \n df = dfs[t]\n \n # AirNow observation dataframe\n df_Airnow = dfs_Airnow[t]\n\n # CMAQ actual dataframe\n df_actual = dfs_actual[t]\n \n # Setting up GeoPandas df from [CMAQ prediction dataframe] \n predictionMapData = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n predictionMapData = predictionMapData.set_crs(\"EPSG:4326\")\n\n # Setting up GeoPandas df from [AirNow observation dataframe]\n predictionAirnowData = gpd.GeoDataFrame(\n df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n predictionAirnowData = predictionAirnowData.set_crs(\"EPSG:4326\")\n \n # Setting up GeoPandas df from [CMAQ actual dataframe]\n cmaqActualData = gpd.GeoDataFrame(\n df_actual, geometry=gpd.points_from_xy(df_actual.Longitude, df_actual.Latitude))\n cmaqActualData = cmaqActualData.set_crs(\"EPSG:4326\")\n\n\n ###### Plots ROW 1 ######\n \n # Plotting CMAQ prediction Map \n predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax1)\n\n # Plotting CMAQ predictions base layer for AirNow observations \n predictionMap = predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax2)\n \n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=predictionMap, column='AirNOW_O3', \n marker='o', markersize=400, cmap=cmap, \n linewidths=3, edgecolors=\"black\")\n \n \n ###### Plots ROW 2 ######\n \n # Plotting Actual CMAQ Map \n cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax3)\n \n # Plotting Actual CMAQ base layer for AirNow observations \n actualCmaqMap = cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax4)\n \n\n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=actualCmaqMap, column='AirNOW_O3', \n marker='o', markersize=400, cmap=cmap, \n linewidths=3, edgecolors=\"black\")\n \n \n norm = mpl.colors.Normalize(vmin=0,vmax=80)\n\n\tsm = plt.cm.ScalarMappable(cmap=cmap, norm=norm)\n \n cb = plt.colorbar(predictionMap, ticks=list(range(0, 84, 4)), \tlocation='bottom', format='%.0f', boundaries=np.arange(0,84,4), \n spacing='uniform', drawedges=True, pad=0.05)\n\n\n cb.outline.set_linewidth(2)\n cb.dividers.set_color('black')\n cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n predictionMapPlotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n AirNowPlotTitle = datetime.strftime(dateObj, \"AirNow Stations: %Y-%m-%d (Time: %-H)\")\n cmaqActualPlotTitle = datetime.strftime(dateObj, \"Actual CMAQ: %Y-%m-%d (Time: %-H)\")\n AirNowActualPlotTitle = datetime.strftime(dateObj, \"Actual AirNow Stations: %Y-%m-%d (Time: %-H)\")\n ax1.set_title(predictionMapPlotTitle)\n ax2.set_title(AirNowPlotTitle)\n ax3.set_title(cmaqActualPlotTitle)\n ax4.set_title(AirNowActualPlotTitle)\n print(\"Saving Prediction Map: \", predictionMapPlotTitle)\n \n plt.tight_layout()\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n \n\n\n\n\n \n\n\n", + "history_output" : " File \"prediction_maps.py\", line 117\n sm = plt.cm.ScalarMappable(cmap=cmap, norm=norm)\n ^\nTabError: inconsistent use of tabs and spaces in indentation\n", + "history_begin_time" : 1655263617007, + "history_end_time" : 
1655275671007, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "CxSza2JazlTR", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom matplotlib import gridspec\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 65\nmpl.rcParams['axes.linewidth'] = 2\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\ncmaq_actual = pd.read_csv(home + \"/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_actual = cmaq_actual['YYYYMMDDHH'].unique()\ndfs_actual = dict(tuple(cmaq_actual.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\nfig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2,2, figsize=(80, 50))\nplt.gca().set_aspect('equal', adjustable='datalim')\n\n\ntime_ = time_[:1]\n\nfor t in time_:\n \n print(\"Plotting... 
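Note on the TabError recorded in the run just above: the `sm = plt.cm.ScalarMappable(...)` line is indented with a tab while the surrounding block uses spaces, and Python 3 refuses to mix the two within one block. Re-indenting with spaces resolves it. A tiny self-contained illustration follows (the helper name `make_mappable` is only for this example, not from the records):

import matplotlib as mpl
import matplotlib.pyplot as plt

def make_mappable(cmap, vmin=0, vmax=80):
    # Every line in this block uses four-space indentation; introducing a
    # tab-indented line into the same block is what raised the TabError above.
    norm = mpl.colors.Normalize(vmin=vmin, vmax=vmax)
    sm = plt.cm.ScalarMappable(cmap=cmap, norm=norm)
    return sm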
\", t)\n \n # CMAQ prediction dataframe \n df = dfs[t]\n \n # AirNow observation dataframe\n df_Airnow = dfs_Airnow[t]\n\n # CMAQ actual dataframe\n df_actual = dfs_actual[t]\n \n # Setting up GeoPandas df from [CMAQ prediction dataframe] \n predictionMapData = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n predictionMapData = predictionMapData.set_crs(\"EPSG:4326\")\n\n # Setting up GeoPandas df from [AirNow observation dataframe]\n predictionAirnowData = gpd.GeoDataFrame(\n df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n predictionAirnowData = predictionAirnowData.set_crs(\"EPSG:4326\")\n \n # Setting up GeoPandas df from [CMAQ actual dataframe]\n cmaqActualData = gpd.GeoDataFrame(\n df_actual, geometry=gpd.points_from_xy(df_actual.Longitude, df_actual.Latitude))\n cmaqActualData = cmaqActualData.set_crs(\"EPSG:4326\")\n\n\n ###### Plots ROW 1 ######\n \n # Plotting CMAQ prediction Map \n predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax1)\n\n # Plotting CMAQ predictions base layer for AirNow observations \n predictionMap = predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax2)\n \n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=predictionMap, column='AirNOW_O3', \n marker='o', markersize=400, cmap=cmap, \n linewidths=3, edgecolors=\"black\")\n \n \n ###### Plots ROW 2 ######\n \n # Plotting Actual CMAQ Map \n cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax3)\n \n # Plotting Actual CMAQ base layer for AirNow observations \n actualCmaqMap = cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax4)\n \n\n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=actualCmaqMap, column='AirNOW_O3', \n marker='o', markersize=400, cmap=cmap, \n linewidths=3, edgecolors=\"black\")\n \n \n norm = mpl.colors.Normalize(vmin=0,vmax=80)\n\n\tsm = plt.cm.ScalarMappable(cmap=cmap, norm=norm)\n \n cb = plt.colorbar(predictionMap, ticks=list(range(0, 84, 4)), \tlocation='bottom', format='%.0f', boundaries=np.arange(0,84,4), \n spacing='uniform', drawedges=True, pad=0.05)\n\n\n cb.outline.set_linewidth(2)\n cb.dividers.set_color('black')\n cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n predictionMapPlotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n AirNowPlotTitle = datetime.strftime(dateObj, \"AirNow Stations: %Y-%m-%d (Time: %-H)\")\n cmaqActualPlotTitle = datetime.strftime(dateObj, \"Actual CMAQ: %Y-%m-%d (Time: %-H)\")\n AirNowActualPlotTitle = datetime.strftime(dateObj, \"Actual AirNow Stations: %Y-%m-%d (Time: %-H)\")\n ax1.set_title(predictionMapPlotTitle)\n ax2.set_title(AirNowPlotTitle)\n ax3.set_title(cmaqActualPlotTitle)\n ax4.set_title(AirNowActualPlotTitle)\n print(\"Saving Prediction Map: \", predictionMapPlotTitle)\n \n plt.tight_layout()\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n \n\n\n\n\n \n\n\n", + "history_output" : null, + "history_begin_time" : 1655263557687, + "history_end_time" : null, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : null +},{ + "history_id" : "VMCpTPTNUYcl", + "history_input" : "# 
importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom matplotlib import gridspec\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 65\nmpl.rcParams['axes.linewidth'] = 2\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\ncmaq_actual = pd.read_csv(home + \"/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_actual = cmaq_actual['YYYYMMDDHH'].unique()\ndfs_actual = dict(tuple(cmaq_actual.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\nfig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2,2, figsize=(80, 50))\nplt.gca().set_aspect('equal', adjustable='datalim')\n\n\nfor t in time_:\n \n print(\"Plotting... \", t)\n \n # CMAQ prediction dataframe \n df = dfs[t]\n \n # AirNow observation dataframe\n df_Airnow = dfs_Airnow[t]\n\n # CMAQ actual dataframe\n df_actual = dfs_actual[t]\n \n # Setting up GeoPandas df from [CMAQ prediction dataframe] \n predictionMapData = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n predictionMapData = predictionMapData.set_crs(\"EPSG:4326\")\n\n # Setting up GeoPandas df from [AirNow observation dataframe]\n predictionAirnowData = gpd.GeoDataFrame(\n df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n predictionAirnowData = predictionAirnowData.set_crs(\"EPSG:4326\")\n \n # Setting up GeoPandas df from [CMAQ actual dataframe]\n cmaqActualData = gpd.GeoDataFrame(\n df_actual, geometry=gpd.points_from_xy(df_actual.Longitude, df_actual.Latitude))\n cmaqActualData = cmaqActualData.set_crs(\"EPSG:4326\")\n\n\n ###### Plots ROW 1 ######\n \n # Plotting CMAQ prediction Map \n predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax1)\n\n # Plotting CMAQ predictions base layer for AirNow observations \n predictionMap = predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax2)\n \n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=predictionMap, column='AirNOW_O3', \n marker='o', markersize=400, cmap=cmap, \n linewidths=3, edgecolors=\"black\")\n \n \n ###### Plots ROW 2 ######\n \n # Plotting Actual CMAQ Map \n cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax3)\n \n # Plotting Actual CMAQ base layer for AirNow observations \n actualCmaqMap = cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax4)\n \n\n # Plotting AirNow observation layer\n 
predictionAirnowData.plot(ax=actualCmaqMap, column='AirNOW_O3', \n marker='o', markersize=400, cmap=cmap, \n linewidths=3, edgecolors=\"black\")\n \n \n # cb = plt.colorbar(predictionMap, ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n\n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n predictionMapPlotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n AirNowPlotTitle = datetime.strftime(dateObj, \"AirNow Stations: %Y-%m-%d (Time: %-H)\")\n cmaqActualPlotTitle = datetime.strftime(dateObj, \"Actual CMAQ: %Y-%m-%d (Time: %-H)\")\n AirNowActualPlotTitle = datetime.strftime(dateObj, \"Actual AirNow Stations: %Y-%m-%d (Time: %-H)\")\n ax1.set_title(predictionMapPlotTitle)\n ax2.set_title(AirNowPlotTitle)\n ax3.set_title(cmaqActualPlotTitle)\n ax4.set_title(AirNowActualPlotTitle)\n print(\"Saving Prediction Map: \", predictionMapPlotTitle)\n \n plt.tight_layout()\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n \n\n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave('/groups/ESS/aalnaim/cmaq/prediction.mp4', images, fps=10)\n\n\nprint(\"Generating prediction.gif ...\")\n# create .gif from .mp4 using FFmpeg\nos.system('module load ffmpeg; ffmpeg -i /groups/ESS/aalnaim/cmaq/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/prediction.mp4')\nprint(\"Done prediction.gif !!!\")\n \n\n\n", + "history_output" : "Plotting... 2022061012\nSaving Prediction Map: 2022-06-10 (Time: 12)\nPlotting... 2022061013\nSaving Prediction Map: 2022-06-10 (Time: 13)\nPlotting... 2022061014\nSaving Prediction Map: 2022-06-10 (Time: 14)\nPlotting... 2022061015\nSaving Prediction Map: 2022-06-10 (Time: 15)\nPlotting... 2022061016\nSaving Prediction Map: 2022-06-10 (Time: 16)\nPlotting... 2022061017\nSaving Prediction Map: 2022-06-10 (Time: 17)\nPlotting... 2022061018\nSaving Prediction Map: 2022-06-10 (Time: 18)\nPlotting... 2022061019\nSaving Prediction Map: 2022-06-10 (Time: 19)\nPlotting... 
2022061020\n", + "history_begin_time" : 1655255475406, + "history_end_time" : 1655263587731, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Failed" +},{ + "history_id" : "1VGMtDtNEnUg", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom matplotlib import gridspec\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 65\nmpl.rcParams['axes.linewidth'] = 2\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\ncmaq_actual = pd.read_csv(home + \"/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_actual = cmaq_actual['YYYYMMDDHH'].unique()\ndfs_actual = dict(tuple(cmaq_actual.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\nfig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2,2, figsize=(80, 50))\nplt.gca().set_aspect('equal', adjustable='datalim')\n\n\nfor t in time_:\n \n print(\"Plotting... 
\", t)\n \n # CMAQ prediction dataframe \n df = dfs[t]\n \n # AirNow observation dataframe\n df_Airnow = dfs_Airnow[t]\n\n # CMAQ actual dataframe\n df_actual = dfs_actual[t]\n \n # Setting up GeoPandas df from [CMAQ prediction dataframe] \n predictionMapData = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n predictionMapData = predictionMapData.set_crs(\"EPSG:4326\")\n\n # Setting up GeoPandas df from [AirNow observation dataframe]\n predictionAirnowData = gpd.GeoDataFrame(\n df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n predictionAirnowData = predictionAirnowData.set_crs(\"EPSG:4326\")\n \n # Setting up GeoPandas df from [CMAQ actual dataframe]\n cmaqActualData = gpd.GeoDataFrame(\n df_actual, geometry=gpd.points_from_xy(df_actual.Longitude, df_actual.Latitude))\n cmaqActualData = cmaqActualData.set_crs(\"EPSG:4326\")\n\n\n ###### Plots ROW 1 ######\n \n # Plotting CMAQ prediction Map \n predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax1)\n\n # Plotting CMAQ predictions base layer for AirNow observations \n predictionMap = predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax2)\n \n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=predictionMap, column='AirNOW_O3', \n marker='o', markersize=400, cmap=cmap, \n linewidths=3, edgecolors=\"black\")\n \n \n ###### Plots ROW 2 ######\n \n # Plotting Actual CMAQ Map \n cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax3)\n \n # Plotting Actual CMAQ base layer for AirNow observations \n actualCmaqMap = cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax4)\n \n\n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=actualCmaqMap, column='AirNOW_O3', \n marker='o', markersize=400, cmap=cmap, \n linewidths=3, edgecolors=\"black\")\n \n \n # cb = plt.colorbar(predictionMap, ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n\n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n predictionMapPlotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n AirNowPlotTitle = datetime.strftime(dateObj, \"AirNow Stations: %Y-%m-%d (Time: %-H)\")\n cmaqActualPlotTitle = datetime.strftime(dateObj, \"Actual CMAQ: %Y-%m-%d (Time: %-H)\")\n AirNowActualPlotTitle = datetime.strftime(dateObj, \"Actual AirNow Stations: %Y-%m-%d (Time: %-H)\")\n ax1.set_title(predictionMapPlotTitle)\n ax2.set_title(AirNowPlotTitle)\n ax3.set_title(cmaqActualPlotTitle)\n ax4.set_title(AirNowActualPlotTitle)\n print(\"Saving Prediction Map: \", predictionMapPlotTitle)\n \n plt.tight_layout()\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n \n\n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave('/groups/ESS/aalnaim/cmaq/prediction.mp4', images, fps=10)\n\n\nprint(\"Generating prediction.gif ...\")\n# create .gif from .mp4 
using FFmpeg\nos.system('module load ffmpeg; ffmpeg -i /groups/ESS/aalnaim/cmaq/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/prediction.mp4')\nprint(\"Done prediction.gif !!!\")\n \n\n\n", + "history_output" : null, + "history_begin_time" : 1655255448990, + "history_end_time" : null, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : null +},{ + "history_id" : "IC9c1yL8KVaE", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom matplotlib import gridspec\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 65\nmpl.rcParams['axes.linewidth'] = 2\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\ncmaq_actual = pd.read_csv(home + \"/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_actual = cmaq_actual['YYYYMMDDHH'].unique()\ndfs_actual = dict(tuple(cmaq_actual.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\nfig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2,2, figsize=(80, 50))\nplt.gca().set_aspect('equal', adjustable='datalim')\n\n\ntime_ = time_[:1]\n\nfor t in time_:\n \n print(\"Plotting... 
\", t)\n \n # CMAQ prediction dataframe \n df = dfs[t]\n \n # AirNow observation dataframe\n df_Airnow = dfs_Airnow[t]\n\n # CMAQ actual dataframe\n df_actual = dfs_actual[t]\n \n # Setting up GeoPandas df from [CMAQ prediction dataframe] \n predictionMapData = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n predictionMapData = predictionMapData.set_crs(\"EPSG:4326\")\n\n # Setting up GeoPandas df from [AirNow observation dataframe]\n predictionAirnowData = gpd.GeoDataFrame(\n df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n predictionAirnowData = predictionAirnowData.set_crs(\"EPSG:4326\")\n \n # Setting up GeoPandas df from [CMAQ actual dataframe]\n cmaqActualData = gpd.GeoDataFrame(\n df_actual, geometry=gpd.points_from_xy(df_actual.Longitude, df_actual.Latitude))\n cmaqActualData = cmaqActualData.set_crs(\"EPSG:4326\")\n\n\n ###### Plots ROW 1 ######\n \n # Plotting CMAQ prediction Map \n predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax1)\n\n # Plotting CMAQ predictions base layer for AirNow observations \n predictionMap = predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax2)\n \n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=predictionMap, column='AirNOW_O3', \n marker='o', markersize=400, cmap=cmap, \n linewidths=3, edgecolors=\"black\")\n \n \n ###### Plots ROW 2 ######\n \n # Plotting Actual CMAQ Map \n cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax3)\n \n # Plotting Actual CMAQ base layer for AirNow observations \n actualCmaqMap = cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax4)\n \n\n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=actualCmaqMap, column='AirNOW_O3', \n marker='o', markersize=400, cmap=cmap, \n linewidths=3, edgecolors=\"black\")\n \n \n # cb = plt.colorbar(predictionMap, ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n\n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n predictionMapPlotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n AirNowPlotTitle = datetime.strftime(dateObj, \"AirNow Stations: %Y-%m-%d (Time: %-H)\")\n cmaqActualPlotTitle = datetime.strftime(dateObj, \"Actual CMAQ: %Y-%m-%d (Time: %-H)\")\n AirNowActualPlotTitle = datetime.strftime(dateObj, \"Actual AirNow Stations: %Y-%m-%d (Time: %-H)\")\n ax1.set_title(predictionMapPlotTitle)\n ax2.set_title(AirNowPlotTitle)\n ax3.set_title(cmaqActualPlotTitle)\n ax4.set_title(AirNowActualPlotTitle)\n print(\"Saving Prediction Map: \", predictionMapPlotTitle)\n \n plt.tight_layout()\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n \n\n\n\n\n\n", + "history_output" : "Plotting... 
2022061012\nSaving Prediction Map: 2022-06-10 (Time: 12)\n", + "history_begin_time" : 1655240382149, + "history_end_time" : 1655259490137, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "WhVHII1JcF3k", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom matplotlib import gridspec\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 65\nmpl.rcParams['axes.linewidth'] = 2\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\ncmaq_actual = pd.read_csv(home + \"/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_actual = cmaq_actual['YYYYMMDDHH'].unique()\ndfs_actual = dict(tuple(cmaq_actual.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\nfig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2,2, figsize=(80, 50))\nplt.gca().set_aspect('equal', adjustable='datalim')\n\n\ntime_ = time_[:1]\n\nfor t in time_:\n \n print(\"Plotting... 
\", t)\n \n # CMAQ prediction dataframe \n df = dfs[t]\n \n # AirNow observation dataframe\n df_Airnow = dfs_Airnow[t]\n\n # CMAQ actual dataframe\n df_actual = dfs_actual[t]\n \n # Setting up GeoPandas df from [CMAQ prediction dataframe] \n predictionMapData = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n predictionMapData = predictionMapData.set_crs(\"EPSG:4326\")\n\n # Setting up GeoPandas df from [AirNow observation dataframe]\n predictionAirnowData = gpd.GeoDataFrame(\n df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n predictionAirnowData = predictionAirnowData.set_crs(\"EPSG:4326\")\n \n # Setting up GeoPandas df from [CMAQ actual dataframe]\n cmaqActualData = gpd.GeoDataFrame(\n df_actual, geometry=gpd.points_from_xy(df_actual.Longitude, df_actual.Latitude))\n cmaqActualData = cmaqActualData.set_crs(\"EPSG:4326\")\n\n\n ###### Plots ROW 1 ######\n \n # Plotting CMAQ prediction Map \n predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax1)\n\n # Plotting CMAQ predictions base layer for AirNow observations \n predictionMap = predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax2)\n \n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=predictionMap, column='AirNOW_O3', \n marker='o', markersize=400, cmap=cmap, \n linewidths=2, edgecolors=\"black\")\n \n \n ###### Plots ROW 2 ######\n \n # Plotting Actual CMAQ Map \n cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax3)\n \n # Plotting Actual CMAQ base layer for AirNow observations \n actualCmaqMap = cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax4)\n \n\n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=actualCmaqMap, column='AirNOW_O3', \n marker='o', markersize=400, cmap=cmap, \n linewidths=2, edgecolors=\"black\")\n \n \n # cb = plt.colorbar(predictionMap, ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n\n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n predictionMapPlotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n AirNowPlotTitle = datetime.strftime(dateObj, \"AirNow Stations: %Y-%m-%d (Time: %-H)\")\n cmaqActualPlotTitle = datetime.strftime(dateObj, \"Actual CMAQ: %Y-%m-%d (Time: %-H)\")\n AirNowActualPlotTitle = datetime.strftime(dateObj, \"Actual AirNow Stations: %Y-%m-%d (Time: %-H)\")\n ax1.set_title(predictionMapPlotTitle)\n ax2.set_title(AirNowPlotTitle)\n ax3.set_title(cmaqActualPlotTitle)\n ax4.set_title(AirNowActualPlotTitle)\n print(\"Saving Prediction Map: \", predictionMapPlotTitle)\n \n plt.tight_layout()\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n \n\n\n\n\n\n", + "history_output" : "Plotting... 
2022061012\nSaving Prediction Map: 2022-06-10 (Time: 12)\n", + "history_begin_time" : 1655240074660, + "history_end_time" : 1655259489401, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "mzkXkGBDp6so", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom matplotlib import gridspec\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 65\nmpl.rcParams['axes.linewidth'] = 2\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\ncmaq_actual = pd.read_csv(home + \"/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_actual = cmaq_actual['YYYYMMDDHH'].unique()\ndfs_actual = dict(tuple(cmaq_actual.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\nfig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2,2, figsize=(80, 50))\nplt.gca().set_aspect('equal', adjustable='datalim')\n\n\ntime_ = time_[:1]\n\nfor t in time_:\n \n print(\"Plotting... 
\", t)\n \n # CMAQ prediction dataframe \n df = dfs[t]\n \n # AirNow observation dataframe\n df_Airnow = dfs_Airnow[t]\n\n # CMAQ actual dataframe\n df_actual = dfs_actual[t]\n \n # Setting up GeoPandas df from [CMAQ prediction dataframe] \n predictionMapData = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n predictionMapData = predictionMapData.set_crs(\"EPSG:4326\")\n\n # Setting up GeoPandas df from [AirNow observation dataframe]\n predictionAirnowData = gpd.GeoDataFrame(\n df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n predictionAirnowData = predictionAirnowData.set_crs(\"EPSG:4326\")\n \n # Setting up GeoPandas df from [CMAQ actual dataframe]\n cmaqActualData = gpd.GeoDataFrame(\n df_actual, geometry=gpd.points_from_xy(df_actual.Longitude, df_actual.Latitude))\n cmaqActualData = cmaqActualData.set_crs(\"EPSG:4326\")\n\n\n ###### Plots ROW 1 ######\n \n # Plotting CMAQ prediction Map \n predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax1)\n\n # Plotting CMAQ predictions base layer for AirNow observations \n predictionMap = predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax2)\n \n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=predictionMap, column='AirNOW_O3', \n marker='o', markersize=355, cmap=cmap, \n linewidths=1, edgecolors=\"black\")\n \n \n ###### Plots ROW 2 ######\n \n # Plotting Actual CMAQ Map \n cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax3)\n \n # Plotting Actual CMAQ base layer for AirNow observations \n actualCmaqMap = cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax4)\n \n\n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=actualCmaqMap, column='AirNOW_O3', \n marker='o', markersize=355, cmap=cmap, \n linewidths=1, edgecolors=\"black\")\n \n \n # cb = plt.colorbar(predictionMap, ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n\n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n predictionMapPlotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n AirNowPlotTitle = datetime.strftime(dateObj, \"AirNow Stations: %Y-%m-%d (Time: %-H)\")\n cmaqActualPlotTitle = datetime.strftime(dateObj, \"Actual CMAQ: %Y-%m-%d (Time: %-H)\")\n AirNowActualPlotTitle = datetime.strftime(dateObj, \"Actual AirNow Stations: %Y-%m-%d (Time: %-H)\")\n ax1.set_title(predictionMapPlotTitle)\n ax2.set_title(AirNowPlotTitle)\n ax3.set_title(cmaqActualPlotTitle)\n ax4.set_title(AirNowActualPlotTitle)\n print(\"Saving Prediction Map: \", predictionMapPlotTitle)\n plt.tight_layout()\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n \n\n\n\n\n\n", + "history_output" : "Plotting... 
2022061012\nSaving Prediction Map: 2022-06-10 (Time: 12)\n", + "history_begin_time" : 1655239278550, + "history_end_time" : 1655259488319, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "oH7Sp5wvORaZ", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom matplotlib import gridspec\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 65\n\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\ncmaq_actual = pd.read_csv(home + \"/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_actual = cmaq_actual['YYYYMMDDHH'].unique()\ndfs_actual = dict(tuple(cmaq_actual.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\nfig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2,2, figsize=(80, 50))\nplt.gca().set_aspect('equal', adjustable='datalim')\n\n\ntime_ = time_[:1]\n\nfor t in time_:\n \n print(\"Plotting... 
\", t)\n \n # CMAQ prediction dataframe \n df = dfs[t]\n \n # AirNow observation dataframe\n df_Airnow = dfs_Airnow[t]\n\n # CMAQ actual dataframe\n df_actual = dfs_actual[t]\n \n # Setting up GeoPandas df from [CMAQ prediction dataframe] \n predictionMapData = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n predictionMapData = predictionMapData.set_crs(\"EPSG:4326\")\n\n # Setting up GeoPandas df from [AirNow observation dataframe]\n predictionAirnowData = gpd.GeoDataFrame(\n df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n predictionAirnowData = predictionAirnowData.set_crs(\"EPSG:4326\")\n \n # Setting up GeoPandas df from [CMAQ actual dataframe]\n cmaqActualData = gpd.GeoDataFrame(\n df_actual, geometry=gpd.points_from_xy(df_actual.Longitude, df_actual.Latitude))\n cmaqActualData = cmaqActualData.set_crs(\"EPSG:4326\")\n\n\n ###### Plots ROW 1 ######\n \n # Plotting CMAQ prediction Map \n predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax1)\n\n # Plotting CMAQ predictions base layer for AirNow observations \n predictionMap = predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax2)\n \n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=predictionMap, column='AirNOW_O3', \n marker='o', markersize=355, cmap=cmap, \n linewidths=1, edgecolors=\"black\")\n \n \n ###### Plots ROW 2 ######\n \n # Plotting Actual CMAQ Map \n cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax3)\n \n # Plotting Actual CMAQ base layer for AirNow observations \n actualCmaqMap = cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax4)\n \n\n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=actualCmaqMap, column='AirNOW_O3', \n marker='o', markersize=355, cmap=cmap, \n linewidths=1, edgecolors=\"black\")\n \n \n # cb = plt.colorbar(predictionMap, ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n\n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n predictionMapPlotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n AirNowPlotTitle = datetime.strftime(dateObj, \"AirNow Stations: %Y-%m-%d (Time: %-H)\")\n cmaqActualPlotTitle = datetime.strftime(dateObj, \"Actual CMAQ: %Y-%m-%d (Time: %-H)\")\n AirNowActualPlotTitle = datetime.strftime(dateObj, \"Actual AirNow Stations: %Y-%m-%d (Time: %-H)\")\n ax1.set_title(predictionMapPlotTitle)\n ax2.set_title(AirNowPlotTitle)\n ax3.set_title(cmaqActualPlotTitle)\n ax4.set_title(AirNowActualPlotTitle)\n print(\"Saving Prediction Map: \", predictionMapPlotTitle)\n plt.tight_layout()\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n \n\n\n\n\n\n", + "history_output" : "Plotting... 
2022061012\nSaving Prediction Map: 2022-06-10 (Time: 12)\n", + "history_begin_time" : 1655239003214, + "history_end_time" : 1655259487803, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "54x8rocvNwLR", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom matplotlib import gridspec\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 65\n\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\ncmaq_actual = pd.read_csv(home + \"/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_actual = cmaq_actual['YYYYMMDDHH'].unique()\ndfs_actual = dict(tuple(cmaq_actual.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\nfig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2,2, figsize=(80, 50))\nplt.gca().set_aspect('equal', adjustable='datalim')\ngs = gridspec.GridSpec(2, 2, width_ratios=[1, 1]) \n\n\ntime_ = time_[:1]\n\nfor t in time_:\n \n print(\"Plotting... 
\", t)\n \n # CMAQ prediction dataframe \n df = dfs[t]\n \n # AirNow observation dataframe\n df_Airnow = dfs_Airnow[t]\n\n # CMAQ actual dataframe\n df_actual = dfs_actual[t]\n \n # Setting up GeoPandas df from [CMAQ prediction dataframe] \n predictionMapData = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n predictionMapData = predictionMapData.set_crs(\"EPSG:4326\")\n\n # Setting up GeoPandas df from [AirNow observation dataframe]\n predictionAirnowData = gpd.GeoDataFrame(\n df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n predictionAirnowData = predictionAirnowData.set_crs(\"EPSG:4326\")\n \n # Setting up GeoPandas df from [CMAQ actual dataframe]\n cmaqActualData = gpd.GeoDataFrame(\n df_actual, geometry=gpd.points_from_xy(df_actual.Longitude, df_actual.Latitude))\n cmaqActualData = cmaqActualData.set_crs(\"EPSG:4326\")\n\n\n ###### Plots ROW 1 ######\n \n # Plotting CMAQ prediction Map \n predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax1)\n\n # Plotting CMAQ predictions base layer for AirNow observations \n predictionMap = predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax2)\n \n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=predictionMap, column='AirNOW_O3', \n marker='o', markersize=455, cmap=cmap, \n linewidths=1, edgecolors=\"black\")\n \n \n ###### Plots ROW 2 ######\n \n # Plotting Actual CMAQ Map \n cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax3)\n \n # Plotting Actual CMAQ base layer for AirNow observations \n actualCmaqMap = cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax4)\n \n\n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=actualCmaqMap, column='AirNOW_O3', \n marker='o', markersize=455, cmap=cmap, \n linewidths=1, edgecolors=\"black\")\n \n \n # cb = plt.colorbar(predictionMap, ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n\n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n predictionMapPlotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n AirNowPlotTitle = datetime.strftime(dateObj, \"AirNow Stations: %Y-%m-%d (Time: %-H)\")\n cmaqActualPlotTitle = datetime.strftime(dateObj, \"Actual CMAQ: %Y-%m-%d (Time: %-H)\")\n AirNowActualPlotTitle = datetime.strftime(dateObj, \"Actual AirNow Stations: %Y-%m-%d (Time: %-H)\")\n ax1.set_title(predictionMapPlotTitle)\n ax2.set_title(AirNowPlotTitle)\n ax3.set_title(cmaqActualPlotTitle)\n ax4.set_title(AirNowActualPlotTitle)\n print(\"Saving Prediction Map: \", predictionMapPlotTitle)\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n \n\n\n\n\n\n", + "history_output" : "Plotting... 
2022061012\nSaving Prediction Map: 2022-06-10 (Time: 12)\n", + "history_begin_time" : 1655236640876, + "history_end_time" : 1655259487154, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "6aM3pOyG0vA9", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom matplotlib import gridspec\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 65\n\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\ncmaq_actual = pd.read_csv(home + \"/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_actual = cmaq_actual['YYYYMMDDHH'].unique()\ndfs_actual = dict(tuple(cmaq_actual.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\nfig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2,2, figsize=(80, 50))\nplt.gca().set_aspect('equal', adjustable='datalim')\ngs = gridspec.GridSpec(2, 2, width_ratios=[1, 1]) \n\n\ntime_ = time_[:1]\n\nfor t in time_:\n \n print(\"Plotting... 
\", t)\n \n # CMAQ prediction dataframe \n df = dfs[t]\n \n # AirNow observation dataframe\n df_Airnow = dfs_Airnow[t]\n\n # CMAQ actual dataframe\n df_actual = dfs_actual[t]\n \n # Setting up GeoPandas df from [CMAQ prediction dataframe] \n predictionMapData = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n predictionMapData = predictionMapData.set_crs(\"EPSG:4326\")\n\n # Setting up GeoPandas df from [AirNow observation dataframe]\n predictionAirnowData = gpd.GeoDataFrame(\n df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n predictionAirnowData = predictionAirnowData.set_crs(\"EPSG:4326\")\n \n # Setting up GeoPandas df from [CMAQ actual dataframe]\n cmaqActualData = gpd.GeoDataFrame(\n df_actual, geometry=gpd.points_from_xy(df_actual.Longitude, df_actual.Latitude))\n cmaqActualData = cmaqActualData.set_crs(\"EPSG:4326\")\n\n\n ###### Plots ROW 1 ######\n \n # Plotting CMAQ prediction Map \n predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax1)\n\n # Plotting CMAQ predictions base layer for AirNow observations \n predictionMap = predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax2)\n \n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=predictionMap, column='AirNOW_O3', \n marker='o', markersize=255, cmap=cmap, \n linewidths=1, edgecolors=\"black\")\n \n \n ###### Plots ROW 2 ######\n \n # Plotting Actual CMAQ Map \n cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax3)\n \n # Plotting Actual CMAQ base layer for AirNow observations \n actualCmaqMap = cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax4)\n \n\n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=actualCmaqMap, column='AirNOW_O3', \n marker='o', markersize=255, cmap=cmap, \n linewidths=1, edgecolors=\"black\")\n \n \n # cb = plt.colorbar(predictionMap, ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n\n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n predictionMapPlotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n AirNowPlotTitle = datetime.strftime(dateObj, \"AirNow Stations: %Y-%m-%d (Time: %-H)\")\n cmaqActualPlotTitle = datetime.strftime(dateObj, \"Actual CMAQ: %Y-%m-%d (Time: %-H)\")\n AirNowActualPlotTitle = datetime.strftime(dateObj, \"Actual AirNow Stations: %Y-%m-%d (Time: %-H)\")\n ax1.set_title(predictionMapPlotTitle)\n ax2.set_title(AirNowPlotTitle)\n ax3.set_title(cmaqActualPlotTitle)\n ax4.set_title(AirNowActualPlotTitle)\n print(\"Saving Prediction Map: \", predictionMapPlotTitle)\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n \n\n\n\n\n\n", + "history_output" : "Plotting... 
2022061012\nSaving Prediction Map: 2022-06-10 (Time: 12)\n", + "history_begin_time" : 1655235775968, + "history_end_time" : 1655259486636, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "RZYEu8XEq7K7", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 65\n\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\ncmaq_actual = pd.read_csv(home + \"/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_actual = cmaq_actual['YYYYMMDDHH'].unique()\ndfs_actual = dict(tuple(cmaq_actual.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\nfig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2,2, figsize=(80, 50))\nplt.gca().set_aspect('equal', adjustable='datalim')\ngs = gridspec.GridSpec(2, 2, width_ratios=[1, 1]) \n\n\ntime_ = time_[:1]\n\nfor t in time_:\n \n print(\"Plotting... 
\", t)\n \n # CMAQ prediction dataframe \n df = dfs[t]\n \n # AirNow observation dataframe\n df_Airnow = dfs_Airnow[t]\n\n # CMAQ actual dataframe\n df_actual = dfs_actual[t]\n \n # Setting up GeoPandas df from [CMAQ prediction dataframe] \n predictionMapData = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n predictionMapData = predictionMapData.set_crs(\"EPSG:4326\")\n\n # Setting up GeoPandas df from [AirNow observation dataframe]\n predictionAirnowData = gpd.GeoDataFrame(\n df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n predictionAirnowData = predictionAirnowData.set_crs(\"EPSG:4326\")\n \n # Setting up GeoPandas df from [CMAQ actual dataframe]\n cmaqActualData = gpd.GeoDataFrame(\n df_actual, geometry=gpd.points_from_xy(df_actual.Longitude, df_actual.Latitude))\n cmaqActualData = cmaqActualData.set_crs(\"EPSG:4326\")\n\n\n ###### Plots ROW 1 ######\n \n # Plotting CMAQ prediction Map \n predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax1)\n\n # Plotting CMAQ predictions base layer for AirNow observations \n predictionMap = predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax2)\n \n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=predictionMap, column='AirNOW_O3', \n marker='o', markersize=255, cmap=cmap, \n linewidths=1, edgecolors=\"black\")\n \n \n ###### Plots ROW 2 ######\n \n # Plotting Actual CMAQ Map \n cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax3)\n \n # Plotting Actual CMAQ base layer for AirNow observations \n actualCmaqMap = cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax4)\n \n\n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=actualCmaqMap, column='AirNOW_O3', \n marker='o', markersize=255, cmap=cmap, \n linewidths=1, edgecolors=\"black\")\n \n \n # cb = plt.colorbar(predictionMap, ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n\n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n predictionMapPlotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n AirNowPlotTitle = datetime.strftime(dateObj, \"AirNow Stations: %Y-%m-%d (Time: %-H)\")\n cmaqActualPlotTitle = datetime.strftime(dateObj, \"Actual CMAQ: %Y-%m-%d (Time: %-H)\")\n AirNowActualPlotTitle = datetime.strftime(dateObj, \"Actual AirNow Stations: %Y-%m-%d (Time: %-H)\")\n ax1.set_title(predictionMapPlotTitle)\n ax2.set_title(AirNowPlotTitle)\n ax3.set_title(cmaqActualPlotTitle)\n ax4.set_title(AirNowActualPlotTitle)\n print(\"Saving Prediction Map: \", predictionMapPlotTitle)\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n \n\n\n\n\n\n", + "history_output" : "Traceback (most recent call last):\n File \"prediction_maps.py\", line 48, in \n gs = gridspec.GridSpec(2, 2, width_ratios=[1, 1]) \nNameError: name 'gridspec' is not defined\n", + "history_begin_time" : 1655235251908, + "history_end_time" : 1655259486085, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : 
"Stopped" +},{ + "history_id" : "jA0AMRRxBWZK", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 65\n\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\ncmaq_actual = pd.read_csv(home + \"/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_actual = cmaq_actual['YYYYMMDDHH'].unique()\ndfs_actual = dict(tuple(cmaq_actual.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\nfig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2,2, figsize=(80, 50))\nplt.gca().set_aspect('equal', adjustable='datalim')\n\ntime_ = time_[:1]\n\nfor t in time_:\n \n print(\"Plotting... \", t)\n \n # CMAQ prediction dataframe \n df = dfs[t]\n \n # AirNow observation dataframe\n df_Airnow = dfs_Airnow[t]\n\n # CMAQ actual dataframe\n df_actual = dfs_actual[t]\n \n # Setting up GeoPandas df from [CMAQ prediction dataframe] \n predictionMapData = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n predictionMapData = predictionMapData.set_crs(\"EPSG:4326\")\n\n # Setting up GeoPandas df from [AirNow observation dataframe]\n predictionAirnowData = gpd.GeoDataFrame(\n df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n predictionAirnowData = predictionAirnowData.set_crs(\"EPSG:4326\")\n \n # Setting up GeoPandas df from [CMAQ actual dataframe]\n cmaqActualData = gpd.GeoDataFrame(\n df_actual, geometry=gpd.points_from_xy(df_actual.Longitude, df_actual.Latitude))\n cmaqActualData = cmaqActualData.set_crs(\"EPSG:4326\")\n\n\n ###### Plots ROW 1 ######\n \n # Plotting CMAQ prediction Map \n predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax1)\n\n # Plotting CMAQ predictions base layer for AirNow observations \n predictionMap = predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax2)\n \n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=predictionMap, column='AirNOW_O3', \n marker='o', markersize=155, cmap=cmap, \n linewidths=1, edgecolors=\"black\")\n \n \n ###### Plots ROW 2 ######\n \n # Plotting Actual CMAQ Map \n cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax3)\n \n # Plotting Actual CMAQ base layer for AirNow observations \n actualCmaqMap = cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax4)\n \n\n # Plotting AirNow observation layer\n 
predictionAirnowData.plot(ax=actualCmaqMap, column='AirNOW_O3', \n marker='o', markersize=155, cmap=cmap, \n linewidths=1, edgecolors=\"black\")\n \n \n # cb = plt.colorbar(predictionMap, ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n\n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n predictionMapPlotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n AirNowPlotTitle = datetime.strftime(dateObj, \"AirNow Stations: %Y-%m-%d (Time: %-H)\")\n cmaqActualPlotTitle = datetime.strftime(dateObj, \"Actual CMAQ: %Y-%m-%d (Time: %-H)\")\n AirNowActualPlotTitle = datetime.strftime(dateObj, \"Actual AirNow Stations: %Y-%m-%d (Time: %-H)\")\n ax1.set_title(predictionMapPlotTitle)\n ax2.set_title(AirNowPlotTitle)\n ax3.set_title(cmaqActualPlotTitle)\n ax4.set_title(AirNowActualPlotTitle)\n print(\"Saving Prediction Map: \", predictionMapPlotTitle)\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n \n\n\n\n\n\n", + "history_output" : "Plotting... 2022061012\nSaving Prediction Map: 2022-06-10 (Time: 12)\n", + "history_begin_time" : 1655234913611, + "history_end_time" : 1655259485491, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "bdRQQCyaqYwQ", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 65\n\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\ncmaq_actual = pd.read_csv(home + \"/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_actual = cmaq_actual['YYYYMMDDHH'].unique()\ndfs_actual = dict(tuple(cmaq_actual.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\nfig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2,2, figsize=(80, 50))\n\ntime_ = time_[:1]\n\nfor t in time_:\n \n print(\"Plotting... 
\", t)\n \n # CMAQ prediction dataframe \n df = dfs[t]\n \n # AirNow observation dataframe\n df_Airnow = dfs_Airnow[t]\n\n # CMAQ actual dataframe\n df_actual = dfs_actual[t]\n \n # Setting up GeoPandas df from [CMAQ prediction dataframe] \n predictionMapData = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n predictionMapData = predictionMapData.set_crs(\"EPSG:4326\")\n\n # Setting up GeoPandas df from [AirNow observation dataframe]\n predictionAirnowData = gpd.GeoDataFrame(\n df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n predictionAirnowData = predictionAirnowData.set_crs(\"EPSG:4326\")\n \n # Setting up GeoPandas df from [CMAQ actual dataframe]\n cmaqActualData = gpd.GeoDataFrame(\n df_actual, geometry=gpd.points_from_xy(df_actual.Longitude, df_actual.Latitude))\n cmaqActualData = cmaqActualData.set_crs(\"EPSG:4326\")\n\n\n ###### Plots ROW 1 ######\n \n # Plotting CMAQ prediction Map \n predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax1)\n\n # Plotting CMAQ predictions base layer for AirNow observations \n predictionMap = predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax2)\n \n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=predictionMap, column='AirNOW_O3', \n marker='o', markersize=95, cmap=cmap, \n linewidths=1, edgecolors=\"black\")\n \n \n ###### Plots ROW 2 ######\n \n # Plotting Actual CMAQ Map \n cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax3)\n \n # Plotting Actual CMAQ base layer for AirNow observations \n actualCmaqMap = cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax4)\n \n\n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=actualCmaqMap, column='AirNOW_O3', \n marker='o', markersize=95, cmap=cmap, \n linewidths=1, edgecolors=\"black\")\n \n \n # cb = plt.colorbar(predictionMap, ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n\n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n predictionMapPlotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n AirNowPlotTitle = datetime.strftime(dateObj, \"AirNow Stations: %Y-%m-%d (Time: %-H)\")\n cmaqActualPlotTitle = datetime.strftime(dateObj, \"Actual CMAQ: %Y-%m-%d (Time: %-H)\")\n AirNowActualPlotTitle = datetime.strftime(dateObj, \"Actual AirNow Stations: %Y-%m-%d (Time: %-H)\")\n ax1.set_title(predictionMapPlotTitle)\n ax2.set_title(AirNowPlotTitle)\n ax3.set_title(cmaqActualPlotTitle)\n ax4.set_title(AirNowActualPlotTitle)\n print(\"Saving Prediction Map: \", predictionMapPlotTitle)\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n \n\n\n\n\n\n", + "history_output" : "Plotting... 
2022061012\nSaving Prediction Map: 2022-06-10 (Time: 12)\n", + "history_begin_time" : 1655234286345, + "history_end_time" : 1655259498092, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "GWIQbsUIdB5b", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 35\n\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\ncmaq_actual = pd.read_csv(home + \"/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_actual = cmaq_actual['YYYYMMDDHH'].unique()\ndfs_actual = dict(tuple(cmaq_actual.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\nfig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2,2, figsize=(80, 50))\n\ntime_ = time_[:1]\n\nfor t in time_:\n \n print(\"Plotting... 
\", t)\n \n # CMAQ prediction dataframe \n df = dfs[t]\n \n # AirNow observation dataframe\n df_Airnow = dfs_Airnow[t]\n\n # CMAQ actual dataframe\n df_actual = dfs_actual[t]\n \n # Setting up GeoPandas df from [CMAQ prediction dataframe] \n predictionMapData = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n predictionMapData = predictionMapData.set_crs(\"EPSG:4326\")\n\n # Setting up GeoPandas df from [AirNow observation dataframe]\n predictionAirnowData = gpd.GeoDataFrame(\n df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n predictionAirnowData = predictionAirnowData.set_crs(\"EPSG:4326\")\n \n # Setting up GeoPandas df from [CMAQ actual dataframe]\n cmaqActualData = gpd.GeoDataFrame(\n df_actual, geometry=gpd.points_from_xy(df_actual.Longitude, df_actual.Latitude))\n cmaqActualData = cmaqActualData.set_crs(\"EPSG:4326\")\n\n\n ###### Plots ROW 1 ######\n \n # Plotting CMAQ prediction Map \n predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax1)\n\n # Plotting CMAQ predictions base layer for AirNow observations \n predictionMap = predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax2)\n \n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=predictionMap, column='AirNOW_O3', \n marker='o', markersize=95, cmap=cmap, \n linewidths=1, edgecolors=\"black\")\n \n \n ###### Plots ROW 2 ######\n \n # Plotting Actual CMAQ Map \n cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax3)\n \n # Plotting Actual CMAQ base layer for AirNow observations \n actualCmaqMap = cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax4)\n \n\n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=actualCmaqMap, column='AirNOW_O3', \n marker='o', markersize=95, cmap=cmap, \n linewidths=1, edgecolors=\"black\")\n \n \n # cb = plt.colorbar(predictionMap, ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n\n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n predictionMapPlotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n AirNowPlotTitle = datetime.strftime(dateObj, \"AirNow Stations: %Y-%m-%d (Time: %-H)\")\n cmaqActualPlotTitle = datetime.strftime(dateObj, \"Actual CMAQ: %Y-%m-%d (Time: %-H)\")\n AirNowActualPlotTitle = datetime.strftime(dateObj, \"Actual AirNow Stations: %Y-%m-%d (Time: %-H)\")\n ax1.set_title(predictionMapPlotTitle, fontsize= 35)\n ax2.set_title(AirNowPlotTitle, fontsize= 35)\n ax3.set_title(cmaqActualPlotTitle, fontsize= 35)\n ax4.set_title(AirNowActualPlotTitle, fontsize= 35)\n print(\"Saving Prediction Map: \", predictionMapPlotTitle)\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n \n\n\n\n\n\n", + "history_output" : "Plotting... 
2022061012\nSaving Prediction Map: 2022-06-10 (Time: 12)\n", + "history_begin_time" : 1655233992740, + "history_end_time" : 1655259497227, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "mpA4FWnwKInZ", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 35\n\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\ncmaq_actual = pd.read_csv(home + \"/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_actual = cmaq_actual['YYYYMMDDHH'].unique()\ndfs_actual = dict(tuple(cmaq_actual.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\nfig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2,2, figsize=(40, 35))\n\ntime_ = time_[:1]\n\nfor t in time_:\n \n print(\"Plotting... 
\", t)\n \n # CMAQ prediction dataframe \n df = dfs[t]\n \n # AirNow observation dataframe\n df_Airnow = dfs_Airnow[t]\n\n # CMAQ actual dataframe\n df_actual = dfs_actual[t]\n \n # Setting up GeoPandas df from [CMAQ prediction dataframe] \n predictionMapData = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n predictionMapData = predictionMapData.set_crs(\"EPSG:4326\")\n\n # Setting up GeoPandas df from [AirNow observation dataframe]\n predictionAirnowData = gpd.GeoDataFrame(\n df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n predictionAirnowData = predictionAirnowData.set_crs(\"EPSG:4326\")\n \n # Setting up GeoPandas df from [CMAQ actual dataframe]\n cmaqActualData = gpd.GeoDataFrame(\n df_actual, geometry=gpd.points_from_xy(df_actual.Longitude, df_actual.Latitude))\n cmaqActualData = cmaqActualData.set_crs(\"EPSG:4326\")\n\n\n ###### Plots ROW 1 ######\n \n # Plotting CMAQ prediction Map \n predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax1)\n\n # Plotting CMAQ predictions base layer for AirNow observations \n predictionMap = predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax2)\n \n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=predictionMap, column='AirNOW_O3', \n marker='o', markersize=95, cmap=cmap, \n linewidths=1, edgecolors=\"black\")\n \n \n ###### Plots ROW 2 ######\n \n # Plotting Actual CMAQ Map \n cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax3)\n \n # Plotting Actual CMAQ base layer for AirNow observations \n actualCmaqMap = cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax4)\n \n\n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=actualCmaqMap, column='AirNOW_O3', \n marker='o', markersize=95, cmap=cmap, \n linewidths=1, edgecolors=\"black\")\n \n \n # cb = plt.colorbar(predictionMap, ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n\n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n predictionMapPlotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n AirNowPlotTitle = datetime.strftime(dateObj, \"AirNow Stations: %Y-%m-%d (Time: %-H)\")\n cmaqActualPlotTitle = datetime.strftime(dateObj, \"Actual CMAQ: %Y-%m-%d (Time: %-H)\")\n AirNowActualPlotTitle = datetime.strftime(dateObj, \"Actual AirNow Stations: %Y-%m-%d (Time: %-H)\")\n ax1.set_title(predictionMapPlotTitle, fontsize= 35)\n ax2.set_title(AirNowPlotTitle, fontsize= 35)\n ax3.set_title(cmaqActualPlotTitle, fontsize= 35)\n ax4.set_title(AirNowActualPlotTitle, fontsize= 35)\n print(\"Saving Prediction Map: \", predictionMapPlotTitle)\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n \n\n\n\n\n\n", + "history_output" : "Plotting... 
2022061012\nSaving Prediction Map: 2022-06-10 (Time: 12)\n", + "history_begin_time" : 1655233392181, + "history_end_time" : 1655259496595, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "QumKJnJJliwp", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 35\n\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\ncmaq_actual = pd.read_csv(home + \"/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_actual = cmaq_actual['YYYYMMDDHH'].unique()\ndfs_actual = dict(tuple(cmaq_actual.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\nfig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2,2, figsize=(40, 35))\n\ntime_ = time_[:1]\n\nfor t in time_:\n \n print(\"Plotting... 
\", t)\n \n # CMAQ prediction dataframe \n df = dfs[t]\n \n # AirNow observation dataframe\n df_Airnow = dfs_Airnow[t]\n\n # CMAQ actual dataframe\n df_actual = dfs_actual[t]\n \n # Setting up GeoPandas df from [CMAQ prediction dataframe] \n predictionMapData = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n predictionMapData = predictionMapData.set_crs(\"EPSG:4326\")\n\n # Setting up GeoPandas df from [AirNow observation dataframe]\n predictionAirnowData = gpd.GeoDataFrame(\n df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n predictionAirnowData = predictionAirnowData.set_crs(\"EPSG:4326\")\n \n # Setting up GeoPandas df from [CMAQ actual dataframe]\n cmaqActualData = gpd.GeoDataFrame(\n df_actual, geometry=gpd.points_from_xy(df_actual.Longitude, df_actual.Latitude))\n cmaqActualData = cmaqActualData.set_crs(\"EPSG:4326\")\n\n\n ###### Plots ROW 1 ######\n \n # Plotting CMAQ prediction Map \n predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax1, figsize=(40, 35))\n\n # Plotting CMAQ predictions base layer for AirNow observations \n predictionMap = predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax2, figsize=(40, 35))\n \n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=predictionMap, column='AirNOW_O3', \n marker='o', markersize=95, cmap=cmap, \n linewidths=1, edgecolors=\"black\", figsize=(40, 35))\n \n \n ###### Plots ROW 2 ######\n \n # Plotting Actual CMAQ Map \n cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax3, figsize=(40, 35))\n \n # Plotting Actual CMAQ base layer for AirNow observations \n actualCmaqMap = cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax4, figsize=(40, 35))\n \n\n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=actualCmaqMap, column='AirNOW_O3', \n marker='o', markersize=95, cmap=cmap, \n linewidths=1, edgecolors=\"black\", figsize=(40, 35))\n \n \n # cb = plt.colorbar(predictionMap, ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n\n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n predictionMapPlotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n AirNowPlotTitle = datetime.strftime(dateObj, \"AirNow Stations: %Y-%m-%d (Time: %-H)\")\n cmaqActualPlotTitle = datetime.strftime(dateObj, \"Actual CMAQ: %Y-%m-%d (Time: %-H)\")\n AirNowActualPlotTitle = datetime.strftime(dateObj, \"Actual AirNow Stations: %Y-%m-%d (Time: %-H)\")\n ax1.set_title(predictionMapPlotTitle, fontsize= 35)\n ax2.set_title(AirNowPlotTitle, fontsize= 35)\n ax3.set_title(cmaqActualPlotTitle, fontsize= 35)\n ax4.set_title(AirNowActualPlotTitle, fontsize= 35)\n print(\"Saving Prediction Map: \", predictionMapPlotTitle)\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n \n\n\n\n\n\n", + "history_output" : "Plotting... 
2022061012\nSaving Prediction Map: 2022-06-10 (Time: 12)\n", + "history_begin_time" : 1655233084395, + "history_end_time" : 1655259496092, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "q2LPJAboQFkQ", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 35\n\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\ncmaq_actual = pd.read_csv(home + \"/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_actual = cmaq_actual['YYYYMMDDHH'].unique()\ndfs_actual = dict(tuple(cmaq_actual.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\nfig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2,2, figsize=(40, 30))\n\ntime_ = time_[:1]\n\nfor t in time_:\n \n print(\"Plotting... 
\", t)\n \n # CMAQ prediction dataframe \n df = dfs[t]\n \n # AirNow observation dataframe\n df_Airnow = dfs_Airnow[t]\n\n # CMAQ actual dataframe\n df_actual = dfs_actual[t]\n \n # Setting up GeoPandas df from [CMAQ prediction dataframe] \n predictionMapData = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n predictionMapData = predictionMapData.set_crs(\"EPSG:4326\")\n\n # Setting up GeoPandas df from [AirNow observation dataframe]\n predictionAirnowData = gpd.GeoDataFrame(\n df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n predictionAirnowData = predictionAirnowData.set_crs(\"EPSG:4326\")\n \n # Setting up GeoPandas df from [CMAQ actual dataframe]\n cmaqActualData = gpd.GeoDataFrame(\n df_actual, geometry=gpd.points_from_xy(df_actual.Longitude, df_actual.Latitude))\n cmaqActualData = cmaqActualData.set_crs(\"EPSG:4326\")\n\n\n ###### Plots ROW 1 ######\n \n # Plotting CMAQ prediction Map \n predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax1, figsize=(35, 30))\n\n # Plotting CMAQ predictions base layer for AirNow observations \n predictionMap = predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax2, figsize=(35, 30))\n \n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=predictionMap, column='AirNOW_O3', \n marker='o', markersize=65, cmap=cmap, \n linewidths=1, edgecolors=\"black\", figsize=(35, 30))\n \n \n ###### Plots ROW 2 ######\n \n # Plotting Actual CMAQ Map \n cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax3, figsize=(35, 30))\n \n # Plotting Actual CMAQ base layer for AirNow observations \n actualCmaqMap = cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax4, figsize=(35, 30))\n \n\n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=actualCmaqMap, column='AirNOW_O3', \n marker='o', markersize=65, cmap=cmap, \n linewidths=1, edgecolors=\"black\", figsize=(35, 30))\n \n \n # cb = plt.colorbar(predictionMap, ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n\n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n predictionMapPlotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n AirNowPlotTitle = datetime.strftime(dateObj, \"AirNow Stations: %Y-%m-%d (Time: %-H)\")\n cmaqActualPlotTitle = datetime.strftime(dateObj, \"Actual CMAQ: %Y-%m-%d (Time: %-H)\")\n AirNowActualPlotTitle = datetime.strftime(dateObj, \"Actual AirNow Stations: %Y-%m-%d (Time: %-H)\")\n ax1.set_title(predictionMapPlotTitle, fontsize= 35)\n ax2.set_title(AirNowPlotTitle, fontsize= 35)\n ax3.set_title(cmaqActualPlotTitle, fontsize= 35)\n ax4.set_title(AirNowActualPlotTitle, fontsize= 35)\n print(\"Saving Prediction Map: \", predictionMapPlotTitle)\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n \n\n\n\n\n\n", + "history_output" : "Plotting... 
2022061012\nSaving Prediction Map: 2022-06-10 (Time: 12)\n", + "history_begin_time" : 1655232669026, + "history_end_time" : 1655259494694, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "P6yYncI9N0qJ", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 35\n\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\ncmaq_actual = pd.read_csv(home + \"/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_actual = cmaq_actual['YYYYMMDDHH'].unique()\ndfs_actual = dict(tuple(cmaq_actual.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\nfig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2,2, figsize=(35, 25))\n\ntime_ = time_[:1]\n\nfor t in time_:\n \n print(\"Plotting... 
\", t)\n \n # CMAQ prediction dataframe \n df = dfs[t]\n \n # AirNow observation dataframe\n df_Airnow = dfs_Airnow[t]\n\n # CMAQ actual dataframe\n df_actual = dfs_actual[t]\n \n # Setting up GeoPandas df from [CMAQ prediction dataframe] \n predictionMapData = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n predictionMapData = predictionMapData.set_crs(\"EPSG:4326\")\n\n # Setting up GeoPandas df from [AirNow observation dataframe]\n predictionAirnowData = gpd.GeoDataFrame(\n df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n predictionAirnowData = predictionAirnowData.set_crs(\"EPSG:4326\")\n \n # Setting up GeoPandas df from [CMAQ actual dataframe]\n cmaqActualData = gpd.GeoDataFrame(\n df_actual, geometry=gpd.points_from_xy(df_actual.Longitude, df_actual.Latitude))\n cmaqActualData = cmaqActualData.set_crs(\"EPSG:4326\")\n\n\n ###### Plots ROW 1 ######\n \n # Plotting CMAQ prediction Map \n predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax1, figsize=(30, 25))\n\n # Plotting CMAQ predictions base layer for AirNow observations \n predictionMap = predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax2, figsize=(30, 25))\n \n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=predictionMap, column='AirNOW_O3', \n marker='o', markersize=65, cmap=cmap, \n linewidths=1, edgecolors=\"black\", figsize=(30, 25))\n \n \n ###### Plots ROW 2 ######\n \n # Plotting Actual CMAQ Map \n cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax3, figsize=(30, 25))\n \n # Plotting Actual CMAQ base layer for AirNow observations \n actualCmaqMap = cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax4, figsize=(30, 25))\n \n\n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=actualCmaqMap, column='AirNOW_O3', \n marker='o', markersize=65, cmap=cmap, \n linewidths=1, edgecolors=\"black\", figsize=(30, 25))\n \n \n # cb = plt.colorbar(predictionMap, ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n\n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n predictionMapPlotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n AirNowPlotTitle = datetime.strftime(dateObj, \"AirNow Stations: %Y-%m-%d (Time: %-H)\")\n cmaqActualPlotTitle = datetime.strftime(dateObj, \"Actual CMAQ: %Y-%m-%d (Time: %-H)\")\n AirNowActualPlotTitle = datetime.strftime(dateObj, \"Actual AirNow Stations: %Y-%m-%d (Time: %-H)\")\n ax1.set_title(predictionMapPlotTitle, fontsize= 35)\n ax2.set_title(AirNowPlotTitle, fontsize= 35)\n ax3.set_title(cmaqActualPlotTitle, fontsize= 35)\n ax4.set_title(AirNowActualPlotTitle, fontsize= 35)\n print(\"Saving Prediction Map: \", predictionMapPlotTitle)\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n \n\n\n\n\n\n", + "history_output" : "Plotting... 
2022061012\nSaving Prediction Map: 2022-06-10 (Time: 12)\n", + "history_begin_time" : 1655230503724, + "history_end_time" : 1655259494232, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "QsMjsLPAANtK", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 35\n\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\ncmaq_actual = pd.read_csv(home + \"/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_actual = cmaq_actual['YYYYMMDDHH'].unique()\ndfs_actual = dict(tuple(cmaq_actual.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\nfig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2,2, figsize=(25, 20))\n\ntime_ = time_[:1]\n\nfor t in time_:\n \n print(\"Plotting... 
\", t)\n \n # CMAQ prediction dataframe \n df = dfs[t]\n \n # AirNow observation dataframe\n df_Airnow = dfs_Airnow[t]\n\n # CMAQ actual dataframe\n df_actual = dfs_actual[t]\n \n # Setting up GeoPandas df from [CMAQ prediction dataframe] \n predictionMapData = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n predictionMapData = predictionMapData.set_crs(\"EPSG:4326\")\n\n # Setting up GeoPandas df from [AirNow observation dataframe]\n predictionAirnowData = gpd.GeoDataFrame(\n df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n predictionAirnowData = predictionAirnowData.set_crs(\"EPSG:4326\")\n \n # Setting up GeoPandas df from [CMAQ actual dataframe]\n cmaqActualData = gpd.GeoDataFrame(\n df_actual, geometry=gpd.points_from_xy(df_actual.Longitude, df_actual.Latitude))\n cmaqActualData = cmaqActualData.set_crs(\"EPSG:4326\")\n\n\n ###### Plots ROW 1 ######\n \n # Plotting CMAQ prediction Map \n predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax1, figsize=(30, 25))\n\n # Plotting CMAQ predictions base layer for AirNow observations \n predictionMap = predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax2, figsize=(30, 25))\n \n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=predictionMap, column='AirNOW_O3', \n marker='o', markersize=65, cmap=cmap, \n linewidths=1, edgecolors=\"black\", figsize=(30, 25))\n \n \n ###### Plots ROW 2 ######\n \n # Plotting Actual CMAQ Map \n cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax3, figsize=(30, 25))\n \n # Plotting Actual CMAQ base layer for AirNow observations \n actualCmaqMap = cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax4, figsize=(30, 25))\n \n\n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=actualCmaqMap, column='AirNOW_O3', \n marker='o', markersize=65, cmap=cmap, \n linewidths=1, edgecolors=\"black\", figsize=(30, 25))\n \n \n # cb = plt.colorbar(predictionMap, ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n\n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n predictionMapPlotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n AirNowPlotTitle = datetime.strftime(dateObj, \"AirNow Stations: %Y-%m-%d (Time: %-H)\")\n cmaqActualPlotTitle = datetime.strftime(dateObj, \"Actual CMAQ: %Y-%m-%d (Time: %-H)\")\n AirNowActualPlotTitle = datetime.strftime(dateObj, \"Actual AirNow Stations: %Y-%m-%d (Time: %-H)\")\n ax1.set_title(predictionMapPlotTitle, fontsize= 35)\n ax2.set_title(AirNowPlotTitle, fontsize= 35)\n ax3.set_title(cmaqActualPlotTitle, fontsize= 35)\n ax4.set_title(AirNowActualPlotTitle, fontsize= 35)\n print(\"Saving Prediction Map: \", predictionMapPlotTitle)\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n \n\n\n\n\n\n", + "history_output" : "Plotting... 
2022061012\nSaving Prediction Map: 2022-06-10 (Time: 12)\n", + "history_begin_time" : 1655230175685, + "history_end_time" : 1655259493761, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "KvLph0FpNcOc", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 35\n\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\ncmaq_actual = pd.read_csv(home + \"/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_actual = cmaq_actual['YYYYMMDDHH'].unique()\ndfs_actual = dict(tuple(cmaq_actual.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\nfig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2,2, figsize=(25, 20))\n\ntime_ = time_[:1]\n\nfor t in time_:\n \n print(\"Plotting... 
\", t)\n \n # CMAQ prediction dataframe \n df = dfs[t]\n \n # AirNow observation dataframe\n df_Airnow = dfs_Airnow[t]\n\n # CMAQ actual dataframe\n df_actual = dfs_actual[t]\n \n # Setting up GeoPandas df from [CMAQ prediction dataframe] \n predictionMapData = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n predictionMapData = predictionMapData.set_crs(\"EPSG:4326\")\n\n # Setting up GeoPandas df from [AirNow observation dataframe]\n predictionAirnowData = gpd.GeoDataFrame(\n df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n predictionAirnowData = predictionAirnowData.set_crs(\"EPSG:4326\")\n \n # Setting up GeoPandas df from [CMAQ actual dataframe]\n cmaqActualData = gpd.GeoDataFrame(\n df_actual, geometry=gpd.points_from_xy(df_actual.Longitude, df_actual.Latitude))\n cmaqActualData = cmaqActualData.set_crs(\"EPSG:4326\")\n\n\n ###### Plots ROW 1 ######\n \n # Plotting CMAQ prediction Map \n predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax1, figsize=(20, 18))\n\n # Plotting CMAQ predictions base layer for AirNow observations \n predictionMap = predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax2, figsize=(20, 18))\n \n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=predictionMap, column='AirNOW_O3', \n marker='o', markersize=65, cmap=cmap, \n linewidths=1, edgecolors=\"black\", figsize=(20, 18))\n \n \n ###### Plots ROW 2 ######\n \n # Plotting Actual CMAQ Map \n cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax3, figsize=(20, 18))\n \n # Plotting Actual CMAQ base layer for AirNow observations \n actualCmaqMap = cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax4, figsize=(20, 18))\n \n\n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=actualCmaqMap, column='AirNOW_O3', \n marker='o', markersize=65, cmap=cmap, \n linewidths=1, edgecolors=\"black\", figsize=(20, 18))\n \n \n # cb = plt.colorbar(predictionMap, ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n\n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n predictionMapPlotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n AirNowPlotTitle = datetime.strftime(dateObj, \"AirNow Stations: %Y-%m-%d (Time: %-H)\")\n cmaqActualPlotTitle = datetime.strftime(dateObj, \"Actual CMAQ: %Y-%m-%d (Time: %-H)\")\n AirNowActualPlotTitle = datetime.strftime(dateObj, \"Actual AirNow Stations: %Y-%m-%d (Time: %-H)\")\n ax1.set_title(predictionMapPlotTitle, fontsize= 35)\n ax2.set_title(AirNowPlotTitle, fontsize= 35)\n ax3.set_title(cmaqActualPlotTitle, fontsize= 35)\n ax4.set_title(AirNowActualPlotTitle, fontsize= 35)\n print(\"Saving Prediction Map: \", predictionMapPlotTitle)\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n \n\n\n\n\n\n", + "history_output" : "Plotting... 
2022061012\nSaving Prediction Map: 2022-06-10 (Time: 12)\n", + "history_begin_time" : 1655229820868, + "history_end_time" : 1655259493201, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "DhUhu3T0zfTl", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 35\n\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\ncmaq_actual = pd.read_csv(home + \"/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_actual = cmaq_actual['YYYYMMDDHH'].unique()\ndfs_actual = dict(tuple(cmaq_actual.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\nfig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2,2, figsize=(45, 18))\n\ntime_ = time_[:1]\n\nfor t in time_:\n \n print(\"Plotting... 
\", t)\n \n # CMAQ prediction dataframe \n df = dfs[t]\n \n # AirNow observation dataframe\n df_Airnow = dfs_Airnow[t]\n\n # CMAQ actual dataframe\n df_actual = dfs_actual[t]\n \n # Setting up GeoPandas df from [CMAQ prediction dataframe] \n predictionMapData = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n predictionMapData = predictionMapData.set_crs(\"EPSG:4326\")\n\n # Setting up GeoPandas df from [AirNow observation dataframe]\n predictionAirnowData = gpd.GeoDataFrame(\n df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n predictionAirnowData = predictionAirnowData.set_crs(\"EPSG:4326\")\n \n # Setting up GeoPandas df from [CMAQ actual dataframe]\n cmaqActualData = gpd.GeoDataFrame(\n df_actual, geometry=gpd.points_from_xy(df_actual.Longitude, df_actual.Latitude))\n cmaqActualData = cmaqActualData.set_crs(\"EPSG:4326\")\n\n\n ###### Plots ROW 1 ######\n \n # Plotting CMAQ prediction Map \n predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax1)\n\n # Plotting CMAQ predictions base layer for AirNow observations \n predictionMap = predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax2)\n \n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=predictionMap, column='AirNOW_O3', \n marker='o', markersize=65, cmap=cmap, \n linewidths=1, edgecolors=\"black\")\n \n \n ###### Plots ROW 2 ######\n \n # Plotting Actual CMAQ Map \n cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax3)\n \n # Plotting Actual CMAQ base layer for AirNow observations \n actualCmaqMap = cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax4)\n \n\n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=actualCmaqMap, column='AirNOW_O3', \n marker='o', markersize=65, cmap=cmap, \n linewidths=1, edgecolors=\"black\")\n \n \n # cb = plt.colorbar(predictionMap, ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n\n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n predictionMapPlotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n AirNowPlotTitle = datetime.strftime(dateObj, \"AirNow Stations: %Y-%m-%d (Time: %-H)\")\n cmaqActualPlotTitle = datetime.strftime(dateObj, \"Actual CMAQ: %Y-%m-%d (Time: %-H)\")\n AirNowActualPlotTitle = datetime.strftime(dateObj, \"Actual AirNow Stations: %Y-%m-%d (Time: %-H)\")\n ax1.set_title(predictionMapPlotTitle, fontsize= 35)\n ax2.set_title(AirNowPlotTitle, fontsize= 35)\n ax3.set_title(cmaqActualPlotTitle, fontsize= 35)\n ax4.set_title(AirNowActualPlotTitle, fontsize= 35)\n print(\"Saving Prediction Map: \", predictionMapPlotTitle)\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n \n\n\n\n\n\n", + "history_output" : "Plotting... 
2022061012\nSaving Prediction Map: 2022-06-10 (Time: 12)\n", + "history_begin_time" : 1655229538054, + "history_end_time" : 1655229747660, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "S1htxOwVqf3Y", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 35\n\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\ncmaq_actual = pd.read_csv(home + \"/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_actual = cmaq_actual['YYYYMMDDHH'].unique()\ndfs_actual = dict(tuple(cmaq_actual.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\nfig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2,2, figsize=(35, 15))\n\ntime_ = time_[:1]\n\nfor t in time_:\n \n print(\"Plotting... 
\", t)\n \n # CMAQ prediction dataframe \n df = dfs[t]\n \n # AirNow observation dataframe\n df_Airnow = dfs_Airnow[t]\n\n # CMAQ actual dataframe\n df_actual = dfs_actual[t]\n \n # Setting up GeoPandas df from [CMAQ prediction dataframe] \n predictionMapData = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n predictionMapData = predictionMapData.set_crs(\"EPSG:4326\")\n\n # Setting up GeoPandas df from [AirNow observation dataframe]\n predictionAirnowData = gpd.GeoDataFrame(\n df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n predictionAirnowData = predictionAirnowData.set_crs(\"EPSG:4326\")\n \n # Setting up GeoPandas df from [CMAQ actual dataframe]\n cmaqActualData = gpd.GeoDataFrame(\n df_actual, geometry=gpd.points_from_xy(df_actual.Longitude, df_actual.Latitude))\n cmaqActualData = cmaqActualData.set_crs(\"EPSG:4326\")\n\n\n ###### Plots ROW 1 ######\n \n # Plotting CMAQ prediction Map \n predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax1)\n\n # Plotting CMAQ predictions base layer for AirNow observations \n predictionMap = predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax2)\n \n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=predictionMap, column='AirNOW_O3', \n marker='o', markersize=65, cmap=cmap, \n linewidths=1, edgecolors=\"black\")\n \n \n ###### Plots ROW 2 ######\n \n # Plotting Actual CMAQ Map \n cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax3)\n \n # Plotting Actual CMAQ base layer for AirNow observations \n actualCmaqMap = cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax4)\n \n\n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=actualCmaqMap, column='AirNOW_O3', \n marker='o', markersize=65, cmap=cmap, \n linewidths=1, edgecolors=\"black\")\n \n \n # cb = plt.colorbar(predictionMap, ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n\n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n predictionMapPlotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n AirNowPlotTitle = datetime.strftime(dateObj, \"AirNow Stations: %Y-%m-%d (Time: %-H)\")\n cmaqActualPlotTitle = datetime.strftime(dateObj, \"Actual CMAQ: %Y-%m-%d (Time: %-H)\")\n AirNowActualPlotTitle = datetime.strftime(dateObj, \"Actual AirNow Stations: %Y-%m-%d (Time: %-H)\")\n ax1.set_title(predictionMapPlotTitle, fontsize= 35)\n ax2.set_title(AirNowPlotTitle, fontsize= 35)\n ax3.set_title(cmaqActualPlotTitle, fontsize= 35)\n ax4.set_title(AirNowActualPlotTitle, fontsize= 35)\n print(\"Saving Prediction Map: \", predictionMapPlotTitle)\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n \n\n\n\n\n\n", + "history_output" : "Plotting... 
2022061012\nSaving Prediction Map: 2022-06-10 (Time: 12)\n", + "history_begin_time" : 1655229243236, + "history_end_time" : 1655229747092, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "vYELyGA9Gp0o", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 35\n\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\ncmaq_actual = pd.read_csv(home + \"/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_actual = cmaq_actual['YYYYMMDDHH'].unique()\ndfs_actual = dict(tuple(cmaq_actual.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\nfig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2,2, figsize=(35, 15))\n\ntime_ = time_[:1]\n\nfor t in time_:\n \n print(\"Plotting... 
\", t)\n \n # CMAQ prediction dataframe \n df = dfs[t]\n \n # AirNow observation dataframe\n df_Airnow = dfs_Airnow[t]\n\n # CMAQ actual dataframe\n df_actual = dfs_actual[t]\n \n # Setting up GeoPandas df from [CMAQ prediction dataframe] \n predictionMapData = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n predictionMapData = predictionMapData.set_crs(\"EPSG:4326\")\n\n # Setting up GeoPandas df from [AirNow observation dataframe]\n predictionAirnowData = gpd.GeoDataFrame(\n df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n predictionAirnowData = predictionAirnowData.set_crs(\"EPSG:4326\")\n \n # Setting up GeoPandas df from [CMAQ actual dataframe]\n cmaqActualData = gpd.GeoDataFrame(\n df_actual, geometry=gpd.points_from_xy(df_actual.Longitude, df_actual.Latitude))\n cmaqActualData = cmaqActualData.set_crs(\"EPSG:4326\")\n\n\n ###### Plots ROW 1 ######\n \n # Plotting CMAQ prediction Map \n predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax1)\n\n # Plotting CMAQ predictions base layer for AirNow observations \n predictionMap = predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax2)\n \n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=predictionMap, column='AirNOW_O3', \n marker='o', markersize=65, cmap=cmap, \n linewidths=1, edgecolors=\"black\")\n \n \n ###### Plots ROW 2 ######\n \n # Plotting Actual CMAQ Map \n cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax3)\n \n # Plotting Actual CMAQ base layer for AirNow observations \n actualCmaqMap = cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax4)\n \n\n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=actualCmaqMap, column='AirNOW_O3', \n marker='o', markersize=65, cmap=cmap, \n linewidths=1, edgecolors=\"black\")\n \n \n # cb = plt.colorbar(predictionMap, ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n\n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n predictionMapPlotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n AirNowPlotTitle = datetime.strftime(dateObj, \"AirNow Stations: %Y-%m-%d (Time: %-H)\")\n cmaqActualPlotTitle = datetime.strftime(dateObj, \"Actual CMAQ: %Y-%m-%d (Time: %-H)\")\n AirNowActualPlotTitle = datetime.strftime(dateObj, \"Actual AirNow Stations: %Y-%m-%d (Time: %-H)\")\n ax1.set_title(predictionMapPlotTitle, fontsize= 35)\n ax2.set_title(AirNowPlotTitle, fontsize= 35)\n ax3.set_title(cmaqActualPlotTitle, fontsize= 35)\n ax4.set_title(AirNowActualPlotTitle, fontsize= 35)\n print(\"Saving Prediction Map: \", predictionMapPlotTitle)\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n \n\n\n\n\n\n", + "history_output" : null, + "history_begin_time" : 1655228926038, + "history_end_time" : null, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : null +},{ + "history_id" : "lDUvA8D8fX4z", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as 
np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 35\n\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\ncmaq_actual = pd.read_csv(home + \"/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_actual = cmaq_actual['YYYYMMDDHH'].unique()\ndfs_actual = dict(tuple(cmaq_actual.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\nfig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2,2, figsize=(35, 45))\n\ntime_ = time_[:1]\n\nfor t in time_:\n \n print(\"Plotting... \", t)\n \n # CMAQ prediction dataframe \n df = dfs[t]\n \n # AirNow observation dataframe\n df_Airnow = dfs_Airnow[t]\n\n # CMAQ actual dataframe\n df_actual = dfs_actual[t]\n \n # Setting up GeoPandas df from [CMAQ prediction dataframe] \n predictionMapData = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n predictionMapData = predictionMapData.set_crs(\"EPSG:4326\")\n\n # Setting up GeoPandas df from [AirNow observation dataframe]\n predictionAirnowData = gpd.GeoDataFrame(\n df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n predictionAirnowData = predictionAirnowData.set_crs(\"EPSG:4326\")\n \n # Setting up GeoPandas df from [CMAQ actual dataframe]\n cmaqActualData = gpd.GeoDataFrame(\n df_actual, geometry=gpd.points_from_xy(df_actual.Longitude, df_actual.Latitude))\n cmaqActualData = cmaqActualData.set_crs(\"EPSG:4326\")\n\n\n ###### Plots ROW 1 ######\n \n # Plotting CMAQ prediction Map \n predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax1)\n\n # Plotting CMAQ predictions base layer for AirNow observations \n predictionMap = predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax2)\n \n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=predictionMap, column='AirNOW_O3', \n marker='o', markersize=65, cmap=cmap, \n linewidths=1, edgecolors=\"black\")\n \n \n ###### Plots ROW 2 ######\n \n # Plotting Actual CMAQ Map \n cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax3)\n \n # Plotting Actual CMAQ base layer for AirNow observations \n actualCmaqMap = cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax4)\n \n\n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=actualCmaqMap, column='AirNOW_O3', \n marker='o', markersize=65, cmap=cmap, \n linewidths=1, edgecolors=\"black\")\n \n \n # cb = plt.colorbar(predictionMap, 
ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n\n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n predictionMapPlotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n AirNowPlotTitle = datetime.strftime(dateObj, \"AirNow Stations: %Y-%m-%d (Time: %-H)\")\n cmaqActualPlotTitle = datetime.strftime(dateObj, \"Actual CMAQ: %Y-%m-%d (Time: %-H)\")\n AirNowActualPlotTitle = datetime.strftime(dateObj, \"Actual AirNow Stations: %Y-%m-%d (Time: %-H)\")\n ax1.set_title(predictionMapPlotTitle, fontsize= 35)\n ax2.set_title(AirNowPlotTitle, fontsize= 35)\n ax3.set_title(cmaqActualPlotTitle, fontsize= 35)\n ax4.set_title(AirNowActualPlotTitle, fontsize= 35)\n print(\"Saving Prediction Map: \", predictionMapPlotTitle)\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n \n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave('/groups/ESS/aalnaim/cmaq/prediction.mp4', images, fps=10)\n\n\nprint(\"Generating prediction.gif ...\")\n# create .gif from .mp4 using FFmpeg\nos.system('module load ffmpeg; ffmpeg -i /groups/ESS/aalnaim/cmaq/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/prediction.mp4')\nprint(\"Done prediction.gif !!!\")\n \n\n\n\n\n", + "history_output" : "Plotting... 2022061012\nSaving Prediction Map: 2022-06-10 (Time: 12)\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061012.tif\nIMAGEIO FFMPEG_WRITER WARNING: input image is not divisible by macro_block_size=16, resizing from (3500, 4500) to (3504, 4512) to ensure video compatibility with most codecs and players. To prevent resizing, make your input image divisible by the macro_block_size or set the macro_block_size to 1 (risking incompatibility).\nGenerating prediction.gif ...\nffmpeg version 4.2.2 Copyright (c) 2000-2019 the FFmpeg developers\n built with gcc 9.3.0 (GCC)\n configuration: --prefix=/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0/ffmpeg-4.2.2-ey --enable-pic --cc=/opt/sw/spack/0.16.0/lib/spack/env/gcc/gcc --cxx=/opt/sw/spack/0.16.0/lib/spack/env/gcc/g++ --enable-libxcb --enable-libxcb-shape --enable-libxcb-shm --enable-libxcb-xfixes --enable-xlib --disable-libfontconfig --disable-libfreetype --disable-libfribidi --enable-bzlib --enable-libmp3lame --enable-libopenjpeg --disable-libopus --enable-libspeex --enable-libvorbis --enable-avresample --enable-openssl --enable-shared --disable-libzmq --disable-libssh --disable-libwebp --disable-lzma --disable-libsnappy --enable-sdl2 --enable-libaom\n libavutil 56. 31.100 / 56. 31.100\n libavcodec 58. 54.100 / 58. 54.100\n libavformat 58. 29.100 / 58. 29.100\n libavdevice 58. 8.100 / 58. 8.100\n libavfilter 7. 57.100 / 7. 57.100\n libavresample 4. 0. 0 / 4. 0. 0\n libswscale 5. 5.100 / 5. 5.100\n libswresample 3. 5.100 / 3. 
5.100\nInput #0, mov,mp4,m4a,3gp,3g2,mj2, from '/groups/ESS/aalnaim/cmaq/prediction.mp4':\n Metadata:\n major_brand : isom\n minor_version : 512\n compatible_brands: isomiso2avc1mp41\n encoder : Lavf58.29.100\n Duration: 00:00:00.10, start: 0.000000, bitrate: 27983 kb/s\n Stream #0:0(und): Video: h264 (High) (avc1 / 0x31637661), yuv420p, 3504x4512, 27919 kb/s, 10 fps, 10 tbr, 10240 tbn, 20 tbc (default)\n Metadata:\n handler_name : VideoHandler\n", + "history_begin_time" : 1655226215601, + "history_end_time" : 1655228956071, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Failed" +},{ + "history_id" : "3t61HLKhIKkg", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 35\n\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\ncmaq_actual = pd.read_csv(home + \"/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_actual = cmaq_actual['YYYYMMDDHH'].unique()\ndfs_actual = dict(tuple(cmaq_actual.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\nfig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2,2, figsize=(55, 45))\n\ntime_ = time_[:1]\n\nfor t in time_:\n \n print(\"Plotting... 
\", t)\n \n # CMAQ prediction dataframe \n df = dfs[t]\n \n # AirNow observation dataframe\n df_Airnow = dfs_Airnow[t]\n\n # CMAQ actual dataframe\n df_actual = dfs_actual[t]\n \n # Setting up GeoPandas df from [CMAQ prediction dataframe] \n predictionMapData = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n predictionMapData = predictionMapData.set_crs(\"EPSG:4326\")\n\n # Setting up GeoPandas df from [AirNow observation dataframe]\n predictionAirnowData = gpd.GeoDataFrame(\n df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n predictionAirnowData = predictionAirnowData.set_crs(\"EPSG:4326\")\n \n # Setting up GeoPandas df from [CMAQ actual dataframe]\n cmaqActualData = gpd.GeoDataFrame(\n df_actual, geometry=gpd.points_from_xy(df_actual.Longitude, df_actual.Latitude))\n cmaqActualData = cmaqActualData.set_crs(\"EPSG:4326\")\n\n\n ###### Plots ROW 1 ######\n \n # Plotting CMAQ prediction Map \n predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax1)\n\n # Plotting CMAQ predictions base layer for AirNow observations \n predictionMap = predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax2)\n \n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=predictionMap, column='AirNOW_O3', \n marker='o', markersize=65, cmap=cmap, \n linewidths=1, edgecolors=\"black\")\n \n \n ###### Plots ROW 2 ######\n \n # Plotting Actual CMAQ Map \n cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax3)\n \n # Plotting Actual CMAQ base layer for AirNow observations \n actualCmaqMap = cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax4)\n \n\n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=actualCmaqMap, column='AirNOW_O3', \n marker='o', markersize=65, cmap=cmap, \n linewidths=1, edgecolors=\"black\")\n \n \n # cb = plt.colorbar(predictionMap, ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n\n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n predictionMapPlotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n AirNowPlotTitle = datetime.strftime(dateObj, \"AirNow Stations: %Y-%m-%d (Time: %-H)\")\n cmaqActualPlotTitle = datetime.strftime(dateObj, \"Actual CMAQ: %Y-%m-%d (Time: %-H)\")\n AirNowActualPlotTitle = datetime.strftime(dateObj, \"Actual AirNow Stations: %Y-%m-%d (Time: %-H)\")\n ax1.set_title(predictionMapPlotTitle, fontsize= 35)\n ax2.set_title(AirNowPlotTitle, fontsize= 35)\n ax3.set_title(cmaqActualPlotTitle, fontsize= 35)\n ax4.set_title(AirNowActualPlotTitle, fontsize= 35)\n print(\"Saving Prediction Map: \", predictionMapPlotTitle)\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n \n\n \n\n\n\n\n", + "history_output" : "Plotting... 
2022061012\nSaving Prediction Map: 2022-06-10 (Time: 12)\n", + "history_begin_time" : 1655225809130, + "history_end_time" : 1655229746395, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "BHDHXN3b4vaD", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 35\n\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\ncmaq_actual = pd.read_csv(home + \"/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_actual = cmaq_actual['YYYYMMDDHH'].unique()\ndfs_actual = dict(tuple(cmaq_actual.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\nfig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2,2, figsize=(35, 25))\n\ntime_ = time_[:1]\n\nfor t in time_:\n \n print(\"Plotting... 
\", t)\n \n # CMAQ prediction dataframe \n df = dfs[t]\n \n # AirNow observation dataframe\n df_Airnow = dfs_Airnow[t]\n\n # CMAQ actual dataframe\n df_actual = dfs_actual[t]\n \n # Setting up GeoPandas df from [CMAQ prediction dataframe] \n predictionMapData = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n predictionMapData = predictionMapData.set_crs(\"EPSG:4326\")\n\n # Setting up GeoPandas df from [AirNow observation dataframe]\n predictionAirnowData = gpd.GeoDataFrame(\n df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n predictionAirnowData = predictionAirnowData.set_crs(\"EPSG:4326\")\n \n # Setting up GeoPandas df from [CMAQ actual dataframe]\n cmaqActualData = gpd.GeoDataFrame(\n df_actual, geometry=gpd.points_from_xy(df_actual.Longitude, df_actual.Latitude))\n cmaqActualData = cmaqActualData.set_crs(\"EPSG:4326\")\n\n\n ###### Plots ROW 1 ######\n \n # Plotting CMAQ prediction Map \n predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax1)\n\n # Plotting CMAQ predictions base layer for AirNow observations \n predictionMap = predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax2)\n \n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=predictionMap, column='AirNOW_O3', \n marker='o', markersize=65, cmap=cmap, \n linewidths=1, edgecolors=\"black\")\n \n \n ###### Plots ROW 2 ######\n \n # Plotting Actual CMAQ Map \n cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax3)\n \n # Plotting Actual CMAQ base layer for AirNow observations \n actualCmaqMap = cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax4)\n \n\n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=actualCmaqMap, column='AirNOW_O3', \n marker='o', markersize=65, cmap=cmap, \n linewidths=1, edgecolors=\"black\")\n \n \n # cb = plt.colorbar(predictionMap, ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n\n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n predictionMapPlotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n AirNowPlotTitle = datetime.strftime(dateObj, \"AirNow Stations: %Y-%m-%d (Time: %-H)\")\n cmaqActualPlotTitle = datetime.strftime(dateObj, \"Actual CMAQ: %Y-%m-%d (Time: %-H)\")\n AirNowActualPlotTitle = datetime.strftime(dateObj, \"Actual AirNow Stations: %Y-%m-%d (Time: %-H)\")\n ax1.set_title(predictionMapPlotTitle, fontsize= 35)\n ax2.set_title(AirNowPlotTitle, fontsize= 35)\n ax3.set_title(cmaqActualPlotTitle, fontsize= 35)\n ax4.set_title(AirNowActualPlotTitle, fontsize= 35)\n print(\"Saving Prediction Map: \", predictionMapPlotTitle)\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n \n\n \n\n\n\n\n", + "history_output" : "Plotting... 
2022061012\nSaving Prediction Map: 2022-06-10 (Time: 12)\n", + "history_begin_time" : 1655225594521, + "history_end_time" : 1655229745910, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "1R6daTOKnP4r", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 25\n\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\ncmaq_actual = pd.read_csv(home + \"/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_actual = cmaq_actual['YYYYMMDDHH'].unique()\ndfs_actual = dict(tuple(cmaq_actual.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\nfig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2,2, figsize=(20, 18))\n\ntime_ = time_[:1]\n\nfor t in time_:\n \n print(\"Plotting... 
\", t)\n \n # CMAQ prediction dataframe \n df = dfs[t]\n \n # AirNow observation dataframe\n df_Airnow = dfs_Airnow[t]\n\n # CMAQ actual dataframe\n df_actual = dfs_actual[t]\n \n # Setting up GeoPandas df from [CMAQ prediction dataframe] \n predictionMapData = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n predictionMapData = predictionMapData.set_crs(\"EPSG:4326\")\n\n # Setting up GeoPandas df from [AirNow observation dataframe]\n predictionAirnowData = gpd.GeoDataFrame(\n df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n predictionAirnowData = predictionAirnowData.set_crs(\"EPSG:4326\")\n \n # Setting up GeoPandas df from [CMAQ actual dataframe]\n cmaqActualData = gpd.GeoDataFrame(\n df_actual, geometry=gpd.points_from_xy(df_actual.Longitude, df_actual.Latitude))\n cmaqActualData = cmaqActualData.set_crs(\"EPSG:4326\")\n\n\n ###### Plots ROW 1 ######\n \n # Plotting CMAQ prediction Map \n predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax1)\n\n # Plotting CMAQ predictions base layer for AirNow observations \n predictionMap = predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax2)\n \n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=predictionMap, column='AirNOW_O3', \n marker='o', markersize=65, cmap=cmap, \n linewidths=1, edgecolors=\"black\")\n \n \n ###### Plots ROW 2 ######\n \n # Plotting Actual CMAQ Map \n cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax3)\n \n # Plotting Actual CMAQ base layer for AirNow observations \n actualCmaqMap = cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax4)\n \n\n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=actualCmaqMap, column='AirNOW_O3', \n marker='o', markersize=65, cmap=cmap, \n linewidths=1, edgecolors=\"black\")\n \n \n # cb = plt.colorbar(predictionMap, ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n\n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n predictionMapPlotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n AirNowPlotTitle = datetime.strftime(dateObj, \"AirNow Stations: %Y-%m-%d (Time: %-H)\")\n cmaqActualPlotTitle = datetime.strftime(dateObj, \"Actual CMAQ: %Y-%m-%d (Time: %-H)\")\n AirNowActualPlotTitle = datetime.strftime(dateObj, \"Actual AirNow Stations: %Y-%m-%d (Time: %-H)\")\n ax1.set_title(predictionMapPlotTitle, fontsize= 35)\n ax2.set_title(AirNowPlotTitle, fontsize= 35)\n ax3.set_title(cmaqActualPlotTitle, fontsize= 35)\n ax4.set_title(AirNowActualPlotTitle, fontsize= 35)\n print(\"Saving Prediction Map: \", predictionMapPlotTitle)\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n \n\n \n\n\n\n\n", + "history_output" : "Plotting... 
2022061012\nSaving Prediction Map: 2022-06-10 (Time: 12)\n", + "history_begin_time" : 1655225053830, + "history_end_time" : 1655229745413, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "EkrdX5jV6XMg", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 25\n\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\ncmaq_actual = pd.read_csv(home + \"/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_actual = cmaq_actual['YYYYMMDDHH'].unique()\ndfs_actual = dict(tuple(cmaq_actual.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\nfig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2,2, figsize=(20, 18))\n\ntime_ = time_[:1]\n\nfor t in time_:\n \n print(\"Plotting... 
\", t)\n \n # CMAQ prediction dataframe \n df = dfs[t]\n \n # AirNow observation dataframe\n df_Airnow = dfs_Airnow[t]\n\n # CMAQ actual dataframe\n df_actual = dfs_actual[t]\n \n # Setting up GeoPandas df from [CMAQ prediction dataframe] \n predictionMapData = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n predictionMapData = predictionMapData.set_crs(\"EPSG:4326\")\n\n # Setting up GeoPandas df from [AirNow observation dataframe]\n predictionAirnowData = gpd.GeoDataFrame(\n df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n predictionAirnowData = predictionAirnowData.set_crs(\"EPSG:4326\")\n \n # Setting up GeoPandas df from [CMAQ actual dataframe]\n cmaqActualData = gpd.GeoDataFrame(\n df_actual, geometry=gpd.points_from_xy(df_actual.Longitude, df_actual.Latitude))\n cmaqActualData = cmaqActualData.set_crs(\"EPSG:4326\")\n\n\n ###### Plots ROW 1 ######\n \n # Plotting CMAQ prediction Map \n predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax1)\n\n # Plotting CMAQ predictions base layer for AirNow observations \n predictionMap = predictionMapData.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax2)\n \n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=predictionMap, column='AirNOW_O3', \n marker='o', markersize=65, cmap=cmap, \n linewidths=1, edgecolors=\"black\")\n \n \n ###### Plots ROW 2 ######\n \n # Plotting Actual CMAQ Map \n cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax3)\n \n # Plotting Actual CMAQ base layer for AirNow observations \n actualCmaqMap = cmaqActualData.plot(column='CMAQ12KM_O3(ppb)', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax4)\n \n\n # Plotting AirNow observation layer\n predictionAirnowData.plot(ax=actualCmaqMap, column='AirNOW_O3', \n marker='o', markersize=65, cmap=cmap, \n linewidths=1, edgecolors=\"black\")\n \n \n # cb = plt.colorbar(predictionMap, ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n\n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n predictionMapPlotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n AirNowPlotTitle = datetime.strftime(dateObj, \"AirNow Stations: %Y-%m-%d (Time: %-H)\")\n cmaqActualPlotTitle = datetime.strftime(dateObj, \"Actual CMAQ: %Y-%m-%d (Time: %-H)\")\n AirNowActualPlotTitle = datetime.strftime(dateObj, \"Actual AirNow Stations: %Y-%m-%d (Time: %-H)\")\n ax1.set_title(predictionMapPlotTitle, fontsize= 35)\n ax2.set_title(AirNowPlotTitle, fontsize= 35)\n ax3.set_title(cmaqActualPlotTitle, fontsize= 35)\n ax4.set_title(AirNowActualPlotTitle, fontsize= 35)\n print(\"Saving Prediction Map: \", predictionMapPlotTitle)\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n \n\n \n\n\n\n\n\n", + "history_output" : null, + "history_begin_time" : 1655224988126, + "history_end_time" : null, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : null +},{ + "history_id" : "Mti3KBzYvGJO", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport 
numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 25\n\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\n#df_actual = pd.read_csv(home + \"/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\ntime_ = time_[:3]\n\nfig, ax = plt.subplots(figsize=(20, 18))\n\nfor t in time_:\n print(t)\n df = dfs[t]\n \n df_Airnow = dfs_Airnow[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = gdf.set_crs(\"EPSG:4326\")\n #gdf = gdf.to_crs(\"+proj=lcc +lat_1=2 +lat_2=33.000 +lat_0=45.000 +lon_0=-97.000 +x_0=-97.000 +y_0=40.000 +datum=NAD83 +units=m +no_defs\")\n\n predDf = gdf.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax)\n #cb = plt.colorbar(sm, ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n gdfAirnow = gpd.GeoDataFrame(df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n\n gdfAirnow = gdfAirnow.set_crs(\"EPSG:4326\")\n gdfAirnow.plot(ax=predDf, column='AirNOW_O3', marker='o', markersize=65, cmap=cmap, linewidths=1, edgecolors=\"black\")\n \n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n plt.title(plotTitle, fontdict={'fontsize': 35})\n print(\"Saving Prediction Map: \", plotTitle)\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n \n\n \n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave('/groups/ESS/aalnaim/cmaq/prediction.mp4', images, fps=10)\n\n\nprint(\"Generating prediction.gif ...\")\n# create .gif from .mp4 using FFmpeg\nos.system('module load ffmpeg; ffmpeg -i /groups/ESS/aalnaim/cmaq/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/prediction.mp4')\nprint(\"Done prediction.gif !!!\")\n\n\n", + "history_output" : 
"2022061012\nSaving Prediction Map: 2022-06-10 (Time: 12)\n2022061013\nSaving Prediction Map: 2022-06-10 (Time: 13)\n2022061014\nSaving Prediction Map: 2022-06-10 (Time: 14)\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061012.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061013.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_2022061014.tif\nIMAGEIO FFMPEG_WRITER WARNING: input image is not divisible by macro_block_size=16, resizing from (2000, 1800) to (2000, 1808) to ensure video compatibility with most codecs and players. To prevent resizing, make your input image divisible by the macro_block_size or set the macro_block_size to 1 (risking incompatibility).\nGenerating prediction.gif ...\nffmpeg version 4.2.2 Copyright (c) 2000-2019 the FFmpeg developers\n built with gcc 9.3.0 (GCC)\n configuration: --prefix=/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0/ffmpeg-4.2.2-ey --enable-pic --cc=/opt/sw/spack/0.16.0/lib/spack/env/gcc/gcc --cxx=/opt/sw/spack/0.16.0/lib/spack/env/gcc/g++ --enable-libxcb --enable-libxcb-shape --enable-libxcb-shm --enable-libxcb-xfixes --enable-xlib --disable-libfontconfig --disable-libfreetype --disable-libfribidi --enable-bzlib --enable-libmp3lame --enable-libopenjpeg --disable-libopus --enable-libspeex --enable-libvorbis --enable-avresample --enable-openssl --enable-shared --disable-libzmq --disable-libssh --disable-libwebp --disable-lzma --disable-libsnappy --enable-sdl2 --enable-libaom\n libavutil 56. 31.100 / 56. 31.100\n libavcodec 58. 54.100 / 58. 54.100\n libavformat 58. 29.100 / 58. 29.100\n libavdevice 58. 8.100 / 58. 8.100\n libavfilter 7. 57.100 / 7. 57.100\n libavresample 4. 0. 0 / 4. 0. 0\n libswscale 5. 5.100 / 5. 5.100\n libswresample 3. 5.100 / 3. 5.100\nInput #0, mov,mp4,m4a,3gp,3g2,mj2, from '/groups/ESS/aalnaim/cmaq/prediction.mp4':\n Metadata:\n major_brand : isom\n minor_version : 512\n compatible_brands: isomiso2avc1mp41\n encoder : Lavf58.29.100\n Duration: 00:00:00.30, start: 0.000000, bitrate: 9416 kb/s\n Stream #0:0(und): Video: h264 (High) (avc1 / 0x31637661), yuv420p, 2000x1808, 9392 kb/s, 10 fps, 10 tbr, 10240 tbn, 20 tbc (default)\n Metadata:\n handler_name : VideoHandler\n", + "history_begin_time" : 1655217759724, + "history_end_time" : 1655225018173, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Failed" +},{ + "history_id" : "nIP0bmvjjzQU", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 25\n\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\n#df_actual = pd.read_csv(home + \"/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by 
YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\ntime_ = time_[:3]\n\nfig, ax = plt.subplots(figsize=(20, 18))\n\nfor t in time_:\n print(t)\n df = dfs[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = gdf.set_crs(\"EPSG:4326\")\n #gdf = gdf.to_crs(\"+proj=lcc +lat_1=2 +lat_2=33.000 +lat_0=45.000 +lon_0=-97.000 +x_0=-97.000 +y_0=40.000 +datum=NAD83 +units=m +no_defs\")\n\n predDf = gdf.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax)\n #cb = plt.colorbar(sm, ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n gdfAirnow = gpd.GeoDataFrame(df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n\n gdfAirnow = gdfAirnow.set_crs(\"EPSG:4326\")\n gdfAirnow.plot(ax=predDf, column='AirNOW_O3', marker='o', markersize=65, cmap=cmap, linewidths=1, edgecolors=\"black\")\n \n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n plt.title(plotTitle, fontdict={'fontsize': 35})\n print(\"Saving Prediction Map: \", plotTitle)\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n \n\n \n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave('/groups/ESS/aalnaim/cmaq/prediction.mp4', images, fps=10)\n\n\nprint(\"Generating prediction.gif ...\")\n# create .gif from .mp4 using FFmpeg\nos.system('module load ffmpeg; ffmpeg -i /groups/ESS/aalnaim/cmaq/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/prediction.mp4')\nprint(\"Done prediction.gif !!!\")\n\n\n", + "history_output" : "2022061012\nTraceback (most recent call last):\n File \"prediction_maps.py\", line 62, in \n gdfAirnow = gpd.GeoDataFrame(df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\nNameError: name 'df_Airnow' is not defined\n", + "history_begin_time" : 1655217659455, + "history_end_time" : 1655223925074, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "hjvhxzgoks6M", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 25\n\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in 
Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\n#df_actual = pd.read_csv(home + \"/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\ntime_ = time_[:3]\n\nfig, ax = plt.subplots(figsize=(20, 18))\n\nfor t in time_:\n \n df = dfs[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = gdf.set_crs(\"EPSG:4326\")\n #gdf = gdf.to_crs(\"+proj=lcc +lat_1=2 +lat_2=33.000 +lat_0=45.000 +lon_0=-97.000 +x_0=-97.000 +y_0=40.000 +datum=NAD83 +units=m +no_defs\")\n\n predDf = gdf.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax)\n #cb = plt.colorbar(sm, ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n gdfAirnow = gpd.GeoDataFrame(df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n\n gdfAirnow = gdfAirnow.set_crs(\"EPSG:4326\")\n gdfAirnow.plot(ax=predDf, column='AirNOW_O3', marker='o', markersize=65, cmap=cmap, linewidths=1, edgecolors=\"black\")\n \n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n plt.title(plotTitle, fontdict={'fontsize': 35})\n print(\"Saving Prediction Map: \", plotTitle)\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n \n\n \n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave('/groups/ESS/aalnaim/cmaq/prediction.mp4', images, fps=10)\n\n\nprint(\"Generating prediction.gif ...\")\n# create .gif from .mp4 using FFmpeg\nos.system('module load ffmpeg; ffmpeg -i /groups/ESS/aalnaim/cmaq/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/prediction.mp4')\nprint(\"Done prediction.gif !!!\")\n\n\n", + "history_output" : "Running", + "history_begin_time" : 1655217624171, + "history_end_time" : 1655223924380, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "ilGcoZd5n5RS", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport 
imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 25\n\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\n#df_actual = pd.read_csv(home + \"/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\ntime_ = time_[:3]\n\nfig, ax = plt.subplots(2,2, figsize=(20, 18))\n\nfor t in time_:\n \n df = dfs[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = gdf.set_crs(\"EPSG:4326\")\n #gdf = gdf.to_crs(\"+proj=lcc +lat_1=2 +lat_2=33.000 +lat_0=45.000 +lon_0=-97.000 +x_0=-97.000 +y_0=40.000 +datum=NAD83 +units=m +no_defs\")\n\n predDf = gdf.plot(column='prediction', legend=False,\n cmap=cmap, vmin=0,vmax=80, ax=ax)\n #cb = plt.colorbar(sm, ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n gdfAirnow = gpd.GeoDataFrame(df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n\n gdfAirnow = gdfAirnow.set_crs(\"EPSG:4326\")\n gdfAirnow.plot(ax=predDf, column='AirNOW_O3', marker='o', markersize=65, cmap=cmap, linewidths=1, edgecolors=\"black\")\n \n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n plt.title(plotTitle, fontdict={'fontsize': 35})\n print(\"Saving Prediction Map: \", plotTitle)\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n \n\n \n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave('/groups/ESS/aalnaim/cmaq/prediction.mp4', images, fps=10)\n\n\nprint(\"Generating prediction.gif ...\")\n# create .gif from .mp4 using FFmpeg\nos.system('module load ffmpeg; ffmpeg -i /groups/ESS/aalnaim/cmaq/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/prediction.mp4')\nprint(\"Done prediction.gif !!!\")\n\n\n", + "history_output" : "Running", + "history_begin_time" : 1655217592017, + "history_end_time" : 1655217616702, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : 
"Jjo9SQWPptCe", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 25\n\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\n#df_actual = pd.read_csv(home + \"/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\ntime_ = time_[:3]\n\nfig, ax = plt.subplots(1,1)\n\nfor t in time_:\n \n df = dfs[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = gdf.set_crs(\"EPSG:4326\")\n #gdf = gdf.to_crs(\"+proj=lcc +lat_1=2 +lat_2=33.000 +lat_0=45.000 +lon_0=-97.000 +x_0=-97.000 +y_0=40.000 +datum=NAD83 +units=m +no_defs\")\n\n predDf = gdf.plot(column='prediction', legend=False, figsize=(20, 18),\n cmap=cmap, vmin=0,vmax=80, ax=ax)\n #cb = plt.colorbar(sm, ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n gdfAirnow = gpd.GeoDataFrame(df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n\n gdfAirnow = gdfAirnow.set_crs(\"EPSG:4326\")\n gdfAirnow.plot(ax=predDf, column='AirNOW_O3', marker='o', markersize=65, cmap=cmap, figsize=(20, 15), linewidths=1, edgecolors=\"black\")\n \n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n plt.title(plotTitle, fontdict={'fontsize': 35})\n print(\"Saving Prediction Map: \", plotTitle)\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n \n\n \n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave('/groups/ESS/aalnaim/cmaq/prediction.mp4', images, fps=10)\n\n\nprint(\"Generating prediction.gif ...\")\n# create .gif from .mp4 using FFmpeg\nos.system('module load ffmpeg; ffmpeg -i /groups/ESS/aalnaim/cmaq/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm 
/groups/ESS/aalnaim/cmaq/prediction.mp4')\nprint(\"Done prediction.gif !!!\")\n\n\n", + "history_output" : "Traceback (most recent call last):\n File \"prediction_maps.py\", line 62, in \n gdfAirnow = gpd.GeoDataFrame(df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\nNameError: name 'df_Airnow' is not defined\n", + "history_begin_time" : 1655217533845, + "history_end_time" : 1655217616215, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "uPG3yW3GZ0wt", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 25\n\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\n#df_actual = pd.read_csv(home + \"/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\ntime_ = time_[:3]\n\nfig, (ax1, ax2) = plt.subplots(2,2)\n\nfor t in time_:\n \n df = dfs[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = gdf.set_crs(\"EPSG:4326\")\n #gdf = gdf.to_crs(\"+proj=lcc +lat_1=2 +lat_2=33.000 +lat_0=45.000 +lon_0=-97.000 +x_0=-97.000 +y_0=40.000 +datum=NAD83 +units=m +no_defs\")\n\n predDf = gdf.plot(column='prediction', legend=False, figsize=(20, 18),\n cmap=cmap, vmin=0,vmax=80, ax=ax1)\n #cb = plt.colorbar(sm, ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n gdfAirnow = gpd.GeoDataFrame(df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n\n gdfAirnow = gdfAirnow.set_crs(\"EPSG:4326\")\n gdfAirnow.plot(ax=predDf, column='AirNOW_O3', marker='o', markersize=65, cmap=cmap, figsize=(20, 15), linewidths=1, edgecolors=\"black\")\n \n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n plt.title(plotTitle, fontdict={'fontsize': 35})\n print(\"Saving Prediction Map: \", plotTitle)\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n \n\n \n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_*.tif\")\n#files = 
sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave('/groups/ESS/aalnaim/cmaq/prediction.mp4', images, fps=10)\n\n\nprint(\"Generating prediction.gif ...\")\n# create .gif from .mp4 using FFmpeg\nos.system('module load ffmpeg; ffmpeg -i /groups/ESS/aalnaim/cmaq/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/prediction.mp4')\nprint(\"Done prediction.gif !!!\")\n\n\n", + "history_output" : "Traceback (most recent call last):\n File \"prediction_maps.py\", line 56, in \n predDf = gdf.plot(column='prediction', legend=False, figsize=(20, 18),\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/geopandas/plotting.py\", line 950, in __call__\n return plot_dataframe(data, *args, **kwargs)\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/geopandas/plotting.py\", line 667, in plot_dataframe\n ax.set_aspect(1 / np.cos(y_coord * np.pi / 180))\nAttributeError: 'numpy.ndarray' object has no attribute 'set_aspect'\n", + "history_begin_time" : 1655216905924, + "history_end_time" : 1655217145672, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "VK2aKdFZoNQ2", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 25\n\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\n#df_actual = pd.read_csv(home + \"/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\ntime_ = time_[:3]\n\nfig, (ax1, ax2) = plt.subplots(2,2)\n\nfor t in time_:\n \n df = dfs[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = gdf.set_crs(\"EPSG:4326\")\n #gdf = gdf.to_crs(\"+proj=lcc +lat_1=2 +lat_2=33.000 +lat_0=45.000 +lon_0=-97.000 +x_0=-97.000 +y_0=40.000 +datum=NAD83 +units=m +no_defs\")\n\n predDf = gdf.plot(column='prediction', legend=False, figsize=(20, 18),\n cmap=cmap, vmin=0,vmax=80, ax=ax1)\n #cb = plt.colorbar(sm, ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n gdfAirnow = gpd.GeoDataFrame(df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n\n gdfAirnow = 
gdfAirnow.set_crs(\"EPSG:4326\")\n gdfAirnow.plot(ax=predDf, column='AirNOW_O3', marker='o', markersize=65, cmap=cmap, figsize=(20, 15), linewidths=1, edgecolors=\"black\", ax=ax2)\n \n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n plt.title(plotTitle, fontdict={'fontsize': 35})\n print(\"Saving Prediction Map: \", plotTitle)\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n \n\n \n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave('/groups/ESS/aalnaim/cmaq/prediction.mp4', images, fps=10)\n\n\nprint(\"Generating prediction.gif ...\")\n# create .gif from .mp4 using FFmpeg\nos.system('module load ffmpeg; ffmpeg -i /groups/ESS/aalnaim/cmaq/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/prediction.mp4')\nprint(\"Done prediction.gif !!!\")\n\n\n", + "history_output" : " File \"prediction_maps.py\", line 65\n gdfAirnow.plot(ax=predDf, column='AirNOW_O3', marker='o', markersize=65, cmap=cmap, figsize=(20, 15), linewidths=1, edgecolors=\"black\", ax=ax2)\n ^\nSyntaxError: keyword argument repeated\n", + "history_begin_time" : 1655216803638, + "history_end_time" : 1655217146548, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "EqAGNYcHRl9W", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 25\n\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\n#df_actual = pd.read_csv(home + \"/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\ntime_ = time_[:3]\n\nfig, (ax1, ax2) = plt.subplots(2,2)\n\nfor t in time_:\n \n df = dfs[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = gdf.set_crs(\"EPSG:4326\")\n #gdf = 
gdf.to_crs(\"+proj=lcc +lat_1=2 +lat_2=33.000 +lat_0=45.000 +lon_0=-97.000 +x_0=-97.000 +y_0=40.000 +datum=NAD83 +units=m +no_defs\")\n\n predDf = ax1.plot(column='prediction', legend=False, figsize=(20, 18),\n cmap=cmap, vmin=0,vmax=80)\n #cb = plt.colorbar(sm, ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n gdfAirnow = gpd.GeoDataFrame(df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n\n gdfAirnow = gdfAirnow.set_crs(\"EPSG:4326\")\n ax2.plot(ax=predDf, column='AirNOW_O3', marker='o', markersize=65, cmap=cmap, figsize=(20, 15), linewidths=1, edgecolors=\"black\")\n \n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n plt.title(plotTitle, fontdict={'fontsize': 35})\n print(\"Saving Prediction Map: \", plotTitle)\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n \n\n \n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave('/groups/ESS/aalnaim/cmaq/prediction.mp4', images, fps=10)\n\n\nprint(\"Generating prediction.gif ...\")\n# create .gif from .mp4 using FFmpeg\nos.system('module load ffmpeg; ffmpeg -i /groups/ESS/aalnaim/cmaq/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/prediction.mp4')\nprint(\"Done prediction.gif !!!\")\n\n\n", + "history_output" : "Traceback (most recent call last):\n File \"prediction_maps.py\", line 56, in \n predDf = ax1.plot(column='prediction', legend=False, figsize=(20, 18),\nAttributeError: 'numpy.ndarray' object has no attribute 'plot'\n", + "history_begin_time" : 1655216714591, + "history_end_time" : 1655217147125, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "UWEsMoTXouJc", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 25\n\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\n#df_actual = pd.read_csv(home + \"/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = 
df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\ntime_ = time_[:3]\n\nfig, ax = plt.subplots(2,2)\n\nfor t in time_:\n \n df = dfs[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = gdf.set_crs(\"EPSG:4326\")\n #gdf = gdf.to_crs(\"+proj=lcc +lat_1=2 +lat_2=33.000 +lat_0=45.000 +lon_0=-97.000 +x_0=-97.000 +y_0=40.000 +datum=NAD83 +units=m +no_defs\")\n\n gdf.plot(column='prediction', legend=False, figsize=(20, 18),\n cmap=cmap, vmin=0,vmax=80 )\n predDf = gdf.plot(column='prediction', legend=False, figsize=(20, 18),\n cmap=cmap, vmin=0,vmax=80, ax=ax )\n #cb = plt.colorbar(sm, ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n gdfAirnow = gpd.GeoDataFrame(df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n\n gdfAirnow = gdfAirnow.set_crs(\"EPSG:4326\")\n gdfAirnow.plot(ax=predDf, column='AirNOW_O3', marker='o', markersize=65, cmap=cmap, figsize=(20, 15), linewidths=1, edgecolors=\"black\")\n \n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n plt.title(plotTitle, fontdict={'fontsize': 35})\n print(\"Saving Prediction Map: \", plotTitle)\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n \n\n \n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave('/groups/ESS/aalnaim/cmaq/prediction.mp4', images, fps=10)\n\n\nprint(\"Generating prediction.gif ...\")\n# create .gif from .mp4 using FFmpeg\nos.system('module load ffmpeg; ffmpeg -i /groups/ESS/aalnaim/cmaq/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/prediction.mp4')\nprint(\"Done prediction.gif !!!\")\n\n\n", + "history_output" : "Traceback (most recent call last):\n File \"prediction_maps.py\", line 58, in \n predDf = gdf.plot(column='prediction', legend=False, figsize=(20, 18),\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/geopandas/plotting.py\", line 950, in __call__\n return plot_dataframe(data, *args, **kwargs)\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/geopandas/plotting.py\", line 667, in plot_dataframe\n ax.set_aspect(1 / np.cos(y_coord * np.pi / 180))\nAttributeError: 'numpy.ndarray' object has no attribute 'set_aspect'\n", + "history_begin_time" : 1655216432967, + "history_end_time" : 1655217147761, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "UZKKlDgYhiys", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as 
mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 25\n\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\n#df_actual = pd.read_csv(home + \"/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\nfig, ax = plt.subplots((2,1))\n\nfor t in time_:\n \n df = dfs[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = gdf.set_crs(\"EPSG:4326\")\n #gdf = gdf.to_crs(\"+proj=lcc +lat_1=2 +lat_2=33.000 +lat_0=45.000 +lon_0=-97.000 +x_0=-97.000 +y_0=40.000 +datum=NAD83 +units=m +no_defs\")\n\n gdf.plot(column='prediction', legend=False, figsize=(20, 18),\n cmap=cmap, vmin=0,vmax=80 )\n predDf = gdf.plot(column='prediction', legend=False, figsize=(20, 18),\n cmap=cmap, vmin=0,vmax=80, ax=ax )\n #cb = plt.colorbar(sm, ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n gdfAirnow = gpd.GeoDataFrame(df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n\n gdfAirnow = gdfAirnow.set_crs(\"EPSG:4326\")\n gdfAirnow.plot(ax=predDf, column='AirNOW_O3', marker='o', markersize=65, cmap=cmap, figsize=(20, 15), linewidths=1, edgecolors=\"black\")\n \n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n plt.title(plotTitle, fontdict={'fontsize': 35})\n print(\"Saving Prediction Map: \", plotTitle)\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_\" + str(t) + \".tif\")\n \n\n \n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Plots_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave('/groups/ESS/aalnaim/cmaq/prediction.mp4', images, fps=10)\n\n\nprint(\"Generating prediction.gif ...\")\n# create .gif from .mp4 using FFmpeg\nos.system('module load ffmpeg; ffmpeg -i /groups/ESS/aalnaim/cmaq/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/prediction.mp4')\nprint(\"Done prediction.gif 
!!!\")\n\n\n", + "history_output" : "Traceback (most recent call last):\n File \"prediction_maps.py\", line 43, in \n fig, ax = plt.subplots((2,1))\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/matplotlib/pyplot.py\", line 1435, in subplots\n axs = fig.subplots(nrows=nrows, ncols=ncols, sharex=sharex, sharey=sharey,\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/matplotlib/figure.py\", line 896, in subplots\n gs = self.add_gridspec(nrows, ncols, figure=self, **gridspec_kw)\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/matplotlib/figure.py\", line 1395, in add_gridspec\n gs = GridSpec(nrows=nrows, ncols=ncols, figure=self, **kwargs)\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/matplotlib/gridspec.py\", line 385, in __init__\n super().__init__(nrows, ncols,\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/matplotlib/gridspec.py\", line 49, in __init__\n raise ValueError(\nValueError: Number of rows must be a positive integer, not (2, 1)\n", + "history_begin_time" : 1655216296417, + "history_end_time" : 1655217148402, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "LyVEuDu3Z1eY", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 25\n\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\n\nfor t in time_:\n \n df = dfs[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = gdf.set_crs(\"EPSG:4326\")\n #gdf = gdf.to_crs(\"+proj=lcc +lat_1=2 +lat_2=33.000 +lat_0=45.000 +lon_0=-97.000 +x_0=-97.000 +y_0=40.000 +datum=NAD83 +units=m +no_defs\")\n\n gdf.plot(column='prediction', legend=False, figsize=(20, 18),\n cmap=cmap, vmin=0,vmax=80 )\n #cb = plt.colorbar(sm, ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n plt.title(plotTitle, fontdict={'fontsize': 35})\n print(\"Saving 
Prediction Map: \", plotTitle)\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_\" + str(t) + \".tif\")\n\n \nfor t_Airnow, t in zip(time_Airnow, time_):\n \n df = dfs[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = gdf.set_crs(\"EPSG:4326\")\n \n \n predDf = gdf.plot(column='prediction', legend=False, figsize=(20, 18),\n cmap=cmap, vmin=0,vmax=84 )\n \n df_Airnow = dfs_Airnow[t_Airnow]\n # Add individual AirNow stations in US with color representation.\n \n gdfAirnow = gpd.GeoDataFrame(df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n\n gdfAirnow = gdfAirnow.set_crs(\"EPSG:4326\")\n gdfAirnow.plot(ax=predDf, column='AirNOW_O3', marker='o', markersize=65, cmap=cmap, figsize=(20, 15), linewidths=1, edgecolors=\"black\")\n \n \n dateObj = datetime.strptime(str(t_Airnow), \"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n plt.title(\"AirNow Stations: \"+plotTitle, fontdict={'fontsize': 35})\n print(\"Saving AirNow Map: \", plotTitle)\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_\" + str(t_Airnow) + \".tif\")\n \n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave('/groups/ESS/aalnaim/cmaq/prediction.mp4', images, fps=10)\n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave(\"/groups/ESS/aalnaim/cmaq/predctionAirNow.mp4\", images, fps=10)\n\nprint(\"Generating prediction.gif ...\")\n# create .gif from .mp4 using FFmpeg\nos.system('module load ffmpeg; ffmpeg -i /groups/ESS/aalnaim/cmaq/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/prediction.mp4')\nprint(\"Done prediction.gif !!!\")\n\nprint(\"Generating predctionAirNow.gif ...\")\n# create .gif from .mp4 using FFmpeg\nos.system('module load ffmpeg; ffmpeg -i /groups/ESS/aalnaim/cmaq/predctionAirNow.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/predctionAirNow.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/predctionAirNow.mp4')\nprint(\"Done predctionAirNow.gif !!!\")\n\n", + "history_output" : "Saving Prediction Map: 2022-06-10 (Time: 12)\nSaving Prediction Map: 2022-06-10 (Time: 13)\nSaving Prediction Map: 2022-06-10 (Time: 14)\nSaving Prediction Map: 2022-06-10 (Time: 15)\nSaving Prediction Map: 2022-06-10 (Time: 16)\nSaving Prediction Map: 2022-06-10 (Time: 17)\nSaving Prediction Map: 2022-06-10 (Time: 18)\nSaving Prediction Map: 2022-06-10 (Time: 19)\nSaving Prediction Map: 2022-06-10 (Time: 20)\nSaving Prediction Map: 2022-06-10 (Time: 21)\nSaving Prediction Map: 2022-06-10 (Time: 22)\nSaving Prediction Map: 2022-06-10 (Time: 23)\nSaving Prediction Map: 2022-06-10 (Time: 0)\nSaving Prediction Map: 2022-06-10 (Time: 1)\nSaving Prediction Map: 2022-06-10 (Time: 2)\nSaving Prediction Map: 2022-06-10 (Time: 3)\nSaving Prediction Map: 2022-06-10 (Time: 4)\nSaving Prediction Map: 2022-06-10 (Time: 5)\nSaving Prediction Map: 2022-06-10 (Time: 6)\nSaving Prediction 
Map: 2022-06-10 (Time: 7)\n/home/aalnaim/CMAQAI/lib/python3.8/site-packages/geopandas/plotting.py:661: RuntimeWarning: More than 20 figures have been opened. Figures created through the pyplot interface (`matplotlib.pyplot.figure`) are retained until explicitly closed and may consume too much memory. (To control this warning, see the rcParam `figure.max_open_warning`).\n fig, ax = plt.subplots(figsize=figsize)\nSaving Prediction Map: 2022-06-10 (Time: 8)\nSaving Prediction Map: 2022-06-10 (Time: 9)\nSaving Prediction Map: 2022-06-10 (Time: 10)\nSaving Prediction Map: 2022-06-10 (Time: 11)\nSaving Prediction Map: 2022-06-09 (Time: 12)\nSaving Prediction Map: 2022-06-09 (Time: 13)\nSaving Prediction Map: 2022-06-09 (Time: 14)\nSaving Prediction Map: 2022-06-09 (Time: 15)\nSaving Prediction Map: 2022-06-09 (Time: 16)\nSaving Prediction Map: 2022-06-09 (Time: 17)\nSaving Prediction Map: 2022-06-09 (Time: 18)\nSaving Prediction Map: 2022-06-09 (Time: 19)\nSaving Prediction Map: 2022-06-09 (Time: 20)\nSaving Prediction Map: 2022-06-09 (Time: 21)\nSaving Prediction Map: 2022-06-09 (Time: 22)\nSaving Prediction Map: 2022-06-09 (Time: 23)\nSaving Prediction Map: 2022-06-09 (Time: 0)\nSaving Prediction Map: 2022-06-09 (Time: 1)\nSaving Prediction Map: 2022-06-09 (Time: 2)\nSaving Prediction Map: 2022-06-09 (Time: 3)\nSaving Prediction Map: 2022-06-09 (Time: 4)\nSaving Prediction Map: 2022-06-09 (Time: 5)\nSaving Prediction Map: 2022-06-09 (Time: 6)\nSaving Prediction Map: 2022-06-09 (Time: 7)\nSaving Prediction Map: 2022-06-09 (Time: 8)\nSaving Prediction Map: 2022-06-09 (Time: 9)\nSaving Prediction Map: 2022-06-09 (Time: 10)\nSaving Prediction Map: 2022-06-09 (Time: 11)\nSaving Prediction Map: 2022-06-08 (Time: 12)\nSaving Prediction Map: 2022-06-08 (Time: 13)\nSaving Prediction Map: 2022-06-08 (Time: 14)\nSaving Prediction Map: 2022-06-08 (Time: 15)\nSaving Prediction Map: 2022-06-08 (Time: 16)\nSaving Prediction Map: 2022-06-08 (Time: 17)\nSaving Prediction Map: 2022-06-08 (Time: 18)\nSaving Prediction Map: 2022-06-08 (Time: 19)\nSaving Prediction Map: 2022-06-08 (Time: 20)\nSaving Prediction Map: 2022-06-08 (Time: 21)\nSaving Prediction Map: 2022-06-08 (Time: 22)\nSaving Prediction Map: 2022-06-08 (Time: 23)\nSaving Prediction Map: 2022-06-08 (Time: 0)\nSaving Prediction Map: 2022-06-08 (Time: 1)\nSaving Prediction Map: 2022-06-08 (Time: 2)\nSaving Prediction Map: 2022-06-08 (Time: 3)\nSaving Prediction Map: 2022-06-08 (Time: 4)\nSaving Prediction Map: 2022-06-08 (Time: 5)\nSaving Prediction Map: 2022-06-08 (Time: 6)\nSaving Prediction Map: 2022-06-08 (Time: 7)\nSaving Prediction Map: 2022-06-08 (Time: 8)\nSaving Prediction Map: 2022-06-08 (Time: 9)\nSaving Prediction Map: 2022-06-08 (Time: 10)\nSaving Prediction Map: 2022-06-08 (Time: 11)\nSaving Prediction Map: 2022-06-07 (Time: 12)\nSaving Prediction Map: 2022-06-07 (Time: 13)\nSaving Prediction Map: 2022-06-07 (Time: 14)\nSaving Prediction Map: 2022-06-07 (Time: 15)\nSaving Prediction Map: 2022-06-07 (Time: 16)\nSaving Prediction Map: 2022-06-07 (Time: 17)\nSaving Prediction Map: 2022-06-07 (Time: 18)\nSaving Prediction Map: 2022-06-07 (Time: 19)\nSaving Prediction Map: 2022-06-07 (Time: 20)\nSaving Prediction Map: 2022-06-07 (Time: 21)\nSaving Prediction Map: 2022-06-07 (Time: 22)\nSaving Prediction Map: 2022-06-07 (Time: 23)\nSaving Prediction Map: 2022-06-07 (Time: 0)\nSaving Prediction Map: 2022-06-07 (Time: 1)\nSaving Prediction Map: 2022-06-07 (Time: 2)\nSaving Prediction Map: 2022-06-07 (Time: 3)\nSaving Prediction Map: 2022-06-07 
(Time: 4)\nSaving Prediction Map: 2022-06-07 (Time: 5)\nSaving Prediction Map: 2022-06-07 (Time: 6)\nSaving Prediction Map: 2022-06-07 (Time: 7)\nSaving Prediction Map: 2022-06-07 (Time: 8)\nSaving Prediction Map: 2022-06-07 (Time: 9)\nSaving Prediction Map: 2022-06-07 (Time: 10)\nSaving Prediction Map: 2022-06-07 (Time: 11)\nSaving Prediction Map: 2022-06-06 (Time: 12)\nSaving Prediction Map: 2022-06-06 (Time: 13)\nSaving Prediction Map: 2022-06-06 (Time: 14)\nSaving Prediction Map: 2022-06-06 (Time: 15)\nSaving Prediction Map: 2022-06-06 (Time: 16)\nSaving Prediction Map: 2022-06-06 (Time: 17)\nSaving Prediction Map: 2022-06-06 (Time: 18)\nSaving Prediction Map: 2022-06-06 (Time: 19)\nSaving Prediction Map: 2022-06-06 (Time: 20)\nSaving Prediction Map: 2022-06-06 (Time: 21)\nSaving Prediction Map: 2022-06-06 (Time: 22)\nSaving Prediction Map: 2022-06-06 (Time: 23)\nSaving Prediction Map: 2022-06-06 (Time: 0)\nSaving Prediction Map: 2022-06-06 (Time: 1)\nSaving Prediction Map: 2022-06-06 (Time: 2)\nSaving Prediction Map: 2022-06-06 (Time: 3)\nSaving Prediction Map: 2022-06-06 (Time: 4)\nSaving Prediction Map: 2022-06-06 (Time: 5)\nSaving Prediction Map: 2022-06-06 (Time: 6)\nSaving Prediction Map: 2022-06-06 (Time: 7)\nSaving Prediction Map: 2022-06-06 (Time: 8)\nSaving Prediction Map: 2022-06-06 (Time: 9)\nSaving Prediction Map: 2022-06-06 (Time: 10)\nSaving Prediction Map: 2022-06-06 (Time: 11)\nSaving Prediction Map: 2022-06-05 (Time: 12)\nSaving Prediction Map: 2022-06-05 (Time: 13)\nSaving Prediction Map: 2022-06-05 (Time: 14)\nSaving Prediction Map: 2022-06-05 (Time: 15)\nSaving Prediction Map: 2022-06-05 (Time: 16)\nSaving Prediction Map: 2022-06-05 (Time: 17)\nSaving Prediction Map: 2022-06-05 (Time: 18)\nSaving Prediction Map: 2022-06-05 (Time: 19)\nSaving Prediction Map: 2022-06-05 (Time: 20)\nSaving Prediction Map: 2022-06-05 (Time: 21)\nSaving Prediction Map: 2022-06-05 (Time: 22)\nSaving Prediction Map: 2022-06-05 (Time: 23)\nSaving Prediction Map: 2022-06-05 (Time: 0)\nSaving Prediction Map: 2022-06-05 (Time: 1)\nSaving Prediction Map: 2022-06-05 (Time: 2)\nSaving Prediction Map: 2022-06-05 (Time: 3)\nSaving Prediction Map: 2022-06-05 (Time: 4)\nSaving Prediction Map: 2022-06-05 (Time: 5)\nSaving Prediction Map: 2022-06-05 (Time: 6)\nSaving Prediction Map: 2022-06-05 (Time: 7)\nSaving Prediction Map: 2022-06-05 (Time: 8)\nSaving Prediction Map: 2022-06-05 (Time: 9)\nSaving Prediction Map: 2022-06-05 (Time: 10)\nSaving Prediction Map: 2022-06-05 (Time: 11)\nSaving Prediction Map: 2022-06-04 (Time: 12)\nSaving Prediction Map: 2022-06-04 (Time: 13)\nSaving Prediction Map: 2022-06-04 (Time: 14)\nSaving Prediction Map: 2022-06-04 (Time: 15)\nSaving Prediction Map: 2022-06-04 (Time: 16)\nSaving Prediction Map: 2022-06-04 (Time: 17)\nSaving Prediction Map: 2022-06-04 (Time: 18)\nSaving Prediction Map: 2022-06-04 (Time: 19)\nSaving Prediction Map: 2022-06-04 (Time: 20)\nSaving Prediction Map: 2022-06-04 (Time: 21)\nSaving Prediction Map: 2022-06-04 (Time: 22)\nSaving Prediction Map: 2022-06-04 (Time: 23)\nSaving Prediction Map: 2022-06-04 (Time: 0)\nSaving Prediction Map: 2022-06-04 (Time: 1)\nSaving Prediction Map: 2022-06-04 (Time: 2)\nSaving Prediction Map: 2022-06-04 (Time: 3)\nSaving Prediction Map: 2022-06-04 (Time: 4)\nSaving Prediction Map: 2022-06-04 (Time: 5)\nSaving Prediction Map: 2022-06-04 (Time: 6)\nSaving Prediction Map: 2022-06-04 (Time: 7)\nSaving Prediction Map: 2022-06-04 (Time: 8)\nSaving Prediction Map: 2022-06-04 (Time: 9)\nSaving Prediction Map: 2022-06-04 
(Time: 10)\nSaving Prediction Map: 2022-06-04 (Time: 11)\nSaving AirNow Map: 2022-06-10 (Time: 12)\nSaving AirNow Map: 2022-06-10 (Time: 13)\nSaving AirNow Map: 2022-06-10 (Time: 14)\nSaving AirNow Map: 2022-06-10 (Time: 15)\nSaving AirNow Map: 2022-06-10 (Time: 16)\nSaving AirNow Map: 2022-06-10 (Time: 17)\nSaving AirNow Map: 2022-06-10 (Time: 18)\nSaving AirNow Map: 2022-06-10 (Time: 19)\nSaving AirNow Map: 2022-06-10 (Time: 20)\nSaving AirNow Map: 2022-06-10 (Time: 21)\nSaving AirNow Map: 2022-06-10 (Time: 22)\nSaving AirNow Map: 2022-06-10 (Time: 23)\nSaving AirNow Map: 2022-06-10 (Time: 0)\nSaving AirNow Map: 2022-06-10 (Time: 1)\nSaving AirNow Map: 2022-06-10 (Time: 2)\nSaving AirNow Map: 2022-06-10 (Time: 3)\nSaving AirNow Map: 2022-06-10 (Time: 4)\nSaving AirNow Map: 2022-06-10 (Time: 5)\nSaving AirNow Map: 2022-06-10 (Time: 6)\nSaving AirNow Map: 2022-06-10 (Time: 7)\nSaving AirNow Map: 2022-06-10 (Time: 8)\nSaving AirNow Map: 2022-06-10 (Time: 9)\nSaving AirNow Map: 2022-06-10 (Time: 10)\nSaving AirNow Map: 2022-06-10 (Time: 11)\nSaving AirNow Map: 2022-06-09 (Time: 12)\nSaving AirNow Map: 2022-06-09 (Time: 13)\nSaving AirNow Map: 2022-06-09 (Time: 14)\nSaving AirNow Map: 2022-06-09 (Time: 15)\nSaving AirNow Map: 2022-06-09 (Time: 16)\nSaving AirNow Map: 2022-06-09 (Time: 17)\nSaving AirNow Map: 2022-06-09 (Time: 18)\nSaving AirNow Map: 2022-06-09 (Time: 19)\nSaving AirNow Map: 2022-06-09 (Time: 20)\nSaving AirNow Map: 2022-06-09 (Time: 21)\nSaving AirNow Map: 2022-06-09 (Time: 22)\nSaving AirNow Map: 2022-06-09 (Time: 23)\nSaving AirNow Map: 2022-06-09 (Time: 0)\nSaving AirNow Map: 2022-06-09 (Time: 1)\nSaving AirNow Map: 2022-06-09 (Time: 2)\nSaving AirNow Map: 2022-06-09 (Time: 3)\nSaving AirNow Map: 2022-06-09 (Time: 4)\nSaving AirNow Map: 2022-06-09 (Time: 5)\nSaving AirNow Map: 2022-06-09 (Time: 6)\nSaving AirNow Map: 2022-06-09 (Time: 7)\nSaving AirNow Map: 2022-06-09 (Time: 8)\nSaving AirNow Map: 2022-06-09 (Time: 9)\nSaving AirNow Map: 2022-06-09 (Time: 10)\nSaving AirNow Map: 2022-06-09 (Time: 11)\nSaving AirNow Map: 2022-06-08 (Time: 12)\nSaving AirNow Map: 2022-06-08 (Time: 13)\nSaving AirNow Map: 2022-06-08 (Time: 14)\nSaving AirNow Map: 2022-06-08 (Time: 15)\nSaving AirNow Map: 2022-06-08 (Time: 16)\nSaving AirNow Map: 2022-06-08 (Time: 17)\nSaving AirNow Map: 2022-06-08 (Time: 18)\nSaving AirNow Map: 2022-06-08 (Time: 19)\nSaving AirNow Map: 2022-06-08 (Time: 20)\nSaving AirNow Map: 2022-06-08 (Time: 21)\nSaving AirNow Map: 2022-06-08 (Time: 22)\nSaving AirNow Map: 2022-06-08 (Time: 23)\nSaving AirNow Map: 2022-06-08 (Time: 0)\nSaving AirNow Map: 2022-06-08 (Time: 1)\nSaving AirNow Map: 2022-06-08 (Time: 2)\nSaving AirNow Map: 2022-06-08 (Time: 3)\nSaving AirNow Map: 2022-06-08 (Time: 4)\nSaving AirNow Map: 2022-06-08 (Time: 5)\nSaving AirNow Map: 2022-06-08 (Time: 6)\nSaving AirNow Map: 2022-06-08 (Time: 7)\nSaving AirNow Map: 2022-06-08 (Time: 8)\nSaving AirNow Map: 2022-06-08 (Time: 9)\nSaving AirNow Map: 2022-06-08 (Time: 10)\nSaving AirNow Map: 2022-06-08 (Time: 11)\nSaving AirNow Map: 2022-06-07 (Time: 12)\nSaving AirNow Map: 2022-06-07 (Time: 13)\nSaving AirNow Map: 2022-06-07 (Time: 14)\nSaving AirNow Map: 2022-06-07 (Time: 15)\nSaving AirNow Map: 2022-06-07 (Time: 16)\nSaving AirNow Map: 2022-06-07 (Time: 17)\nSaving AirNow Map: 2022-06-07 (Time: 18)\nSaving AirNow Map: 2022-06-07 (Time: 19)\nSaving AirNow Map: 2022-06-07 (Time: 20)\nSaving AirNow Map: 2022-06-07 (Time: 21)\nSaving AirNow Map: 2022-06-07 (Time: 22)\nSaving AirNow Map: 2022-06-07 (Time: 
23)\nSaving AirNow Map: 2022-06-07 (Time: 0)\nSaving AirNow Map: 2022-06-07 (Time: 1)\nSaving AirNow Map: 2022-06-07 (Time: 2)\nSaving AirNow Map: 2022-06-07 (Time: 3)\nSaving AirNow Map: 2022-06-07 (Time: 4)\nSaving AirNow Map: 2022-06-07 (Time: 5)\nSaving AirNow Map: 2022-06-07 (Time: 6)\nSaving AirNow Map: 2022-06-07 (Time: 7)\nSaving AirNow Map: 2022-06-07 (Time: 8)\nSaving AirNow Map: 2022-06-07 (Time: 9)\nSaving AirNow Map: 2022-06-07 (Time: 10)\nSaving AirNow Map: 2022-06-07 (Time: 11)\nSaving AirNow Map: 2022-06-06 (Time: 12)\nSaving AirNow Map: 2022-06-06 (Time: 13)\nSaving AirNow Map: 2022-06-06 (Time: 14)\nSaving AirNow Map: 2022-06-06 (Time: 15)\nSaving AirNow Map: 2022-06-06 (Time: 16)\nSaving AirNow Map: 2022-06-06 (Time: 17)\nSaving AirNow Map: 2022-06-06 (Time: 18)\nSaving AirNow Map: 2022-06-06 (Time: 19)\nSaving AirNow Map: 2022-06-06 (Time: 20)\nSaving AirNow Map: 2022-06-06 (Time: 21)\nSaving AirNow Map: 2022-06-06 (Time: 22)\nSaving AirNow Map: 2022-06-06 (Time: 23)\nSaving AirNow Map: 2022-06-06 (Time: 0)\nSaving AirNow Map: 2022-06-06 (Time: 1)\nSaving AirNow Map: 2022-06-06 (Time: 2)\nSaving AirNow Map: 2022-06-06 (Time: 3)\nSaving AirNow Map: 2022-06-06 (Time: 4)\nSaving AirNow Map: 2022-06-06 (Time: 5)\nSaving AirNow Map: 2022-06-06 (Time: 6)\nSaving AirNow Map: 2022-06-06 (Time: 7)\nSaving AirNow Map: 2022-06-06 (Time: 8)\nSaving AirNow Map: 2022-06-06 (Time: 9)\nSaving AirNow Map: 2022-06-06 (Time: 10)\nSaving AirNow Map: 2022-06-06 (Time: 11)\nSaving AirNow Map: 2022-06-05 (Time: 12)\nSaving AirNow Map: 2022-06-05 (Time: 13)\nSaving AirNow Map: 2022-06-05 (Time: 14)\nSaving AirNow Map: 2022-06-05 (Time: 15)\nSaving AirNow Map: 2022-06-05 (Time: 16)\nSaving AirNow Map: 2022-06-05 (Time: 17)\nSaving AirNow Map: 2022-06-05 (Time: 18)\nSaving AirNow Map: 2022-06-05 (Time: 19)\nSaving AirNow Map: 2022-06-05 (Time: 20)\nSaving AirNow Map: 2022-06-05 (Time: 21)\nSaving AirNow Map: 2022-06-05 (Time: 22)\nSaving AirNow Map: 2022-06-05 (Time: 23)\nSaving AirNow Map: 2022-06-05 (Time: 0)\nSaving AirNow Map: 2022-06-05 (Time: 1)\nSaving AirNow Map: 2022-06-05 (Time: 2)\nSaving AirNow Map: 2022-06-05 (Time: 3)\nSaving AirNow Map: 2022-06-05 (Time: 4)\nSaving AirNow Map: 2022-06-05 (Time: 5)\nSaving AirNow Map: 2022-06-05 (Time: 6)\nSaving AirNow Map: 2022-06-05 (Time: 7)\nSaving AirNow Map: 2022-06-05 (Time: 8)\nSaving AirNow Map: 2022-06-05 (Time: 9)\nSaving AirNow Map: 2022-06-05 (Time: 10)\nSaving AirNow Map: 2022-06-05 (Time: 11)\nSaving AirNow Map: 2022-06-04 (Time: 12)\nSaving AirNow Map: 2022-06-04 (Time: 13)\nSaving AirNow Map: 2022-06-04 (Time: 14)\nSaving AirNow Map: 2022-06-04 (Time: 15)\nSaving AirNow Map: 2022-06-04 (Time: 16)\nSaving AirNow Map: 2022-06-04 (Time: 17)\nSaving AirNow Map: 2022-06-04 (Time: 18)\nSaving AirNow Map: 2022-06-04 (Time: 19)\nSaving AirNow Map: 2022-06-04 (Time: 20)\nSaving AirNow Map: 2022-06-04 (Time: 21)\nSaving AirNow Map: 2022-06-04 (Time: 22)\nSaving AirNow Map: 2022-06-04 (Time: 23)\nSaving AirNow Map: 2022-06-04 (Time: 0)\nSaving AirNow Map: 2022-06-04 (Time: 1)\nSaving AirNow Map: 2022-06-04 (Time: 2)\nSaving AirNow Map: 2022-06-04 (Time: 3)\nSaving AirNow Map: 2022-06-04 (Time: 4)\nSaving AirNow Map: 2022-06-04 (Time: 5)\nSaving AirNow Map: 2022-06-04 (Time: 6)\nSaving AirNow Map: 2022-06-04 (Time: 7)\nSaving AirNow Map: 2022-06-04 (Time: 8)\nSaving AirNow Map: 2022-06-04 (Time: 9)\nSaving AirNow Map: 2022-06-04 (Time: 10)\nSaving AirNow Map: 2022-06-04 (Time: 
11)\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060400.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060401.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060402.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060403.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060404.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060405.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060406.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060407.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060408.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060409.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060410.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060411.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060412.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060413.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060414.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060415.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060416.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060417.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060418.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060419.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060420.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060421.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060422.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060423.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060500.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060501.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060502.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060503.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060504.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060505.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060506.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060507.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060508.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060509.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060510.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060511.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060512.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060513.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060514.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060515.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060516.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060517.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060518.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060519.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060520.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060521.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060522.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060523.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060600.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060601.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060602.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060603.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060604.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060605.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060606.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060607.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060608.tif\n/groups/ESS/aaln
aim/cmaq/prediction_maps/CMAQ_2022060609.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060610.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060611.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060612.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060613.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060614.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060615.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060616.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060617.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060618.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060619.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060620.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060621.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060622.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060623.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060700.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060701.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060702.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060703.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060704.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060705.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060706.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060707.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060708.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060709.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060710.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060711.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060712.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060713.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060714.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060715.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060716.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060717.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060718.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060719.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060720.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060721.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060722.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060723.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060800.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060801.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060802.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060803.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060804.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060805.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060806.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060807.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060808.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060809.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060810.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060811.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060812.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060813.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060814.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060815.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060816.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060817.tif\n/groups/ESS/aalnaim/cmaq/prediction_m
aps/CMAQ_2022060818.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060819.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060820.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060821.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060822.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060823.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060900.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060901.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060902.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060903.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060904.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060905.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060906.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060907.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060908.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060909.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060910.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060911.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060912.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060913.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060914.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060915.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060916.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060917.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060918.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060919.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060920.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060921.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060922.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060923.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022061000.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022061001.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022061002.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022061003.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022061004.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022061005.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022061006.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022061007.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022061008.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022061009.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022061010.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022061011.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022061012.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022061013.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022061014.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022061015.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022061016.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022061017.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022061018.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022061019.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022061020.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022061021.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022061022.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022061023.tif\nIMAGEIO FFMPEG_WRITER WARNING: input image is not divisible by macro_block_size=16, resizing from (2000, 1800) to (2000, 1808) to ensure video compatibility with most codecs and players. 
To prevent resizing, make your input image divisible by the macro_block_size or set the macro_block_size to 1 (risking incompatibility).\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060400.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060401.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060402.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060403.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060404.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060405.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060406.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060407.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060408.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060409.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060410.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060411.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060412.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060413.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060414.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060415.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060416.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060417.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060418.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060419.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060420.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060421.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060422.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060423.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060500.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060501.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060502.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060503.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060504.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060505.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060506.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060507.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060508.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060509.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060510.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060511.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060512.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060513.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060514.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060515.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060516.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060517.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060518.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060519.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060520.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060521.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060522.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060523.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060600.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060601.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060602.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060603.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060604.tif\n/groups/ESS/aalnaim/cmaq/
prediction_maps/AirNow_2022060605.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060606.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060607.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060608.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060609.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060610.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060611.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060612.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060613.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060614.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060615.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060616.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060617.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060618.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060619.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060620.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060621.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060622.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060623.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060700.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060701.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060702.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060703.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060704.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060705.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060706.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060707.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060708.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060709.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060710.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060711.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060712.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060713.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060714.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060715.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060716.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060717.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060718.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060719.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060720.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060721.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060722.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060723.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060800.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060801.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060802.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060803.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060804.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060805.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060806.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060807.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060808.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060809.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060810.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060811.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060812.t
if\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060813.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060814.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060815.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060816.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060817.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060818.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060819.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060820.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060821.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060822.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060823.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060900.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060901.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060902.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060903.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060904.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060905.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060906.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060907.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060908.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060909.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060910.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060911.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060912.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060913.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060914.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060915.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060916.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060917.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060918.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060919.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060920.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060921.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060922.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060923.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022061000.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022061001.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022061002.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022061003.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022061004.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022061005.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022061006.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022061007.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022061008.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022061009.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022061010.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022061011.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022061012.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022061013.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022061014.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022061015.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022061016.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022061017.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022061018.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022061019.tif\n/groups/ESS/aalnaim/cmaq/predic
tion_maps/AirNow_2022061020.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022061021.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022061022.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022061023.tif\nIMAGEIO FFMPEG_WRITER WARNING: input image is not divisible by macro_block_size=16, resizing from (2000, 1800) to (2000, 1808) to ensure video compatibility with most codecs and players. To prevent resizing, make your input image divisible by the macro_block_size or set the macro_block_size to 1 (risking incompatibility).\nGenerating prediction.gif ...\nffmpeg version 4.2.2 Copyright (c) 2000-2019 the FFmpeg developers\n built with gcc 9.3.0 (GCC)\n configuration: --prefix=/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0/ffmpeg-4.2.2-ey --enable-pic --cc=/opt/sw/spack/0.16.0/lib/spack/env/gcc/gcc --cxx=/opt/sw/spack/0.16.0/lib/spack/env/gcc/g++ --enable-libxcb --enable-libxcb-shape --enable-libxcb-shm --enable-libxcb-xfixes --enable-xlib --disable-libfontconfig --disable-libfreetype --disable-libfribidi --enable-bzlib --enable-libmp3lame --enable-libopenjpeg --disable-libopus --enable-libspeex --enable-libvorbis --enable-avresample --enable-openssl --enable-shared --disable-libzmq --disable-libssh --disable-libwebp --disable-lzma --disable-libsnappy --enable-sdl2 --enable-libaom\n libavutil 56. 31.100 / 56. 31.100\n libavcodec 58. 54.100 / 58. 54.100\n libavformat 58. 29.100 / 58. 29.100\n libavdevice 58. 8.100 / 58. 8.100\n libavfilter 7. 57.100 / 7. 57.100\n libavresample 4. 0. 0 / 4. 0. 0\n libswscale 5. 5.100 / 5. 5.100\n libswresample 3. 5.100 / 3. 5.100\nInput #0, mov,mp4,m4a,3gp,3g2,mj2, from '/groups/ESS/aalnaim/cmaq/prediction.mp4':\n Metadata:\n major_brand : isom\n minor_version : 512\n compatible_brands: isomiso2avc1mp41\n encoder : Lavf58.29.100\n Duration: 00:00:16.80, start: 0.000000, bitrate: 7654 kb/s\n Stream #0:0(und): Video: h264 (High) (avc1 / 0x31637661), yuv420p, 2000x1808, 7653 kb/s, 10 fps, 10 tbr, 10240 tbn, 20 tbc (default)\n Metadata:\n handler_name : VideoHandler\nStream mapping:\n Stream #0:0 -> #0:0 (h264 (native) -> gif (native))\nPress [q] to stop, [?] 
for help\nOutput #0, gif, to '/groups/ESS/aalnaim/cmaq/prediction.gif':\n Metadata:\n major_brand : isom\n minor_version : 512\n compatible_brands: isomiso2avc1mp41\n encoder : Lavf58.29.100\n Stream #0:0(und): Video: gif, pal8, 2000x1808, q=2-31, 200 kb/s, 10 fps, 100 tbn, 10 tbc (default)\n Metadata:\n handler_name : VideoHandler\n encoder : Lavc58.54.100 gif\nframe= 0 fps=0.0 q=0.0 size= 0kB time=00:00:00.00 bitrate=N/A speed= 0x \nframe= 0 fps=0.0 q=0.0 size= 0kB time=00:00:00.00 bitrate=N/A speed= 0x \nframe= 0 fps=0.0 q=0.0 size= 0kB time=00:00:00.00 bitrate=N/A speed= 0x \nframe= 0 fps=0.0 q=0.0 size= 0kB time=00:00:00.00 bitrate=N/A speed= 0x \nframe= 0 fps=0.0 q=0.0 size= 0kB time=00:00:00.00 bitrate=N/A speed= 0x \nframe= 0 fps=0.0 q=0.0 size= 0kB time=00:00:00.00 bitrate=N/A speed= 0x \nframe= 0 fps=0.0 q=0.0 size= 0kB time=00:00:00.00 bitrate=N/A speed= 0x \nframe= 0 fps=0.0 q=0.0 size= 0kB time=00:00:00.00 bitrate=N/A speed= 0x \nframe= 0 fps=0.0 q=0.0 size= 0kB time=00:00:00.00 bitrate=N/A speed= 0x \nframe= 0 fps=0.0 q=0.0 size= 0kB time=00:00:00.00 bitrate=N/A speed= 0x \n\u001B[1;32m[Parsed_palettegen_2 @ 0x1694bc0] \u001B[0m255(+1) colors generated out of 477188 colors; ratio=0.000534\n\u001B[1;32m[Parsed_palettegen_2 @ 0x1694bc0] \u001B[0m\u001B[0;33mDupped color: FF48A66C\n\u001B[0mframe= 1 fps=0.2 q=-0.0 size= 0kB time=00:00:00.01 bitrate= 0.0kbits/s speed=0.00169x \nframe= 6 fps=0.9 q=-0.0 size= 3072kB time=00:00:00.51 bitrate=49344.8kbits/s speed=0.078x \nframe= 10 fps=1.4 q=-0.0 size= 5376kB time=00:00:00.91 bitrate=48395.8kbits/s speed=0.128x \nframe= 14 fps=1.8 q=-0.0 size= 7424kB time=00:00:01.31 bitrate=46425.5kbits/s speed=0.17x \nframe= 18 fps=2.2 q=-0.0 size= 9728kB time=00:00:01.71 bitrate=46603.4kbits/s speed=0.207x \nframe= 22 fps=2.5 q=-0.0 size= 12288kB time=00:00:02.11 bitrate=47707.7kbits/s speed=0.237x \nframe= 26 fps=2.7 q=-0.0 size= 15104kB time=00:00:02.51 bitrate=49295.6kbits/s speed=0.265x \nframe= 30 fps=3.0 q=-0.0 size= 17408kB time=00:00:02.91 bitrate=49005.6kbits/s speed=0.29x \nframe= 34 fps=3.2 q=-0.0 size= 19712kB time=00:00:03.31 bitrate=48785.7kbits/s speed=0.313x \nframe= 38 fps=3.4 q=-0.0 size= 21760kB time=00:00:03.71 bitrate=48048.0kbits/s speed=0.334x \nframe= 42 fps=3.6 q=-0.0 size= 24064kB time=00:00:04.11 bitrate=47964.1kbits/s speed=0.352x \nframe= 46 fps=3.7 q=-0.0 size= 26624kB time=00:00:04.51 bitrate=48360.0kbits/s speed=0.367x \nframe= 50 fps=3.9 q=-0.0 size= 29184kB time=00:00:04.91 bitrate=48691.5kbits/s speed=0.381x \nframe= 54 fps=4.0 q=-0.0 size= 31744kB time=00:00:05.31 bitrate=48973.0kbits/s speed=0.396x \nframe= 58 fps=4.2 q=-0.0 size= 34048kB time=00:00:05.71 bitrate=48847.8kbits/s speed=0.409x \nframe= 62 fps=4.3 q=-0.0 size= 36096kB time=00:00:06.11 bitrate=48395.8kbits/s speed=0.42x \nframe= 66 fps=4.4 q=-0.0 size= 38400kB time=00:00:06.51 bitrate=48321.5kbits/s speed=0.432x \nframe= 70 fps=4.5 q=-0.0 size= 40960kB time=00:00:06.91 bitrate=48559.2kbits/s speed=0.44x \nframe= 74 fps=4.5 q=-0.0 size= 43520kB time=00:00:07.31 bitrate=48771.0kbits/s speed=0.448x \nframe= 78 fps=4.6 q=-0.0 size= 46080kB time=00:00:07.71 bitrate=48960.7kbits/s speed=0.457x \nframe= 82 fps=4.7 q=-0.0 size= 48384kB time=00:00:08.11 bitrate=48873.2kbits/s speed=0.465x \nframe= 86 fps=4.8 q=-0.0 size= 50432kB time=00:00:08.51 bitrate=48547.5kbits/s speed=0.473x \nframe= 90 fps=4.9 q=-0.0 size= 52736kB time=00:00:08.91 bitrate=48486.3kbits/s speed=0.48x \nframe= 94 fps=4.9 q=-0.0 size= 55296kB time=00:00:09.31 bitrate=48655.7kbits/s 
speed=0.486x \nframe= 98 fps=5.0 q=-0.0 size= 58112kB time=00:00:09.71 bitrate=49027.1kbits/s speed=0.491x \nframe= 102 fps=5.0 q=-0.0 size= 60416kB time=00:00:10.11 bitrate=48954.3kbits/s speed=0.498x \nframe= 106 fps=5.1 q=-0.0 size= 62720kB time=00:00:10.51 bitrate=48887.0kbits/s speed=0.504x \nframe= 110 fps=5.1 q=-0.0 size= 64768kB time=00:00:10.91 bitrate=48632.4kbits/s speed=0.509x \nframe= 114 fps=5.2 q=-0.0 size= 67072kB time=00:00:11.31 bitrate=48581.2kbits/s speed=0.515x \nframe= 118 fps=5.2 q=-0.0 size= 69632kB time=00:00:11.71 bitrate=48712.7kbits/s speed=0.519x \nframe= 122 fps=5.3 q=-0.0 size= 72448kB time=00:00:12.11 bitrate=49008.6kbits/s speed=0.523x \nframe= 126 fps=5.3 q=-0.0 size= 74752kB time=00:00:12.51 bitrate=48950.3kbits/s speed=0.528x \nframe= 130 fps=5.4 q=-0.0 size= 77056kB time=00:00:12.91 bitrate=48895.6kbits/s speed=0.533x \nframe= 134 fps=5.4 q=-0.0 size= 79104kB time=00:00:13.31 bitrate=48686.7kbits/s speed=0.537x \nframe= 138 fps=5.4 q=-0.0 size= 81408kB time=00:00:13.71 bitrate=48642.9kbits/s speed=0.541x \nframe= 142 fps=5.5 q=-0.0 size= 83968kB time=00:00:14.11 bitrate=48750.2kbits/s speed=0.544x \nframe= 146 fps=5.5 q=-0.0 size= 86784kB time=00:00:14.51 bitrate=48996.2kbits/s speed=0.547x \nframe= 150 fps=5.5 q=-0.0 size= 89088kB time=00:00:14.91 bitrate=48947.6kbits/s speed=0.55x \nframe= 154 fps=5.6 q=-0.0 size= 91392kB time=00:00:15.31 bitrate=48901.6kbits/s speed=0.553x \nframe= 158 fps=5.6 q=-0.0 size= 93440kB time=00:00:15.71 bitrate=48724.4kbits/s speed=0.556x \nframe= 162 fps=5.6 q=-0.0 size= 95744kB time=00:00:16.11 bitrate=48686.2kbits/s speed=0.56x \nframe= 166 fps=5.6 q=-0.0 size= 98560kB time=00:00:16.51 bitrate=48903.9kbits/s speed=0.562x \nframe= 167 fps=5.6 q=-0.0 Lsize= 99903kB time=00:00:16.61 bitrate=49271.8kbits/s speed=0.559x \nvideo:99903kB audio:0kB subtitle:0kB other streams:0kB global headers:0kB muxing overhead: 0.000020%\nDone prediction.gif !!!\nGenerating predctionAirNow.gif ...\nffmpeg version 4.2.2 Copyright (c) 2000-2019 the FFmpeg developers\n built with gcc 9.3.0 (GCC)\n configuration: --prefix=/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0/ffmpeg-4.2.2-ey --enable-pic --cc=/opt/sw/spack/0.16.0/lib/spack/env/gcc/gcc --cxx=/opt/sw/spack/0.16.0/lib/spack/env/gcc/g++ --enable-libxcb --enable-libxcb-shape --enable-libxcb-shm --enable-libxcb-xfixes --enable-xlib --disable-libfontconfig --disable-libfreetype --disable-libfribidi --enable-bzlib --enable-libmp3lame --enable-libopenjpeg --disable-libopus --enable-libspeex --enable-libvorbis --enable-avresample --enable-openssl --enable-shared --disable-libzmq --disable-libssh --disable-libwebp --disable-lzma --disable-libsnappy --enable-sdl2 --enable-libaom\n libavutil 56. 31.100 / 56. 31.100\n libavcodec 58. 54.100 / 58. 54.100\n libavformat 58. 29.100 / 58. 29.100\n libavdevice 58. 8.100 / 58. 8.100\n libavfilter 7. 57.100 / 7. 57.100\n libavresample 4. 0. 0 / 4. 0. 0\n libswscale 5. 5.100 / 5. 5.100\n libswresample 3. 5.100 / 3. 5.100\nInput #0, mov,mp4,m4a,3gp,3g2,mj2, from '/groups/ESS/aalnaim/cmaq/predctionAirNow.mp4':\n Metadata:\n major_brand : isom\n minor_version : 512\n compatible_brands: isomiso2avc1mp41\n encoder : Lavf58.29.100\n Duration: 00:00:16.80, start: 0.000000, bitrate: 8060 kb/s\n Stream #0:0(und): Video: h264 (High) (avc1 / 0x31637661), yuv420p, 2000x1808, 8059 kb/s, 10 fps, 10 tbr, 10240 tbn, 20 tbc (default)\n Metadata:\n handler_name : VideoHandler\nStream mapping:\n Stream #0:0 -> #0:0 (h264 (native) -> gif (native))\nPress [q] to stop, [?] 
for help\nOutput #0, gif, to '/groups/ESS/aalnaim/cmaq/predctionAirNow.gif':\n Metadata:\n major_brand : isom\n minor_version : 512\n compatible_brands: isomiso2avc1mp41\n encoder : Lavf58.29.100\n Stream #0:0(und): Video: gif, pal8, 2000x1808, q=2-31, 200 kb/s, 10 fps, 100 tbn, 10 tbc (default)\n Metadata:\n handler_name : VideoHandler\n encoder : Lavc58.54.100 gif\nframe= 0 fps=0.0 q=0.0 size= 0kB time=00:00:00.00 bitrate=N/A speed= 0x \nframe= 0 fps=0.0 q=0.0 size= 0kB time=00:00:00.00 bitrate=N/A speed= 0x \nframe= 0 fps=0.0 q=0.0 size= 0kB time=00:00:00.00 bitrate=N/A speed= 0x \nframe= 0 fps=0.0 q=0.0 size= 0kB time=00:00:00.00 bitrate=N/A speed= 0x \nframe= 0 fps=0.0 q=0.0 size= 0kB time=00:00:00.00 bitrate=N/A speed= 0x \nframe= 0 fps=0.0 q=0.0 size= 0kB time=00:00:00.00 bitrate=N/A speed= 0x \nframe= 0 fps=0.0 q=0.0 size= 0kB time=00:00:00.00 bitrate=N/A speed= 0x \nframe= 0 fps=0.0 q=0.0 size= 0kB time=00:00:00.00 bitrate=N/A speed= 0x \nframe= 0 fps=0.0 q=0.0 size= 0kB time=00:00:00.00 bitrate=N/A speed= 0x \nframe= 0 fps=0.0 q=0.0 size= 0kB time=00:00:00.00 bitrate=N/A speed= 0x \nframe= 0 fps=0.0 q=0.0 size= 0kB time=00:00:00.00 bitrate=N/A speed= 0x \nframe= 0 fps=0.0 q=0.0 size= 0kB time=00:00:00.00 bitrate=N/A speed= 0x \n\u001B[1;32m[Parsed_palettegen_2 @ 0x235ef00] \u001B[0m255(+1) colors generated out of 1018539 colors; ratio=0.000250\nframe= 1 fps=0.1 q=-0.0 size= 0kB time=00:00:00.01 bitrate= 0.0kbits/s speed=0.00131x \nframe= 5 fps=0.6 q=-0.0 size= 2560kB time=00:00:00.41 bitrate=51150.0kbits/s speed=0.0501x \nframe= 8 fps=0.9 q=-0.0 size= 4352kB time=00:00:00.71 bitrate=50213.5kbits/s speed=0.0816x \nframe= 11 fps=1.2 q=-0.0 size= 6144kB time=00:00:01.01 bitrate=49833.3kbits/s speed=0.11x \nframe= 14 fps=1.4 q=-0.0 size= 7936kB time=00:00:01.31 bitrate=49627.3kbits/s speed=0.135x \nframe= 17 fps=1.7 q=-0.0 size= 9728kB time=00:00:01.61 bitrate=49498.0kbits/s speed=0.158x \nframe= 20 fps=1.9 q=-0.0 size= 11520kB time=00:00:01.91 bitrate=49409.3kbits/s speed=0.177x \nframe= 23 fps=2.0 q=-0.0 size= 13568kB time=00:00:02.21 bitrate=50293.7kbits/s speed=0.195x \nframe= 26 fps=2.2 q=-0.0 size= 15616kB time=00:00:02.51 bitrate=50966.6kbits/s speed=0.212x \nframe= 30 fps=2.4 q=-0.0 size= 18176kB time=00:00:02.91 bitrate=51167.6kbits/s speed=0.233x \nframe= 33 fps=2.5 q=-0.0 size= 19968kB time=00:00:03.21 bitrate=50958.8kbits/s speed=0.247x \nframe= 36 fps=2.7 q=-0.0 size= 21760kB time=00:00:03.51 bitrate=50785.7kbits/s speed=0.259x \nframe= 39 fps=2.8 q=-0.0 size= 23552kB time=00:00:03.81 bitrate=50639.9kbits/s speed=0.271x \nframe= 42 fps=2.9 q=-0.0 size= 25344kB time=00:00:04.11 bitrate=50515.3kbits/s speed=0.282x \nframe= 45 fps=3.0 q=-0.0 size= 27392kB time=00:00:04.41 bitrate=50883.3kbits/s speed=0.292x \nframe= 48 fps=3.1 q=-0.0 size= 29184kB time=00:00:04.71 bitrate=50759.1kbits/s speed= 0.3x \nframe= 51 fps=3.1 q=-0.0 size= 31232kB time=00:00:05.01 bitrate=51068.4kbits/s speed=0.309x \nframe= 55 fps=3.3 q=-0.0 size= 33792kB time=00:00:05.41 bitrate=51169.0kbits/s speed=0.321x \nframe= 58 fps=3.3 q=-0.0 size= 35584kB time=00:00:05.71 bitrate=51051.5kbits/s speed=0.328x \nframe= 61 fps=3.4 q=-0.0 size= 37376kB time=00:00:06.01 bitrate=50945.8kbits/s speed=0.336x \nframe= 65 fps=3.5 q=-0.0 size= 39680kB time=00:00:06.41 bitrate=50711.2kbits/s speed=0.346x \nframe= 68 fps=3.6 q=-0.0 size= 41472kB time=00:00:06.71 bitrate=50631.7kbits/s speed=0.352x \nframe= 71 fps=3.6 q=-0.0 size= 43520kB time=00:00:07.01 bitrate=50858.2kbits/s speed=0.358x \nframe= 74 fps=3.7 q=-0.0 
size= 45568kB time=00:00:07.31 bitrate=51066.1kbits/s speed=0.363x \nframe= 78 fps=3.8 q=-0.0 size= 48128kB time=00:00:07.71 bitrate=51136.8kbits/s speed=0.371x \nframe= 82 fps=3.8 q=-0.0 size= 50688kB time=00:00:08.11 bitrate=51200.5kbits/s speed=0.379x \nframe= 85 fps=3.9 q=-0.0 size= 52224kB time=00:00:08.41 bitrate=50870.3kbits/s speed=0.384x \nframe= 89 fps=3.9 q=-0.0 size= 54528kB time=00:00:08.81 bitrate=50703.0kbits/s speed=0.391x \nframe= 93 fps=4.0 q=-0.0 size= 57344kB time=00:00:09.21 bitrate=51005.7kbits/s speed=0.397x \nframe= 96 fps=4.0 q=-0.0 size= 59136kB time=00:00:09.51 bitrate=50940.3kbits/s speed=0.401x \nframe= 99 fps=4.1 q=-0.0 size= 61184kB time=00:00:09.81 bitrate=51092.7kbits/s speed=0.405x \nframe= 103 fps=4.1 q=-0.0 size= 63744kB time=00:00:10.21 bitrate=51145.0kbits/s speed=0.41x \nframe= 107 fps=4.2 q=-0.0 size= 66304kB time=00:00:10.61 bitrate=51193.4kbits/s speed=0.415x \nframe= 111 fps=4.2 q=-0.0 size= 68352kB time=00:00:11.01 bitrate=50857.4kbits/s speed=0.42x \nframe= 115 fps=4.3 q=-0.0 size= 70912kB time=00:00:11.41 bitrate=50912.5kbits/s speed=0.425x \nframe= 118 fps=4.3 q=-0.0 size= 72960kB time=00:00:11.71 bitrate=51040.8kbits/s speed=0.428x \nframe= 121 fps=4.3 q=-0.0 size= 74752kB time=00:00:12.01 bitrate=50988.2kbits/s speed=0.431x \nframe= 125 fps=4.4 q=-0.0 size= 77568kB time=00:00:12.41 bitrate=51203.6kbits/s speed=0.435x \nframe= 129 fps=4.4 q=-0.0 size= 79872kB time=00:00:12.81 bitrate=51078.2kbits/s speed=0.439x \nframe= 133 fps=4.5 q=-0.0 size= 82176kB time=00:00:13.21 bitrate=50960.3kbits/s speed=0.443x \nframe= 137 fps=4.5 q=-0.0 size= 84480kB time=00:00:13.61 bitrate=50849.4kbits/s speed=0.447x \nframe= 141 fps=4.5 q=-0.0 size= 87296kB time=00:00:14.01 bitrate=51044.2kbits/s speed=0.45x \nframe= 144 fps=4.6 q=-0.0 size= 89088kB time=00:00:14.31 bitrate=50999.9kbits/s speed=0.452x \nframe= 148 fps=4.6 q=-0.0 size= 91904kB time=00:00:14.71 bitrate=51181.3kbits/s speed=0.456x \nframe= 152 fps=4.6 q=-0.0 size= 94464kB time=00:00:15.11 bitrate=51214.4kbits/s speed=0.459x \nframe= 156 fps=4.6 q=-0.0 size= 96768kB time=00:00:15.51 bitrate=51110.5kbits/s speed=0.462x \nframe= 160 fps=4.7 q=-0.0 size= 99072kB time=00:00:15.91 bitrate=51011.8kbits/s speed=0.465x \nframe= 164 fps=4.7 q=-0.0 size= 101632kB time=00:00:16.31 bitrate=51046.6kbits/s speed=0.467x \nframe= 167 fps=4.7 q=-0.0 size= 103424kB time=00:00:16.61 bitrate=51008.4kbits/s speed=0.469x \nframe= 167 fps=4.7 q=-0.0 Lsize= 104349kB time=00:00:16.61 bitrate=51464.4kbits/s speed=0.466x \nvideo:104348kB audio:0kB subtitle:0kB other streams:0kB global headers:0kB muxing overhead: 0.000019%\nDone predctionAirNow.gif !!!\n", + "history_begin_time" : 1655161520207, + "history_end_time" : 1655217148977, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "f19XFqsqYdI4", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 25\n\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() 
for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv(home + \"/cmaq/testing.csv\")\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\n\nfor t in time_:\n \n df = dfs[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = gdf.set_crs(\"EPSG:4326\")\n #gdf = gdf.to_crs(\"+proj=lcc +lat_1=2 +lat_2=33.000 +lat_0=45.000 +lon_0=-97.000 +x_0=-97.000 +y_0=40.000 +datum=NAD83 +units=m +no_defs\")\n\n gdf.plot(column='CMAQ12KM_O3(ppb)', legend=False, figsize=(20, 18),\n cmap=cmap, vmin=0,vmax=80 )\n #cb = plt.colorbar(sm, ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, \"Actual CMAQ: %Y-%m-%d (Time: %-H)\")\n plt.title(plotTitle, fontdict={'fontsize': 35})\n print(\"Saving Prediction Map: \", plotTitle)\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/ActualCMAQ_\" + str(t) + \".tif\")\n\n \nfor t_Airnow, t in zip(time_Airnow, time_):\n \n df = dfs[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = gdf.set_crs(\"EPSG:4326\")\n \n \n predDf = gdf.plot(column='CMAQ12KM_O3(ppb)', legend=False, figsize=(20, 18),\n cmap=cmap, vmin=0,vmax=84 )\n \n df_Airnow = dfs_Airnow[t_Airnow]\n # Add individual AirNow stations in US with color representation.\n \n gdfAirnow = gpd.GeoDataFrame(df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n\n gdfAirnow = gdfAirnow.set_crs(\"EPSG:4326\")\n gdfAirnow.plot(ax=predDf, column='AirNOW_O3', marker='o', markersize=65, cmap=cmap, figsize=(20, 15), linewidths=1, edgecolors=\"black\")\n \n \n dateObj = datetime.strptime(str(t_Airnow), \"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n plt.title(\"Actual AirNow Stations: \"+plotTitle, fontdict={'fontsize': 35})\n print(\"Saving AirNow/Map: \", plotTitle)\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/ActualAirNow_\" + str(t_Airnow) + \".tif\")\n \n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/ActualCMAQ_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave('/groups/ESS/aalnaim/cmaq/ActualCMAQ.mp4', images, fps=10)\n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/ActualAirNow_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave(\"/groups/ESS/aalnaim/cmaq/ActualAirNow.mp4\", images, fps=10)\n\nprint(\"Generating prediction.gif ...\")\n# create .gif from .mp4 using FFmpeg\nos.system('module load ffmpeg; 
ffmpeg -i /groups/ESS/aalnaim/cmaq/ActualCMAQ.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/ActualCMAQ.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/ActualCMAQ.mp4')\nprint(\"Done prediction.gif !!!\")\n\nprint(\"Generating predctionAirNow.gif ...\")\n# create .gif from .mp4 using FFmpeg\nos.system('module load ffmpeg; ffmpeg -i /groups/ESS/aalnaim/cmaq/ActualAirNow.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/ActualAirNow.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/ActualAirNow.mp4')\nprint(\"Done predctionAirNow.gif !!!\")\n\n", + "history_output" : "Saving Prediction Map: Actual CMAQ: 2022-06-10 (Time: 12)\nSaving Prediction Map: Actual CMAQ: 2022-06-10 (Time: 13)\nSaving Prediction Map: Actual CMAQ: 2022-06-10 (Time: 14)\nSaving Prediction Map: Actual CMAQ: 2022-06-10 (Time: 15)\nSaving Prediction Map: Actual CMAQ: 2022-06-10 (Time: 16)\nSaving Prediction Map: Actual CMAQ: 2022-06-10 (Time: 17)\nSaving Prediction Map: Actual CMAQ: 2022-06-10 (Time: 18)\nSaving Prediction Map: Actual CMAQ: 2022-06-10 (Time: 19)\nSaving Prediction Map: Actual CMAQ: 2022-06-10 (Time: 20)\nSaving Prediction Map: Actual CMAQ: 2022-06-10 (Time: 21)\nSaving Prediction Map: Actual CMAQ: 2022-06-10 (Time: 22)\nSaving Prediction Map: Actual CMAQ: 2022-06-10 (Time: 23)\nSaving Prediction Map: Actual CMAQ: 2022-06-10 (Time: 0)\nSaving Prediction Map: Actual CMAQ: 2022-06-10 (Time: 1)\nSaving Prediction Map: Actual CMAQ: 2022-06-10 (Time: 2)\nSaving Prediction Map: Actual CMAQ: 2022-06-10 (Time: 3)\nSaving Prediction Map: Actual CMAQ: 2022-06-10 (Time: 4)\nSaving Prediction Map: Actual CMAQ: 2022-06-10 (Time: 5)\nSaving Prediction Map: Actual CMAQ: 2022-06-10 (Time: 6)\nSaving Prediction Map: Actual CMAQ: 2022-06-10 (Time: 7)\n/home/aalnaim/CMAQAI/lib/python3.8/site-packages/geopandas/plotting.py:661: RuntimeWarning: More than 20 figures have been opened. Figures created through the pyplot interface (`matplotlib.pyplot.figure`) are retained until explicitly closed and may consume too much memory. 
(To control this warning, see the rcParam `figure.max_open_warning`).\n fig, ax = plt.subplots(figsize=figsize)\n[... hourly 'Saving Prediction Map: Actual CMAQ' and 'Saving AirNow/Map' messages for 2022-06-04 through 2022-06-10; sorted lists of the saved ActualCMAQ_*.tif and ActualAirNow_*.tif frames; two IMAGEIO FFMPEG_WRITER warnings about resizing (2000, 1800) frames to (2000, 1808) to satisfy macro_block_size=16; the ffmpeg 4.2.2 encode of ActualCMAQ.gif ending with 'Done prediction.gif !!!'; the ffmpeg banner and initial frame progress for the ActualAirNow.gif encode ...]
\nframe= 89 fps=4.3 q=-0.0 size= 52480kB time=00:00:08.81 bitrate=48798.7kbits/s speed=0.429x \nframe= 93 fps=4.4 q=-0.0 size= 54784kB time=00:00:09.21 bitrate=48728.6kbits/s speed=0.435x \nframe= 97 fps=4.4 q=-0.0 size= 57088kB time=00:00:09.61 bitrate=48664.4kbits/s speed=0.44x \nframe= 101 fps=4.5 q=-0.0 size= 59648kB time=00:00:10.01 bitrate=48814.8kbits/s speed=0.447x \nframe= 105 fps=4.6 q=-0.0 size= 62208kB time=00:00:10.41 bitrate=48953.7kbits/s speed=0.452x \nframe= 109 fps=4.6 q=-0.0 size= 64512kB time=00:00:10.81 bitrate=48888.3kbits/s speed=0.458x \nframe= 113 fps=4.7 q=-0.0 size= 66816kB time=00:00:11.21 bitrate=48827.5kbits/s speed=0.463x \nframe= 117 fps=4.7 q=-0.0 size= 69376kB time=00:00:11.61 bitrate=48951.6kbits/s speed=0.467x \nframe= 121 fps=4.7 q=-0.0 size= 71680kB time=00:00:12.01 bitrate=48892.8kbits/s speed=0.471x \nframe= 125 fps=4.8 q=-0.0 size= 74240kB time=00:00:12.41 bitrate=49006.8kbits/s speed=0.475x \nframe= 129 fps=4.8 q=-0.0 size= 76544kB time=00:00:12.81 bitrate=48949.9kbits/s speed=0.479x \nframe= 133 fps=4.9 q=-0.0 size= 78848kB time=00:00:13.21 bitrate=48896.5kbits/s speed=0.482x \nframe= 137 fps=4.9 q=-0.0 size= 81408kB time=00:00:13.61 bitrate=49000.3kbits/s speed=0.486x \nframe= 141 fps=4.9 q=-0.0 size= 83968kB time=00:00:14.01 bitrate=49098.2kbits/s speed=0.489x \nframe= 145 fps=5.0 q=-0.0 size= 86272kB time=00:00:14.41 bitrate=49045.1kbits/s speed=0.492x \nframe= 149 fps=5.0 q=-0.0 size= 88832kB time=00:00:14.81 bitrate=49136.5kbits/s speed=0.495x \nframe= 153 fps=5.0 q=-0.0 size= 91136kB time=00:00:15.21 bitrate=49085.2kbits/s speed=0.498x \nframe= 157 fps=5.0 q=-0.0 size= 93440kB time=00:00:15.61 bitrate=49036.5kbits/s speed=0.501x \nframe= 161 fps=5.1 q=-0.0 size= 96000kB time=00:00:16.01 bitrate=49121.3kbits/s speed=0.504x \nframe= 165 fps=5.1 q=-0.0 size= 98304kB time=00:00:16.41 bitrate=49074.1kbits/s speed=0.506x \nframe= 167 fps=5.1 q=-0.0 Lsize= 100257kB time=00:00:16.61 bitrate=49446.7kbits/s speed=0.505x \nvideo:100257kB audio:0kB subtitle:0kB other streams:0kB global headers:0kB muxing overhead: 0.000019%\nDone predctionAirNow.gif !!!\n", + "history_begin_time" : 1655141747936, + "history_end_time" : 1655160671413, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "5jFwZlD4ufey", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 25\n\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n", + "history_output" : "Running", + "history_begin_time" : 1655141374365, + "history_end_time" : 1655160670882, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "QFGVRAKoxlCx", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport 
geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 25\n\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\n\nfor t in time_:\n \n df = dfs[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = gdf.set_crs(\"EPSG:4326\")\n #gdf = gdf.to_crs(\"+proj=lcc +lat_1=2 +lat_2=33.000 +lat_0=45.000 +lon_0=-97.000 +x_0=-97.000 +y_0=40.000 +datum=NAD83 +units=m +no_defs\")\n\n gdf.plot(column='prediction', legend=False, figsize=(20, 18),\n cmap=cmap, vmin=0,vmax=80 )\n #cb = plt.colorbar(sm, ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n plt.title(plotTitle, fontdict={'fontsize': 35})\n print(\"Saving...: \", plotTitle)\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_\" + str(t) + \".tif\")\n\n \nfor t_Airnow, t in zip(time_Airnow, time_):\n \n df = dfs[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = gdf.set_crs(\"EPSG:4326\")\n \n \n predDf = gdf.plot(column='prediction', legend=False, figsize=(20, 18),\n cmap=cmap, vmin=0,vmax=84 )\n \n df_Airnow = dfs_Airnow[t_Airnow]\n # Add individual AirNow stations in US with color representation.\n \n gdfAirnow = gpd.GeoDataFrame(df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n\n gdfAirnow = gdfAirnow.set_crs(\"EPSG:4326\")\n gdfAirnow.plot(ax=predDf, column='AirNOW_O3', marker='o', markersize=65, cmap=cmap, figsize=(20, 15), linewidths=1, edgecolors=\"black\")\n \n \n dateObj = datetime.strptime(str(t_Airnow), \"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n plt.title(\"AirNow Stations: \"+plotTitle, fontdict={'fontsize': 35})\n print(\"Saving...: \", plotTitle)\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_\" + str(t_Airnow) + \".tif\")\n \n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n 
\nimageio.mimsave('/groups/ESS/aalnaim/cmaq/prediction.mp4', images, fps=10)\n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave(\"/groups/ESS/aalnaim/cmaq/predctionAirNow.mp4\", images, fps=10)\n\nprint(\"Generating prediction.gif ...\")\n# create .gif from .mp4 using FFmpeg\nos.system('module load ffmpeg; ffmpeg -i /groups/ESS/aalnaim/cmaq/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/prediction.mp4')\nprint(\"Done prediction.gif !!!\")\n\nprint(\"Generating predctionAirNow.gif ...\")\n# create .gif from .mp4 using FFmpeg\nos.system('module load ffmpeg; ffmpeg -i /groups/ESS/aalnaim/cmaq/predctionAirNow.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/predctionAirNow.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/predctionAirNow.mp4')\nprint(\"Done predctionAirNow.gif !!!\")\n\n", + "history_output" : "Saving...: 2022-06-10 (Time: 12)\nSaving...: 2022-06-10 (Time: 13)\nSaving...: 2022-06-10 (Time: 14)\nSaving...: 2022-06-10 (Time: 15)\nSaving...: 2022-06-10 (Time: 16)\nSaving...: 2022-06-10 (Time: 17)\nSaving...: 2022-06-10 (Time: 18)\nSaving...: 2022-06-10 (Time: 19)\nSaving...: 2022-06-10 (Time: 20)\nSaving...: 2022-06-10 (Time: 21)\nSaving...: 2022-06-10 (Time: 22)\nSaving...: 2022-06-10 (Time: 23)\nSaving...: 2022-06-10 (Time: 0)\nSaving...: 2022-06-10 (Time: 1)\nSaving...: 2022-06-10 (Time: 2)\nSaving...: 2022-06-10 (Time: 3)\nSaving...: 2022-06-10 (Time: 4)\nSaving...: 2022-06-10 (Time: 5)\nSaving...: 2022-06-10 (Time: 6)\nSaving...: 2022-06-10 (Time: 7)\n/home/aalnaim/CMAQAI/lib/python3.8/site-packages/geopandas/plotting.py:661: RuntimeWarning: More than 20 figures have been opened. Figures created through the pyplot interface (`matplotlib.pyplot.figure`) are retained until explicitly closed and may consume too much memory. 
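The RuntimeWarning recorded here is raised because each gdf.plot call in the hourly loop opens a new figure that is never closed. A minimal sketch of one way to keep memory flat, reusing the variable names and output path from the script recorded above (an editorial illustration, not part of the logged run):

    import matplotlib.pyplot as plt
    # ...inside the per-hour plotting loop, immediately after saving each frame:
    plt.savefig("/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_" + str(t) + ".tif")
    plt.close('all')  # release the figure so open-figure count does not grow hour after hour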
(To control this warning, see the rcParam `figure.max_open_warning`).\n fig, ax = plt.subplots(figsize=figsize)\nSaving...: 2022-06-10 (Time: 8)\nSaving...: 2022-06-10 (Time: 9)\nSaving...: 2022-06-10 (Time: 10)\nSaving...: 2022-06-10 (Time: 11)\nSaving...: 2022-06-09 (Time: 12)\nSaving...: 2022-06-09 (Time: 13)\nSaving...: 2022-06-09 (Time: 14)\nSaving...: 2022-06-09 (Time: 15)\nSaving...: 2022-06-09 (Time: 16)\nSaving...: 2022-06-09 (Time: 17)\nSaving...: 2022-06-09 (Time: 18)\nSaving...: 2022-06-09 (Time: 19)\nSaving...: 2022-06-09 (Time: 20)\nSaving...: 2022-06-09 (Time: 21)\nSaving...: 2022-06-09 (Time: 22)\nSaving...: 2022-06-09 (Time: 23)\nSaving...: 2022-06-09 (Time: 0)\nSaving...: 2022-06-09 (Time: 1)\nSaving...: 2022-06-09 (Time: 2)\nSaving...: 2022-06-09 (Time: 3)\nSaving...: 2022-06-09 (Time: 4)\nSaving...: 2022-06-09 (Time: 5)\nSaving...: 2022-06-09 (Time: 6)\nSaving...: 2022-06-09 (Time: 7)\nSaving...: 2022-06-09 (Time: 8)\nSaving...: 2022-06-09 (Time: 9)\nSaving...: 2022-06-09 (Time: 10)\nSaving...: 2022-06-09 (Time: 11)\nSaving...: 2022-06-08 (Time: 12)\nSaving...: 2022-06-08 (Time: 13)\nSaving...: 2022-06-08 (Time: 14)\nSaving...: 2022-06-08 (Time: 15)\nSaving...: 2022-06-08 (Time: 16)\nSaving...: 2022-06-08 (Time: 17)\nSaving...: 2022-06-08 (Time: 18)\nSaving...: 2022-06-08 (Time: 19)\nSaving...: 2022-06-08 (Time: 20)\nSaving...: 2022-06-08 (Time: 21)\nSaving...: 2022-06-08 (Time: 22)\nSaving...: 2022-06-08 (Time: 23)\nSaving...: 2022-06-08 (Time: 0)\nSaving...: 2022-06-08 (Time: 1)\nSaving...: 2022-06-08 (Time: 2)\nSaving...: 2022-06-08 (Time: 3)\nSaving...: 2022-06-08 (Time: 4)\nSaving...: 2022-06-08 (Time: 5)\nSaving...: 2022-06-08 (Time: 6)\nSaving...: 2022-06-08 (Time: 7)\nSaving...: 2022-06-08 (Time: 8)\nSaving...: 2022-06-08 (Time: 9)\nSaving...: 2022-06-08 (Time: 10)\nSaving...: 2022-06-08 (Time: 11)\nSaving...: 2022-06-07 (Time: 12)\nSaving...: 2022-06-07 (Time: 13)\nSaving...: 2022-06-07 (Time: 14)\nSaving...: 2022-06-07 (Time: 15)\nSaving...: 2022-06-07 (Time: 16)\nSaving...: 2022-06-07 (Time: 17)\nSaving...: 2022-06-07 (Time: 18)\nSaving...: 2022-06-07 (Time: 19)\nSaving...: 2022-06-07 (Time: 20)\nSaving...: 2022-06-07 (Time: 21)\nSaving...: 2022-06-07 (Time: 22)\nSaving...: 2022-06-07 (Time: 23)\nSaving...: 2022-06-07 (Time: 0)\nSaving...: 2022-06-07 (Time: 1)\nSaving...: 2022-06-07 (Time: 2)\nSaving...: 2022-06-07 (Time: 3)\nSaving...: 2022-06-07 (Time: 4)\nSaving...: 2022-06-07 (Time: 5)\nSaving...: 2022-06-07 (Time: 6)\nSaving...: 2022-06-07 (Time: 7)\nSaving...: 2022-06-07 (Time: 8)\nSaving...: 2022-06-07 (Time: 9)\nSaving...: 2022-06-07 (Time: 10)\nSaving...: 2022-06-07 (Time: 11)\nSaving...: 2022-06-06 (Time: 12)\nSaving...: 2022-06-06 (Time: 13)\nSaving...: 2022-06-06 (Time: 14)\nSaving...: 2022-06-06 (Time: 15)\nSaving...: 2022-06-06 (Time: 16)\nSaving...: 2022-06-06 (Time: 17)\nSaving...: 2022-06-06 (Time: 18)\nSaving...: 2022-06-06 (Time: 19)\nSaving...: 2022-06-06 (Time: 20)\nSaving...: 2022-06-06 (Time: 21)\nSaving...: 2022-06-06 (Time: 22)\nSaving...: 2022-06-06 (Time: 23)\nSaving...: 2022-06-06 (Time: 0)\nSaving...: 2022-06-06 (Time: 1)\nSaving...: 2022-06-06 (Time: 2)\nSaving...: 2022-06-06 (Time: 3)\nSaving...: 2022-06-06 (Time: 4)\nSaving...: 2022-06-06 (Time: 5)\nSaving...: 2022-06-06 (Time: 6)\nSaving...: 2022-06-06 (Time: 7)\nSaving...: 2022-06-06 (Time: 8)\nSaving...: 2022-06-06 (Time: 9)\nSaving...: 2022-06-06 (Time: 10)\nSaving...: 2022-06-06 (Time: 11)\nSaving...: 2022-06-05 (Time: 12)\nSaving...: 2022-06-05 (Time: 13)\nSaving...: 
2022-06-05 (Time: 14)\nSaving...: 2022-06-05 (Time: 15)\nSaving...: 2022-06-05 (Time: 16)\nSaving...: 2022-06-05 (Time: 17)\nSaving...: 2022-06-05 (Time: 18)\nSaving...: 2022-06-05 (Time: 19)\nSaving...: 2022-06-05 (Time: 20)\nSaving...: 2022-06-05 (Time: 21)\nSaving...: 2022-06-05 (Time: 22)\nSaving...: 2022-06-05 (Time: 23)\nSaving...: 2022-06-05 (Time: 0)\nSaving...: 2022-06-05 (Time: 1)\nSaving...: 2022-06-05 (Time: 2)\nSaving...: 2022-06-05 (Time: 3)\nSaving...: 2022-06-05 (Time: 4)\nSaving...: 2022-06-05 (Time: 5)\nSaving...: 2022-06-05 (Time: 6)\nSaving...: 2022-06-05 (Time: 7)\nSaving...: 2022-06-05 (Time: 8)\nSaving...: 2022-06-05 (Time: 9)\nSaving...: 2022-06-05 (Time: 10)\nSaving...: 2022-06-05 (Time: 11)\nSaving...: 2022-06-04 (Time: 12)\nSaving...: 2022-06-04 (Time: 13)\nSaving...: 2022-06-04 (Time: 14)\nSaving...: 2022-06-04 (Time: 15)\nSaving...: 2022-06-04 (Time: 16)\nSaving...: 2022-06-04 (Time: 17)\nSaving...: 2022-06-04 (Time: 18)\nSaving...: 2022-06-04 (Time: 19)\nSaving...: 2022-06-04 (Time: 20)\nSaving...: 2022-06-04 (Time: 21)\nSaving...: 2022-06-04 (Time: 22)\nSaving...: 2022-06-04 (Time: 23)\nSaving...: 2022-06-04 (Time: 0)\nSaving...: 2022-06-04 (Time: 1)\nSaving...: 2022-06-04 (Time: 2)\nSaving...: 2022-06-04 (Time: 3)\nSaving...: 2022-06-04 (Time: 4)\nSaving...: 2022-06-04 (Time: 5)\nSaving...: 2022-06-04 (Time: 6)\nSaving...: 2022-06-04 (Time: 7)\nSaving...: 2022-06-04 (Time: 8)\nSaving...: 2022-06-04 (Time: 9)\nSaving...: 2022-06-04 (Time: 10)\nSaving...: 2022-06-04 (Time: 11)\nSaving...: 2022-06-10 (Time: 12)\nSaving...: 2022-06-10 (Time: 13)\nSaving...: 2022-06-10 (Time: 14)\nSaving...: 2022-06-10 (Time: 15)\nSaving...: 2022-06-10 (Time: 16)\nSaving...: 2022-06-10 (Time: 17)\nSaving...: 2022-06-10 (Time: 18)\nSaving...: 2022-06-10 (Time: 19)\nSaving...: 2022-06-10 (Time: 20)\nSaving...: 2022-06-10 (Time: 21)\nSaving...: 2022-06-10 (Time: 22)\nSaving...: 2022-06-10 (Time: 23)\nSaving...: 2022-06-10 (Time: 0)\nSaving...: 2022-06-10 (Time: 1)\nSaving...: 2022-06-10 (Time: 2)\nSaving...: 2022-06-10 (Time: 3)\nSaving...: 2022-06-10 (Time: 4)\nSaving...: 2022-06-10 (Time: 5)\nSaving...: 2022-06-10 (Time: 6)\nSaving...: 2022-06-10 (Time: 7)\nSaving...: 2022-06-10 (Time: 8)\nSaving...: 2022-06-10 (Time: 9)\nSaving...: 2022-06-10 (Time: 10)\nSaving...: 2022-06-10 (Time: 11)\nSaving...: 2022-06-09 (Time: 12)\nSaving...: 2022-06-09 (Time: 13)\nSaving...: 2022-06-09 (Time: 14)\nSaving...: 2022-06-09 (Time: 15)\nSaving...: 2022-06-09 (Time: 16)\nSaving...: 2022-06-09 (Time: 17)\nSaving...: 2022-06-09 (Time: 18)\nSaving...: 2022-06-09 (Time: 19)\nSaving...: 2022-06-09 (Time: 20)\nSaving...: 2022-06-09 (Time: 21)\nSaving...: 2022-06-09 (Time: 22)\nSaving...: 2022-06-09 (Time: 23)\nSaving...: 2022-06-09 (Time: 0)\nSaving...: 2022-06-09 (Time: 1)\nSaving...: 2022-06-09 (Time: 2)\nSaving...: 2022-06-09 (Time: 3)\nSaving...: 2022-06-09 (Time: 4)\nSaving...: 2022-06-09 (Time: 5)\nSaving...: 2022-06-09 (Time: 6)\nSaving...: 2022-06-09 (Time: 7)\nSaving...: 2022-06-09 (Time: 8)\nSaving...: 2022-06-09 (Time: 9)\nSaving...: 2022-06-09 (Time: 10)\nSaving...: 2022-06-09 (Time: 11)\nSaving...: 2022-06-08 (Time: 12)\nSaving...: 2022-06-08 (Time: 13)\nSaving...: 2022-06-08 (Time: 14)\nSaving...: 2022-06-08 (Time: 15)\nSaving...: 2022-06-08 (Time: 16)\nSaving...: 2022-06-08 (Time: 17)\nSaving...: 2022-06-08 (Time: 18)\nSaving...: 2022-06-08 (Time: 19)\nSaving...: 2022-06-08 (Time: 20)\nSaving...: 2022-06-08 (Time: 21)\nSaving...: 2022-06-08 (Time: 22)\nSaving...: 2022-06-08 (Time: 
23)\nSaving...: 2022-06-08 (Time: 0)\nSaving...: 2022-06-08 (Time: 1)\nSaving...: 2022-06-08 (Time: 2)\nSaving...: 2022-06-08 (Time: 3)\nSaving...: 2022-06-08 (Time: 4)\nSaving...: 2022-06-08 (Time: 5)\nSaving...: 2022-06-08 (Time: 6)\nSaving...: 2022-06-08 (Time: 7)\nSaving...: 2022-06-08 (Time: 8)\nSaving...: 2022-06-08 (Time: 9)\nSaving...: 2022-06-08 (Time: 10)\nSaving...: 2022-06-08 (Time: 11)\nSaving...: 2022-06-07 (Time: 12)\nSaving...: 2022-06-07 (Time: 13)\nSaving...: 2022-06-07 (Time: 14)\nSaving...: 2022-06-07 (Time: 15)\nSaving...: 2022-06-07 (Time: 16)\nSaving...: 2022-06-07 (Time: 17)\nSaving...: 2022-06-07 (Time: 18)\nSaving...: 2022-06-07 (Time: 19)\nSaving...: 2022-06-07 (Time: 20)\nSaving...: 2022-06-07 (Time: 21)\nSaving...: 2022-06-07 (Time: 22)\nSaving...: 2022-06-07 (Time: 23)\nSaving...: 2022-06-07 (Time: 0)\nSaving...: 2022-06-07 (Time: 1)\nSaving...: 2022-06-07 (Time: 2)\nSaving...: 2022-06-07 (Time: 3)\nSaving...: 2022-06-07 (Time: 4)\nSaving...: 2022-06-07 (Time: 5)\nSaving...: 2022-06-07 (Time: 6)\nSaving...: 2022-06-07 (Time: 7)\nSaving...: 2022-06-07 (Time: 8)\nSaving...: 2022-06-07 (Time: 9)\nSaving...: 2022-06-07 (Time: 10)\nSaving...: 2022-06-07 (Time: 11)\nSaving...: 2022-06-06 (Time: 12)\nSaving...: 2022-06-06 (Time: 13)\nSaving...: 2022-06-06 (Time: 14)\nSaving...: 2022-06-06 (Time: 15)\nSaving...: 2022-06-06 (Time: 16)\nSaving...: 2022-06-06 (Time: 17)\nSaving...: 2022-06-06 (Time: 18)\nSaving...: 2022-06-06 (Time: 19)\nSaving...: 2022-06-06 (Time: 20)\nSaving...: 2022-06-06 (Time: 21)\nSaving...: 2022-06-06 (Time: 22)\nSaving...: 2022-06-06 (Time: 23)\nSaving...: 2022-06-06 (Time: 0)\nSaving...: 2022-06-06 (Time: 1)\nSaving...: 2022-06-06 (Time: 2)\nSaving...: 2022-06-06 (Time: 3)\nSaving...: 2022-06-06 (Time: 4)\nSaving...: 2022-06-06 (Time: 5)\nSaving...: 2022-06-06 (Time: 6)\nSaving...: 2022-06-06 (Time: 7)\nSaving...: 2022-06-06 (Time: 8)\nSaving...: 2022-06-06 (Time: 9)\nSaving...: 2022-06-06 (Time: 10)\nSaving...: 2022-06-06 (Time: 11)\nSaving...: 2022-06-05 (Time: 12)\nSaving...: 2022-06-05 (Time: 13)\nSaving...: 2022-06-05 (Time: 14)\nSaving...: 2022-06-05 (Time: 15)\nSaving...: 2022-06-05 (Time: 16)\nSaving...: 2022-06-05 (Time: 17)\nSaving...: 2022-06-05 (Time: 18)\nSaving...: 2022-06-05 (Time: 19)\nSaving...: 2022-06-05 (Time: 20)\nSaving...: 2022-06-05 (Time: 21)\nSaving...: 2022-06-05 (Time: 22)\nSaving...: 2022-06-05 (Time: 23)\nSaving...: 2022-06-05 (Time: 0)\nSaving...: 2022-06-05 (Time: 1)\nSaving...: 2022-06-05 (Time: 2)\nSaving...: 2022-06-05 (Time: 3)\nSaving...: 2022-06-05 (Time: 4)\nSaving...: 2022-06-05 (Time: 5)\nSaving...: 2022-06-05 (Time: 6)\nSaving...: 2022-06-05 (Time: 7)\nSaving...: 2022-06-05 (Time: 8)\nSaving...: 2022-06-05 (Time: 9)\nSaving...: 2022-06-05 (Time: 10)\nSaving...: 2022-06-05 (Time: 11)\nSaving...: 2022-06-04 (Time: 12)\nSaving...: 2022-06-04 (Time: 13)\nSaving...: 2022-06-04 (Time: 14)\nSaving...: 2022-06-04 (Time: 15)\nSaving...: 2022-06-04 (Time: 16)\nSaving...: 2022-06-04 (Time: 17)\nSaving...: 2022-06-04 (Time: 18)\nSaving...: 2022-06-04 (Time: 19)\nSaving...: 2022-06-04 (Time: 20)\nSaving...: 2022-06-04 (Time: 21)\nSaving...: 2022-06-04 (Time: 22)\nSaving...: 2022-06-04 (Time: 23)\nSaving...: 2022-06-04 (Time: 0)\nSaving...: 2022-06-04 (Time: 1)\nSaving...: 2022-06-04 (Time: 2)\nSaving...: 2022-06-04 (Time: 3)\nSaving...: 2022-06-04 (Time: 4)\nSaving...: 2022-06-04 (Time: 5)\nSaving...: 2022-06-04 (Time: 6)\nSaving...: 2022-06-04 (Time: 7)\nSaving...: 2022-06-04 (Time: 8)\nSaving...: 2022-06-04 (Time: 
9)\nSaving...: 2022-06-04 (Time: 10)\nSaving...: 2022-06-04 (Time: 11)\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060400.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060401.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060402.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060403.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060404.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060405.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060406.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060407.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060408.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060409.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060410.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060411.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060412.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060413.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060414.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060415.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060416.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060417.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060418.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060419.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060420.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060421.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060422.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060423.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060500.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060501.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060502.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060503.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060504.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060505.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060506.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060507.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060508.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060509.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060510.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060511.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060512.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060513.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060514.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060515.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060516.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060517.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060518.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060519.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060520.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060521.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060522.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060523.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060600.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060601.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060602.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060603.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060604.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060605.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060606.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060607.tif\n/groups/ESS
/aalnaim/cmaq/prediction_maps/CMAQ_2022060608.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060609.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060610.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060611.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060612.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060613.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060614.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060615.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060616.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060617.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060618.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060619.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060620.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060621.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060622.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060623.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060700.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060701.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060702.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060703.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060704.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060705.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060706.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060707.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060708.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060709.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060710.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060711.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060712.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060713.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060714.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060715.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060716.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060717.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060718.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060719.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060720.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060721.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060722.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060723.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060800.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060801.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060802.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060803.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060804.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060805.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060806.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060807.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060808.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060809.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060810.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060811.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060812.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060813.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060814.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060815.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060816.tif\n/groups/ESS/aalnaim/cmaq/predict
ion_maps/CMAQ_2022060817.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060818.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060819.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060820.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060821.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060822.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060823.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060900.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060901.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060902.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060903.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060904.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060905.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060906.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060907.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060908.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060909.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060910.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060911.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060912.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060913.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060914.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060915.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060916.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060917.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060918.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060919.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060920.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060921.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060922.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022060923.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022061000.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022061001.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022061002.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022061003.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022061004.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022061005.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022061006.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022061007.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022061008.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022061009.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022061010.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022061011.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022061012.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022061013.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022061014.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022061015.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022061016.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022061017.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022061018.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022061019.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022061020.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022061021.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022061022.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022061023.tif\nIMAGEIO FFMPEG_WRITER WARNING: input image is not divisible by macro_block_size=16, resizing from (2000, 1800) to (2000, 1808) to ensure video compatibility with most codecs 
and players. To prevent resizing, make your input image divisible by the macro_block_size or set the macro_block_size to 1 (risking incompatibility).\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060400.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060401.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060402.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060403.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060404.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060405.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060406.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060407.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060408.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060409.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060410.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060411.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060412.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060413.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060414.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060415.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060416.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060417.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060418.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060419.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060420.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060421.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060422.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060423.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060500.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060501.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060502.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060503.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060504.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060505.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060506.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060507.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060508.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060509.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060510.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060511.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060512.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060513.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060514.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060515.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060516.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060517.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060518.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060519.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060520.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060521.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060522.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060523.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060600.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060601.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060602.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060603.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060604.tif\n/groups/ESS/
aalnaim/cmaq/prediction_maps/AirNow_2022060605.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060606.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060607.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060608.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060609.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060610.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060611.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060612.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060613.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060614.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060615.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060616.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060617.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060618.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060619.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060620.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060621.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060622.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060623.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060700.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060701.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060702.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060703.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060704.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060705.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060706.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060707.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060708.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060709.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060710.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060711.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060712.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060713.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060714.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060715.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060716.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060717.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060718.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060719.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060720.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060721.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060722.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060723.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060800.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060801.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060802.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060803.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060804.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060805.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060806.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060807.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060808.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060809.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060810.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060811.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow
_2022060812.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060813.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060814.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060815.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060816.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060817.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060818.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060819.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060820.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060821.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060822.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060823.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060900.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060901.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060902.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060903.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060904.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060905.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060906.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060907.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060908.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060909.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060910.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060911.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060912.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060913.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060914.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060915.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060916.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060917.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060918.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060919.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060920.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060921.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060922.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022060923.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022061000.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022061001.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022061002.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022061003.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022061004.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022061005.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022061006.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022061007.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022061008.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022061009.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022061010.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022061011.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022061012.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022061013.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022061014.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022061015.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022061016.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022061017.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022061018.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022061019.tif\n/groups/ESS/aalnai
m/cmaq/prediction_maps/AirNow_2022061020.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022061021.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022061022.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022061023.tif\nIMAGEIO FFMPEG_WRITER WARNING: input image is not divisible by macro_block_size=16, resizing from (2000, 1800) to (2000, 1808) to ensure video compatibility with most codecs and players. To prevent resizing, make your input image divisible by the macro_block_size or set the macro_block_size to 1 (risking incompatibility).\nGenerating prediction.gif ...\nffmpeg version 4.2.2 Copyright (c) 2000-2019 the FFmpeg developers\n built with gcc 9.3.0 (GCC)\n configuration: --prefix=/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0/ffmpeg-4.2.2-ey --enable-pic --cc=/opt/sw/spack/0.16.0/lib/spack/env/gcc/gcc --cxx=/opt/sw/spack/0.16.0/lib/spack/env/gcc/g++ --enable-libxcb --enable-libxcb-shape --enable-libxcb-shm --enable-libxcb-xfixes --enable-xlib --disable-libfontconfig --disable-libfreetype --disable-libfribidi --enable-bzlib --enable-libmp3lame --enable-libopenjpeg --disable-libopus --enable-libspeex --enable-libvorbis --enable-avresample --enable-openssl --enable-shared --disable-libzmq --disable-libssh --disable-libwebp --disable-lzma --disable-libsnappy --enable-sdl2 --enable-libaom\n libavutil 56. 31.100 / 56. 31.100\n libavcodec 58. 54.100 / 58. 54.100\n libavformat 58. 29.100 / 58. 29.100\n libavdevice 58. 8.100 / 58. 8.100\n libavfilter 7. 57.100 / 7. 57.100\n libavresample 4. 0. 0 / 4. 0. 0\n libswscale 5. 5.100 / 5. 5.100\n libswresample 3. 5.100 / 3. 5.100\nInput #0, mov,mp4,m4a,3gp,3g2,mj2, from '/groups/ESS/aalnaim/cmaq/prediction.mp4':\n Metadata:\n major_brand : isom\n minor_version : 512\n compatible_brands: isomiso2avc1mp41\n encoder : Lavf58.29.100\n Duration: 00:00:16.80, start: 0.000000, bitrate: 7654 kb/s\n Stream #0:0(und): Video: h264 (High) (avc1 / 0x31637661), yuv420p, 2000x1808, 7653 kb/s, 10 fps, 10 tbr, 10240 tbn, 20 tbc (default)\n Metadata:\n handler_name : VideoHandler\nStream mapping:\n Stream #0:0 -> #0:0 (h264 (native) -> gif (native))\nPress [q] to stop, [?] 
for help\nOutput #0, gif, to '/groups/ESS/aalnaim/cmaq/prediction.gif':\n Metadata:\n major_brand : isom\n minor_version : 512\n compatible_brands: isomiso2avc1mp41\n encoder : Lavf58.29.100\n Stream #0:0(und): Video: gif, pal8, 2000x1808, q=2-31, 200 kb/s, 10 fps, 100 tbn, 10 tbc (default)\n Metadata:\n handler_name : VideoHandler\n encoder : Lavc58.54.100 gif\nframe= 0 fps=0.0 q=0.0 size= 0kB time=00:00:00.00 bitrate=N/A speed= 0x \nframe= 0 fps=0.0 q=0.0 size= 0kB time=00:00:00.00 bitrate=N/A speed= 0x \nframe= 0 fps=0.0 q=0.0 size= 0kB time=00:00:00.00 bitrate=N/A speed= 0x \nframe= 0 fps=0.0 q=0.0 size= 0kB time=00:00:00.00 bitrate=N/A speed= 0x \nframe= 0 fps=0.0 q=0.0 size= 0kB time=00:00:00.00 bitrate=N/A speed= 0x \nframe= 0 fps=0.0 q=0.0 size= 0kB time=00:00:00.00 bitrate=N/A speed= 0x \nframe= 0 fps=0.0 q=0.0 size= 0kB time=00:00:00.00 bitrate=N/A speed= 0x \nframe= 0 fps=0.0 q=0.0 size= 0kB time=00:00:00.00 bitrate=N/A speed= 0x \nframe= 0 fps=0.0 q=0.0 size= 0kB time=00:00:00.00 bitrate=N/A speed= 0x \n\u001B[1;32m[Parsed_palettegen_2 @ 0x204ab40] \u001B[0m255(+1) colors generated out of 477188 colors; ratio=0.000534\n\u001B[1;32m[Parsed_palettegen_2 @ 0x204ab40] \u001B[0m\u001B[0;33mDupped color: FF48A66C\n\u001B[0mframe= 1 fps=0.2 q=-0.0 size= 0kB time=00:00:00.01 bitrate= 0.0kbits/s speed=0.00174x \nframe= 6 fps=0.9 q=-0.0 size= 3072kB time=00:00:00.51 bitrate=49344.8kbits/s speed=0.0805x \nframe= 10 fps=1.5 q=-0.0 size= 5376kB time=00:00:00.91 bitrate=48395.8kbits/s speed=0.132x \nframe= 14 fps=1.9 q=-0.0 size= 7424kB time=00:00:01.31 bitrate=46425.5kbits/s speed=0.176x \nframe= 18 fps=2.2 q=-0.0 size= 9728kB time=00:00:01.71 bitrate=46603.4kbits/s speed=0.214x \nframe= 22 fps=2.6 q=-0.0 size= 12288kB time=00:00:02.11 bitrate=47707.7kbits/s speed=0.245x \nframe= 26 fps=2.8 q=-0.0 size= 15104kB time=00:00:02.51 bitrate=49295.6kbits/s speed=0.273x \nframe= 30 fps=3.1 q=-0.0 size= 17408kB time=00:00:02.91 bitrate=49005.6kbits/s speed=0.299x \nframe= 34 fps=3.3 q=-0.0 size= 19712kB time=00:00:03.31 bitrate=48785.7kbits/s speed=0.323x \nframe= 38 fps=3.5 q=-0.0 size= 21760kB time=00:00:03.71 bitrate=48048.0kbits/s speed=0.343x \nframe= 42 fps=3.7 q=-0.0 size= 24064kB time=00:00:04.11 bitrate=47964.1kbits/s speed=0.363x \nframe= 46 fps=3.9 q=-0.0 size= 26624kB time=00:00:04.51 bitrate=48360.0kbits/s speed=0.378x \nframe= 50 fps=4.0 q=-0.0 size= 29184kB time=00:00:04.91 bitrate=48691.5kbits/s speed=0.392x \nframe= 54 fps=4.1 q=-0.0 size= 31744kB time=00:00:05.31 bitrate=48973.0kbits/s speed=0.407x \nframe= 58 fps=4.3 q=-0.0 size= 34048kB time=00:00:05.71 bitrate=48847.8kbits/s speed=0.42x \nframe= 62 fps=4.4 q=-0.0 size= 36096kB time=00:00:06.11 bitrate=48395.8kbits/s speed=0.432x \nframe= 66 fps=4.5 q=-0.0 size= 38400kB time=00:00:06.51 bitrate=48321.5kbits/s speed=0.444x \nframe= 70 fps=4.6 q=-0.0 size= 40960kB time=00:00:06.91 bitrate=48559.2kbits/s speed=0.452x \nframe= 74 fps=4.7 q=-0.0 size= 43520kB time=00:00:07.31 bitrate=48771.0kbits/s speed=0.461x \nframe= 78 fps=4.8 q=-0.0 size= 46080kB time=00:00:07.71 bitrate=48960.7kbits/s speed=0.47x \nframe= 82 fps=4.8 q=-0.0 size= 48384kB time=00:00:08.11 bitrate=48873.2kbits/s speed=0.479x \nframe= 86 fps=4.9 q=-0.0 size= 50432kB time=00:00:08.51 bitrate=48547.5kbits/s speed=0.487x \nframe= 90 fps=5.0 q=-0.0 size= 52736kB time=00:00:08.91 bitrate=48486.3kbits/s speed=0.494x \nframe= 94 fps=5.0 q=-0.0 size= 55296kB time=00:00:09.31 bitrate=48655.7kbits/s speed= 0.5x \nframe= 98 fps=5.1 q=-0.0 size= 58112kB time=00:00:09.71 
bitrate=49027.1kbits/s speed=0.506x \nframe= 102 fps=5.2 q=-0.0 size= 60416kB time=00:00:10.11 bitrate=48954.3kbits/s speed=0.512x \nframe= 106 fps=5.2 q=-0.0 size= 62720kB time=00:00:10.51 bitrate=48887.0kbits/s speed=0.518x \nframe= 110 fps=5.3 q=-0.0 size= 64768kB time=00:00:10.91 bitrate=48632.4kbits/s speed=0.524x \nframe= 114 fps=5.3 q=-0.0 size= 67072kB time=00:00:11.31 bitrate=48581.2kbits/s speed=0.529x \nframe= 118 fps=5.4 q=-0.0 size= 69632kB time=00:00:11.71 bitrate=48712.7kbits/s speed=0.534x \nframe= 122 fps=5.4 q=-0.0 size= 72448kB time=00:00:12.11 bitrate=49008.6kbits/s speed=0.538x \nframe= 126 fps=5.5 q=-0.0 size= 74752kB time=00:00:12.51 bitrate=48950.3kbits/s speed=0.543x \nframe= 130 fps=5.5 q=-0.0 size= 77056kB time=00:00:12.91 bitrate=48895.6kbits/s speed=0.547x \nframe= 134 fps=5.6 q=-0.0 size= 79104kB time=00:00:13.31 bitrate=48686.7kbits/s speed=0.552x \nframe= 138 fps=5.6 q=-0.0 size= 81408kB time=00:00:13.71 bitrate=48642.9kbits/s speed=0.556x \nframe= 142 fps=5.6 q=-0.0 size= 83968kB time=00:00:14.11 bitrate=48750.2kbits/s speed=0.559x \nframe= 146 fps=5.7 q=-0.0 size= 86784kB time=00:00:14.51 bitrate=48996.2kbits/s speed=0.562x \nframe= 150 fps=5.7 q=-0.0 size= 89088kB time=00:00:14.91 bitrate=48947.6kbits/s speed=0.566x \nframe= 154 fps=5.7 q=-0.0 size= 91392kB time=00:00:15.31 bitrate=48901.6kbits/s speed=0.569x \nframe= 158 fps=5.8 q=-0.0 size= 93440kB time=00:00:15.71 bitrate=48724.4kbits/s speed=0.572x \nframe= 162 fps=5.8 q=-0.0 size= 95744kB time=00:00:16.11 bitrate=48686.2kbits/s speed=0.576x \nframe= 166 fps=5.8 q=-0.0 size= 98560kB time=00:00:16.51 bitrate=48903.9kbits/s speed=0.578x \nframe= 167 fps=5.8 q=-0.0 Lsize= 99903kB time=00:00:16.61 bitrate=49271.8kbits/s speed=0.575x \nvideo:99903kB audio:0kB subtitle:0kB other streams:0kB global headers:0kB muxing overhead: 0.000020%\nDone prediction.gif !!!\nGenerating predctionAirNow.gif ...\nffmpeg version 4.2.2 Copyright (c) 2000-2019 the FFmpeg developers\n built with gcc 9.3.0 (GCC)\n configuration: --prefix=/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0/ffmpeg-4.2.2-ey --enable-pic --cc=/opt/sw/spack/0.16.0/lib/spack/env/gcc/gcc --cxx=/opt/sw/spack/0.16.0/lib/spack/env/gcc/g++ --enable-libxcb --enable-libxcb-shape --enable-libxcb-shm --enable-libxcb-xfixes --enable-xlib --disable-libfontconfig --disable-libfreetype --disable-libfribidi --enable-bzlib --enable-libmp3lame --enable-libopenjpeg --disable-libopus --enable-libspeex --enable-libvorbis --enable-avresample --enable-openssl --enable-shared --disable-libzmq --disable-libssh --disable-libwebp --disable-lzma --disable-libsnappy --enable-sdl2 --enable-libaom\n libavutil 56. 31.100 / 56. 31.100\n libavcodec 58. 54.100 / 58. 54.100\n libavformat 58. 29.100 / 58. 29.100\n libavdevice 58. 8.100 / 58. 8.100\n libavfilter 7. 57.100 / 7. 57.100\n libavresample 4. 0. 0 / 4. 0. 0\n libswscale 5. 5.100 / 5. 5.100\n libswresample 3. 5.100 / 3. 5.100\nInput #0, mov,mp4,m4a,3gp,3g2,mj2, from '/groups/ESS/aalnaim/cmaq/predctionAirNow.mp4':\n Metadata:\n major_brand : isom\n minor_version : 512\n compatible_brands: isomiso2avc1mp41\n encoder : Lavf58.29.100\n Duration: 00:00:16.80, start: 0.000000, bitrate: 8060 kb/s\n Stream #0:0(und): Video: h264 (High) (avc1 / 0x31637661), yuv420p, 2000x1808, 8059 kb/s, 10 fps, 10 tbr, 10240 tbn, 20 tbc (default)\n Metadata:\n handler_name : VideoHandler\nStream mapping:\n Stream #0:0 -> #0:0 (h264 (native) -> gif (native))\nPress [q] to stop, [?] 
for help\nOutput #0, gif, to '/groups/ESS/aalnaim/cmaq/predctionAirNow.gif':\n Metadata:\n major_brand : isom\n minor_version : 512\n compatible_brands: isomiso2avc1mp41\n encoder : Lavf58.29.100\n Stream #0:0(und): Video: gif, pal8, 2000x1808, q=2-31, 200 kb/s, 10 fps, 100 tbn, 10 tbc (default)\n Metadata:\n handler_name : VideoHandler\n encoder : Lavc58.54.100 gif\nframe= 0 fps=0.0 q=0.0 size= 0kB time=00:00:00.00 bitrate=N/A speed= 0x \nframe= 0 fps=0.0 q=0.0 size= 0kB time=00:00:00.00 bitrate=N/A speed= 0x \nframe= 0 fps=0.0 q=0.0 size= 0kB time=00:00:00.00 bitrate=N/A speed= 0x \nframe= 0 fps=0.0 q=0.0 size= 0kB time=00:00:00.00 bitrate=N/A speed= 0x \nframe= 0 fps=0.0 q=0.0 size= 0kB time=00:00:00.00 bitrate=N/A speed= 0x \nframe= 0 fps=0.0 q=0.0 size= 0kB time=00:00:00.00 bitrate=N/A speed= 0x \nframe= 0 fps=0.0 q=0.0 size= 0kB time=00:00:00.00 bitrate=N/A speed= 0x \nframe= 0 fps=0.0 q=0.0 size= 0kB time=00:00:00.00 bitrate=N/A speed= 0x \nframe= 0 fps=0.0 q=0.0 size= 0kB time=00:00:00.00 bitrate=N/A speed= 0x \nframe= 0 fps=0.0 q=0.0 size= 0kB time=00:00:00.00 bitrate=N/A speed= 0x \nframe= 0 fps=0.0 q=0.0 size= 0kB time=00:00:00.00 bitrate=N/A speed= 0x \nframe= 0 fps=0.0 q=0.0 size= 0kB time=00:00:00.00 bitrate=N/A speed= 0x \n\u001B[1;32m[Parsed_palettegen_2 @ 0x2880f00] \u001B[0m255(+1) colors generated out of 1018539 colors; ratio=0.000250\nframe= 1 fps=0.1 q=-0.0 size= 0kB time=00:00:00.01 bitrate= 0.0kbits/s speed=0.0013x \nframe= 5 fps=0.6 q=-0.0 size= 2560kB time=00:00:00.41 bitrate=51150.0kbits/s speed=0.0498x \nframe= 9 fps=1.0 q=-0.0 size= 5120kB time=00:00:00.81 bitrate=51781.5kbits/s speed=0.0912x \nframe= 13 fps=1.4 q=-0.0 size= 7424kB time=00:00:01.21 bitrate=50262.3kbits/s speed=0.127x \nframe= 17 fps=1.7 q=-0.0 size= 9728kB time=00:00:01.61 bitrate=49498.0kbits/s speed=0.158x \nframe= 20 fps=1.9 q=-0.0 size= 11520kB time=00:00:01.91 bitrate=49409.3kbits/s speed=0.178x \nframe= 23 fps=2.0 q=-0.0 size= 13568kB time=00:00:02.21 bitrate=50293.7kbits/s speed=0.197x \nframe= 26 fps=2.2 q=-0.0 size= 15616kB time=00:00:02.51 bitrate=50966.6kbits/s speed=0.214x \nframe= 30 fps=2.4 q=-0.0 size= 18176kB time=00:00:02.91 bitrate=51167.6kbits/s speed=0.235x \nframe= 34 fps=2.6 q=-0.0 size= 20736kB time=00:00:03.31 bitrate=51320.0kbits/s speed=0.254x \nframe= 38 fps=2.8 q=-0.0 size= 23040kB time=00:00:03.71 bitrate=50874.3kbits/s speed=0.271x \nframe= 42 fps=2.9 q=-0.0 size= 25344kB time=00:00:04.11 bitrate=50515.3kbits/s speed=0.287x \nframe= 45 fps=3.0 q=-0.0 size= 27392kB time=00:00:04.41 bitrate=50883.3kbits/s speed=0.297x \nframe= 48 fps=3.1 q=-0.0 size= 29184kB time=00:00:04.71 bitrate=50759.1kbits/s speed=0.306x \nframe= 52 fps=3.2 q=-0.0 size= 32000kB time=00:00:05.11 bitrate=51300.2kbits/s speed=0.318x \nframe= 56 fps=3.4 q=-0.0 size= 34304kB time=00:00:05.51 bitrate=51001.5kbits/s speed=0.33x \nframe= 59 fps=3.4 q=-0.0 size= 36096kB time=00:00:05.81 bitrate=50894.7kbits/s speed=0.337x \nframe= 62 fps=3.5 q=-0.0 size= 37888kB time=00:00:06.11 bitrate=50798.4kbits/s speed=0.345x \nframe= 66 fps=3.6 q=-0.0 size= 40192kB time=00:00:06.51 bitrate=50576.5kbits/s speed=0.354x \nframe= 69 fps=3.6 q=-0.0 size= 42240kB time=00:00:06.81 bitrate=50812.1kbits/s speed=0.36x \nframe= 72 fps=3.7 q=-0.0 size= 44288kB time=00:00:07.11 bitrate=51027.7kbits/s speed=0.365x \nframe= 75 fps=3.8 q=-0.0 size= 46336kB time=00:00:07.41 bitrate=51226.0kbits/s speed=0.371x \nframe= 78 fps=3.8 q=-0.0 size= 48128kB time=00:00:07.71 bitrate=51136.8kbits/s speed=0.377x \nframe= 82 fps=3.9 q=-0.0 
size= 50688kB time=00:00:08.11 bitrate=51200.5kbits/s speed=0.384x \nframe= 85 fps=3.9 q=-0.0 size= 52224kB time=00:00:08.41 bitrate=50870.3kbits/s speed=0.389x \nframe= 89 fps=4.0 q=-0.0 size= 54528kB time=00:00:08.81 bitrate=50703.0kbits/s speed=0.395x \nframe= 92 fps=4.0 q=-0.0 size= 56576kB time=00:00:09.11 bitrate=50874.9kbits/s speed= 0.4x \nframe= 95 fps=4.1 q=-0.0 size= 58624kB time=00:00:09.41 bitrate=51035.9kbits/s speed=0.403x \nframe= 98 fps=4.1 q=-0.0 size= 60672kB time=00:00:09.71 bitrate=51186.9kbits/s speed=0.407x \nframe= 102 fps=4.2 q=-0.0 size= 63232kB time=00:00:10.11 bitrate=51236.1kbits/s speed=0.413x \nframe= 105 fps=4.2 q=-0.0 size= 65024kB time=00:00:10.41 bitrate=51169.7kbits/s speed=0.416x \nframe= 108 fps=4.2 q=-0.0 size= 66816kB time=00:00:10.71 bitrate=51107.1kbits/s speed=0.42x \nframe= 112 fps=4.3 q=-0.0 size= 69120kB time=00:00:11.11 bitrate=50965.9kbits/s speed=0.424x \nframe= 116 fps=4.3 q=-0.0 size= 71680kB time=00:00:11.51 bitrate=51016.7kbits/s speed=0.429x \nframe= 119 fps=4.3 q=-0.0 size= 73472kB time=00:00:11.81 bitrate=50963.8kbits/s speed=0.432x \nframe= 122 fps=4.4 q=-0.0 size= 75520kB time=00:00:12.11 bitrate=51086.7kbits/s speed=0.434x \nframe= 126 fps=4.4 q=-0.0 size= 78080kB time=00:00:12.51 bitrate=51129.6kbits/s speed=0.439x \nframe= 130 fps=4.5 q=-0.0 size= 80384kB time=00:00:12.91 bitrate=51007.4kbits/s speed=0.442x \nframe= 133 fps=4.5 q=-0.0 size= 82176kB time=00:00:13.21 bitrate=50960.3kbits/s speed=0.445x \nframe= 137 fps=4.5 q=-0.0 size= 84480kB time=00:00:13.61 bitrate=50849.4kbits/s speed=0.449x \nframe= 140 fps=4.5 q=-0.0 size= 86528kB time=00:00:13.91 bitrate=50958.8kbits/s speed=0.451x \nframe= 143 fps=4.6 q=-0.0 size= 88576kB time=00:00:14.21 bitrate=51063.7kbits/s speed=0.453x \nframe= 146 fps=4.6 q=-0.0 size= 90368kB time=00:00:14.51 bitrate=51019.6kbits/s speed=0.455x \nframe= 150 fps=4.6 q=-0.0 size= 93184kB time=00:00:14.91 bitrate=51198.1kbits/s speed=0.458x \nframe= 154 fps=4.6 q=-0.0 size= 95488kB time=00:00:15.31 bitrate=51093.3kbits/s speed=0.461x \nframe= 157 fps=4.7 q=-0.0 size= 97280kB time=00:00:15.61 bitrate=51051.7kbits/s speed=0.463x \nframe= 161 fps=4.7 q=-0.0 size= 99584kB time=00:00:16.01 bitrate=50955.2kbits/s speed=0.466x \nframe= 164 fps=4.7 q=-0.0 size= 101632kB time=00:00:16.31 bitrate=51046.6kbits/s speed=0.468x \nframe= 167 fps=4.7 q=-0.0 size= 103424kB time=00:00:16.61 bitrate=51008.4kbits/s speed=0.469x \nframe= 167 fps=4.7 q=-0.0 Lsize= 104349kB time=00:00:16.61 bitrate=51464.4kbits/s speed=0.467x \nvideo:104348kB audio:0kB subtitle:0kB other streams:0kB global headers:0kB muxing overhead: 0.000019%\nDone predctionAirNow.gif !!!\n", + "history_begin_time" : 1655086084174, + "history_end_time" : 1655140838437, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "8MzXVqAjfutP", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 25\n\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n 
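The cleanup block at the top of this recorded script re-scans and unlinks the whole directory once per file via the nested list comprehension, and `if Path(...)` is always truthy whether or not the folder exists. A minimal equivalent sketch, assuming the same prediction_maps directory (an editorial illustration, not part of the recorded run):

    from pathlib import Path

    maps_dir = Path("/groups/ESS/aalnaim/cmaq/prediction_maps/")
    if maps_dir.exists():              # Path(...) alone does not test existence
        for f in maps_dir.glob("*"):
            if f.is_file():
                f.unlink()             # remove old .tif frames, keep the folder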
[f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\n\nfor t in time_:\n \n df = dfs[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = gdf.set_crs(\"EPSG:4326\")\n #gdf = gdf.to_crs(\"+proj=lcc +lat_1=2 +lat_2=33.000 +lat_0=45.000 +lon_0=-97.000 +x_0=-97.000 +y_0=40.000 +datum=NAD83 +units=m +no_defs\")\n\n gdf.plot(column='prediction', legend=False, figsize=(20, 18),\n cmap=cmap, vmin=0,vmax=80 )\n #cb = plt.colorbar(sm, ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n plt.title(plotTitle, fontdict={'fontsize': 35})\n print(\"Saving...: \", plotTitle)\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_\" + str(t) + \".tif\")\n\n \nfor t_Airnow, t in zip(time_Airnow, time_):\n \n df = dfs[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = gdf.set_crs(\"EPSG:4326\")\n \n \n predDf = gdf.plot(column='prediction', legend=False, figsize=(20, 18),\n cmap=cmap, vmin=0,vmax=84 )\n \n df_Airnow = dfs_Airnow[t_Airnow]\n # Add individual AirNow stations in US with color representation.\n \n gdfAirnow = gpd.GeoDataFrame(df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n\n gdfAirnow = gdfAirnow.set_crs(\"EPSG:4326\")\n gdfAirnow.plot(ax=predDf, column='AirNOW_O3', marker='o', markersize=65, cmap=cmap, figsize=(20, 15), linewidths=1, edgecolors=\"black\")\n \n \n dateObj = datetime.strptime(str(t_Airnow), \"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n plt.title(\"AirNow Stations: \"+plotTitle, fontdict={'fontsize': 35})\n print(\"Saving...: \", plotTitle)\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_\" + str(t_Airnow) + \".tif\")\n \n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave('/groups/ESS/aalnaim/cmaq/prediction.mp4', images, fps=10)\n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave(\"/groups/ESS/aalnaim/cmaq/predctionAirNow.mp4\", images, fps=10)\n\nprint(\"Generating prediction.gif ...\")\n# create .gif from .mp4 using FFmpeg\nos.system('module load ffmpeg; ffmpeg -i 
/groups/ESS/aalnaim/cmaq/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/prediction.mp4')\nprint(\"Done prediction.gif !!!\")\n\nprint(\"Generating predctionAirNow.gif ...\")\n# create .gif from .mp4 using FFmpeg\nos.system('module load ffmpeg; ffmpeg -i /groups/ESS/aalnaim/cmaq/predctionAirNow.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/predctionAirNow.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/predctionAirNow.mp4')\nprint(\"Done predctionAirNow.gif !!!\")\n\n", + "history_output" : null, + "history_begin_time" : 1655086021362, + "history_end_time" : null, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : null +},{ + "history_id" : "kekOPnezBZg2", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 25\n\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\n\nfor t in time_:\n \n df = dfs[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = gdf.set_crs(\"EPSG:4326\")\n #gdf = gdf.to_crs(\"+proj=lcc +lat_1=2 +lat_2=33.000 +lat_0=45.000 +lon_0=-97.000 +x_0=-97.000 +y_0=40.000 +datum=NAD83 +units=m +no_defs\")\n\n gdf.plot(column='prediction', legend=False, figsize=(20, 18),\n cmap=cmap, vmin=0,vmax=80 )\n #cb = plt.colorbar(sm, ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n plt.title(plotTitle, fontdict={'fontsize': 35})\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_\" + str(t) + \".tif\")\n\n \nfor t_Airnow, t in zip(time_Airnow, time_):\n \n df = dfs[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = 
gdf.set_crs(\"EPSG:4326\")\n \n \n predDf = gdf.plot(column='prediction', legend=False, figsize=(20, 18),\n cmap=cmap, vmin=0,vmax=84 )\n \n df_Airnow = dfs_Airnow[t_Airnow]\n # Add individual AirNow stations in US with color representation.\n \n gdfAirnow = gpd.GeoDataFrame(df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n\n gdfAirnow = gdfAirnow.set_crs(\"EPSG:4326\")\n gdfAirnow.plot(ax=predDf, column='AirNOW_O3', marker='o', markersize=65, cmap=cmap, figsize=(20, 15), linewidths=1, edgecolors=\"black\")\n \n \n dateObj = datetime.strptime(str(t_Airnow), \"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n plt.title(\"AirNow Stations: \"+plotTitle, fontdict={'fontsize': 35})\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_\" + str(t_Airnow) + \".tif\")\n \n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave('/groups/ESS/aalnaim/cmaq/prediction.mp4', images, fps=10)\n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave(\"/groups/ESS/aalnaim/cmaq/predctionAirNow.mp4\", images, fps=10)\n\n# create .gif from .mp4 using FFmpeg\nos.system('module load ffmpeg; ffmpeg -i /groups/ESS/aalnaim/cmaq/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/prediction.mp4')\n\n\n# create .gif from .mp4 using FFmpeg\nos.system('module load ffmpeg; ffmpeg -i /groups/ESS/aalnaim/cmaq/predctionAirNow.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/predctionAirNow.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/predctionAirNow.mp4')\n", + "history_output" : "/home/aalnaim/CMAQAI/lib/python3.8/site-packages/geopandas/plotting.py:661: RuntimeWarning: More than 20 figures have been opened. Figures created through the pyplot interface (`matplotlib.pyplot.figure`) are retained until explicitly closed and may consume too much memory. 
(To control this warning, see the rcParam `figure.max_open_warning`).\n fig, ax = plt.subplots(figsize=figsize)\n", + "history_begin_time" : 1655079456779, + "history_end_time" : 1655086051413, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Failed" +},{ + "history_id" : "BAO9J6BPVVbI", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 25\n\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\"):\n for file in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(\"/groups/ESS/aalnaim/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\n\nfor t in time_:\n \n df = dfs[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = gdf.set_crs(\"EPSG:4326\")\n #gdf = gdf.to_crs(\"+proj=lcc +lat_1=2 +lat_2=33.000 +lat_0=45.000 +lon_0=-97.000 +x_0=-97.000 +y_0=40.000 +datum=NAD83 +units=m +no_defs\")\n\n gdf.plot(column='prediction', legend=False, figsize=(20, 18),\n cmap=cmap, vmin=0,vmax=80 )\n #cb = plt.colorbar(sm, ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n plt.title(plotTitle, fontdict={'fontsize': 35})\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_\" + str(t) + \".tif\")\n\n \nfor t_Airnow, t in zip(time_Airnow, time_):\n \n df = dfs[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = gdf.set_crs(\"EPSG:4326\")\n \n \n predDf = gdf.plot(column='prediction', legend=False, figsize=(20, 18),\n cmap=cmap, vmin=0,vmax=84 )\n \n df_Airnow = dfs_Airnow[t_Airnow]\n # Add individual AirNow stations in US with color representation.\n \n gdfAirnow = gpd.GeoDataFrame(df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n\n gdfAirnow = gdfAirnow.set_crs(\"EPSG:4326\")\n gdfAirnow.plot(ax=predDf, column='AirNOW_O3', marker='o', markersize=65, cmap=cmap, figsize=(20, 15), linewidths=1, edgecolors=\"black\")\n \n \n dateObj = datetime.strptime(str(t_Airnow), \"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, \"%Y-%m-%d 
(Time: %-H)\")\n plt.title(\"AirNow Stations: \"+plotTitle, fontdict={'fontsize': 35})\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_\" + str(t_Airnow) + \".tif\")\n \n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave('/groups/ESS/aalnaim/cmaq/prediction.mp4', images, fps=10)\n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave(\"/groups/ESS/aalnaim/cmaq/predctionAirNow.mp4\", images, fps=10)\n\n# create .gif from .mp4 using FFmpeg\nos.system('module load ffmpeg; ffmpeg -i /groups/ESS/aalnaim/cmaq/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/prediction.mp4')\n\n\n# create .gif from .mp4 using FFmpeg\nos.system('module load ffmpeg; ffmpeg -i /groups/ESS/aalnaim/cmaq/predctionAirNow.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/predctionAirNow.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/predctionAirNow.mp4')\n", + "history_output" : "Running", + "history_begin_time" : 1655074546095, + "history_end_time" : 1655074618211, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "10tLFD7OwF94", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 25\n#os.system(\"module load ffmpeg\")\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(home + \"/cmaq/prediction_maps/\"):\n for file in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\n\n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave('/groups/ESS/aalnaim/cmaq/prediction.mp4', images, fps=10)\n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n 
\nimageio.mimsave(\"/groups/ESS/aalnaim/cmaq/predctionAirNow.mp4\", images, fps=10)\n\n# create .gif from .mp4 using FFmpeg\nos.system('module load ffmpeg; ffmpeg -i /groups/ESS/aalnaim/cmaq/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/prediction.mp4')\n\n\n# create .gif from .mp4 using FFmpeg\nos.system('module load ffmpeg; ffmpeg -i /groups/ESS/aalnaim/cmaq/predctionAirNow.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/predctionAirNow.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/predctionAirNow.mp4')\n", + "history_output" : "/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052600.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052601.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052602.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052603.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052604.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052605.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052606.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052607.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052608.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052609.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052610.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052611.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052612.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052613.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052614.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052615.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052616.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052617.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052618.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052619.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052620.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052621.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052622.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052623.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052700.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052701.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052702.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052703.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052704.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052705.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052706.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052707.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052708.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052709.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052710.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052711.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052712.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052713.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052714.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052715.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052716.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052717.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052718.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052719.tif\n/g
roups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052720.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052721.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052722.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052723.tif\nIMAGEIO FFMPEG_WRITER WARNING: input image is not divisible by macro_block_size=16, resizing from (2000, 1800) to (2000, 1808) to ensure video compatibility with most codecs and players. To prevent resizing, make your input image divisible by the macro_block_size or set the macro_block_size to 1 (risking incompatibility).\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052700.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052701.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052702.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052703.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052704.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052705.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052706.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052707.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052708.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052709.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052710.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052711.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052712.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052713.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052714.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052715.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052716.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052717.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052718.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052719.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052720.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052721.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052722.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052723.tif\nIMAGEIO FFMPEG_WRITER WARNING: input image is not divisible by macro_block_size=16, resizing from (2000, 1800) to (2000, 1808) to ensure video compatibility with most codecs and players. To prevent resizing, make your input image divisible by the macro_block_size or set the macro_block_size to 1 (risking incompatibility).\nffmpeg version 4.2.2 Copyright (c) 2000-2019 the FFmpeg developers\n built with gcc 9.3.0 (GCC)\n configuration: --prefix=/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0/ffmpeg-4.2.2-ey --enable-pic --cc=/opt/sw/spack/0.16.0/lib/spack/env/gcc/gcc --cxx=/opt/sw/spack/0.16.0/lib/spack/env/gcc/g++ --enable-libxcb --enable-libxcb-shape --enable-libxcb-shm --enable-libxcb-xfixes --enable-xlib --disable-libfontconfig --disable-libfreetype --disable-libfribidi --enable-bzlib --enable-libmp3lame --enable-libopenjpeg --disable-libopus --enable-libspeex --enable-libvorbis --enable-avresample --enable-openssl --enable-shared --disable-libzmq --disable-libssh --disable-libwebp --disable-lzma --disable-libsnappy --enable-sdl2 --enable-libaom\n libavutil 56. 31.100 / 56. 31.100\n libavcodec 58. 54.100 / 58. 54.100\n libavformat 58. 29.100 / 58. 29.100\n libavdevice 58. 8.100 / 58. 8.100\n libavfilter 7. 57.100 / 7. 57.100\n libavresample 4. 0. 0 / 4. 0. 0\n libswscale 5. 5.100 / 5. 5.100\n libswresample 3. 5.100 / 3. 
5.100\nInput #0, mov,mp4,m4a,3gp,3g2,mj2, from '/groups/ESS/aalnaim/cmaq/prediction.mp4':\n Metadata:\n major_brand : isom\n minor_version : 512\n compatible_brands: isomiso2avc1mp41\n encoder : Lavf58.29.100\n Duration: 00:00:04.80, start: 0.000000, bitrate: 7417 kb/s\n Stream #0:0(und): Video: h264 (High) (avc1 / 0x31637661), yuv420p, 2000x1808, 7415 kb/s, 10 fps, 10 tbr, 10240 tbn, 20 tbc (default)\n Metadata:\n handler_name : VideoHandler\nStream mapping:\n Stream #0:0 -> #0:0 (h264 (native) -> gif (native))\nPress [q] to stop, [?] for help\nOutput #0, gif, to '/groups/ESS/aalnaim/cmaq/prediction.gif':\n Metadata:\n major_brand : isom\n minor_version : 512\n compatible_brands: isomiso2avc1mp41\n encoder : Lavf58.29.100\n Stream #0:0(und): Video: gif, pal8, 2000x1808, q=2-31, 200 kb/s, 10 fps, 100 tbn, 10 tbc (default)\n Metadata:\n handler_name : VideoHandler\n encoder : Lavc58.54.100 gif\nframe= 0 fps=0.0 q=0.0 size= 0kB time=00:00:00.00 bitrate=N/A speed= 0x \nframe= 0 fps=0.0 q=0.0 size= 0kB time=00:00:00.00 bitrate=N/A speed= 0x \n\u001B[1;32m[Parsed_palettegen_2 @ 0xe9ac80] \u001B[0m255(+1) colors generated out of 351763 colors; ratio=0.000725\nframe= 1 fps=0.6 q=-0.0 size= 0kB time=00:00:00.01 bitrate= 0.0kbits/s speed=0.00552x \nframe= 6 fps=2.6 q=-0.0 size= 3072kB time=00:00:00.51 bitrate=49344.8kbits/s speed=0.217x \nframe= 10 fps=3.5 q=-0.0 size= 5376kB time=00:00:00.91 bitrate=48395.8kbits/s speed=0.318x \nframe= 14 fps=4.1 q=-0.0 size= 7680kB time=00:00:01.31 bitrate=48026.4kbits/s speed=0.387x \nframe= 18 fps=4.6 q=-0.0 size= 10240kB time=00:00:01.71 bitrate=49056.2kbits/s speed=0.44x \nframe= 22 fps=4.9 q=-0.0 size= 12800kB time=00:00:02.11 bitrate=49695.5kbits/s speed=0.475x \nframe= 26 fps=5.2 q=-0.0 size= 15360kB time=00:00:02.51 bitrate=50131.1kbits/s speed=0.503x \nframe= 31 fps=5.5 q=-0.0 size= 18432kB time=00:00:03.01 bitrate=50164.4kbits/s speed=0.538x \nframe= 35 fps=5.7 q=-0.0 size= 20736kB time=00:00:03.41 bitrate=49815.0kbits/s speed=0.559x \nframe= 39 fps=5.9 q=-0.0 size= 23040kB time=00:00:03.81 bitrate=49539.0kbits/s speed=0.576x \nframe= 43 fps=6.0 q=-0.0 size= 25600kB time=00:00:04.21 bitrate=49813.6kbits/s speed=0.592x \nframe= 47 fps=6.1 q=-0.0 size= 28160kB time=00:00:04.61 bitrate=50040.5kbits/s speed=0.601x \nframe= 47 fps=6.0 q=-0.0 Lsize= 28948kB time=00:00:04.61 bitrate=51440.5kbits/s speed=0.591x \nvideo:28948kB audio:0kB subtitle:0kB other streams:0kB global headers:0kB muxing overhead: 0.000067%\nffmpeg version 4.2.2 Copyright (c) 2000-2019 the FFmpeg developers\n built with gcc 9.3.0 (GCC)\n configuration: --prefix=/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0/ffmpeg-4.2.2-ey --enable-pic --cc=/opt/sw/spack/0.16.0/lib/spack/env/gcc/gcc --cxx=/opt/sw/spack/0.16.0/lib/spack/env/gcc/g++ --enable-libxcb --enable-libxcb-shape --enable-libxcb-shm --enable-libxcb-xfixes --enable-xlib --disable-libfontconfig --disable-libfreetype --disable-libfribidi --enable-bzlib --enable-libmp3lame --enable-libopenjpeg --disable-libopus --enable-libspeex --enable-libvorbis --enable-avresample --enable-openssl --enable-shared --disable-libzmq --disable-libssh --disable-libwebp --disable-lzma --disable-libsnappy --enable-sdl2 --enable-libaom\n libavutil 56. 31.100 / 56. 31.100\n libavcodec 58. 54.100 / 58. 54.100\n libavformat 58. 29.100 / 58. 29.100\n libavdevice 58. 8.100 / 58. 8.100\n libavfilter 7. 57.100 / 7. 57.100\n libavresample 4. 0. 0 / 4. 0. 0\n libswscale 5. 5.100 / 5. 5.100\n libswresample 3. 5.100 / 3. 
5.100\nInput #0, mov,mp4,m4a,3gp,3g2,mj2, from '/groups/ESS/aalnaim/cmaq/predctionAirNow.mp4':\n Metadata:\n major_brand : isom\n minor_version : 512\n compatible_brands: isomiso2avc1mp41\n encoder : Lavf58.29.100\n Duration: 00:00:02.40, start: 0.000000, bitrate: 7315 kb/s\n Stream #0:0(und): Video: h264 (High) (avc1 / 0x31637661), yuv420p, 2000x1808, 7311 kb/s, 10 fps, 10 tbr, 10240 tbn, 20 tbc (default)\n Metadata:\n handler_name : VideoHandler\nStream mapping:\n Stream #0:0 -> #0:0 (h264 (native) -> gif (native))\nPress [q] to stop, [?] for help\nOutput #0, gif, to '/groups/ESS/aalnaim/cmaq/predctionAirNow.gif':\n Metadata:\n major_brand : isom\n minor_version : 512\n compatible_brands: isomiso2avc1mp41\n encoder : Lavf58.29.100\n Stream #0:0(und): Video: gif, pal8, 2000x1808, q=2-31, 200 kb/s, 10 fps, 100 tbn, 10 tbc (default)\n Metadata:\n handler_name : VideoHandler\n encoder : Lavc58.54.100 gif\nframe= 0 fps=0.0 q=0.0 size= 0kB time=00:00:00.00 bitrate=N/A speed= 0x \n\u001B[1;32m[Parsed_palettegen_2 @ 0xa3b380] \u001B[0m255(+1) colors generated out of 611797 colors; ratio=0.000417\nframe= 1 fps=0.6 q=-0.0 size= 0kB time=00:00:00.01 bitrate= 0.0kbits/s speed=0.00629x \nframe= 6 fps=2.7 q=-0.0 size= 3072kB time=00:00:00.51 bitrate=49344.8kbits/s speed=0.23x \nframe= 10 fps=3.6 q=-0.0 size= 5376kB time=00:00:00.91 bitrate=48395.8kbits/s speed=0.325x \nframe= 14 fps=4.1 q=-0.0 size= 7936kB time=00:00:01.31 bitrate=49627.3kbits/s speed=0.384x \nframe= 18 fps=4.5 q=-0.0 size= 10240kB time=00:00:01.71 bitrate=49056.2kbits/s speed=0.428x \nframe= 22 fps=4.7 q=-0.0 size= 12800kB time=00:00:02.11 bitrate=49695.5kbits/s speed=0.453x \nframe= 23 fps=4.6 q=-0.0 Lsize= 14231kB time=00:00:02.21 bitrate=52750.5kbits/s speed=0.442x \nvideo:14231kB audio:0kB subtitle:0kB other streams:0kB global headers:0kB muxing overhead: 0.000137%\n", + "history_begin_time" : 1654612325236, + "history_end_time" : 1654616699751, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "n74KG3f7cCSj", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 25\n#os.system(\"module load ffmpeg\")\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(home + \"/cmaq/prediction_maps/\"):\n for file in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\n\n\n\n\n# create .gif from .mp4 using FFmpeg\nos.system('module load ffmpeg; ffmpeg -i /groups/ESS/aalnaim/cmaq/prediction.mp4 -vf 
\"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/prediction.mp4')\n\n\n# create .gif from .mp4 using FFmpeg\nos.system('module load ffmpeg; ffmpeg -i /groups/ESS/aalnaim/cmaq/predctionAirNow.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/predctionAirNow.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/predctionAirNow.mp4')\n", + "history_output" : "ffmpeg version 4.2.2 Copyright (c) 2000-2019 the FFmpeg developers\n built with gcc 9.3.0 (GCC)\n configuration: --prefix=/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0/ffmpeg-4.2.2-ey --enable-pic --cc=/opt/sw/spack/0.16.0/lib/spack/env/gcc/gcc --cxx=/opt/sw/spack/0.16.0/lib/spack/env/gcc/g++ --enable-libxcb --enable-libxcb-shape --enable-libxcb-shm --enable-libxcb-xfixes --enable-xlib --disable-libfontconfig --disable-libfreetype --disable-libfribidi --enable-bzlib --enable-libmp3lame --enable-libopenjpeg --disable-libopus --enable-libspeex --enable-libvorbis --enable-avresample --enable-openssl --enable-shared --disable-libzmq --disable-libssh --disable-libwebp --disable-lzma --disable-libsnappy --enable-sdl2 --enable-libaom\n libavutil 56. 31.100 / 56. 31.100\n libavcodec 58. 54.100 / 58. 54.100\n libavformat 58. 29.100 / 58. 29.100\n libavdevice 58. 8.100 / 58. 8.100\n libavfilter 7. 57.100 / 7. 57.100\n libavresample 4. 0. 0 / 4. 0. 0\n libswscale 5. 5.100 / 5. 5.100\n libswresample 3. 5.100 / 3. 5.100\n\u001B[1;31m/groups/ESS/aalnaim/cmaq/prediction.mp4: No such file or directory\n\u001B[0mrm: cannot remove '/groups/ESS/aalnaim/cmaq/prediction.mp4': No such file or directory\nffmpeg version 4.2.2 Copyright (c) 2000-2019 the FFmpeg developers\n built with gcc 9.3.0 (GCC)\n configuration: --prefix=/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0/ffmpeg-4.2.2-ey --enable-pic --cc=/opt/sw/spack/0.16.0/lib/spack/env/gcc/gcc --cxx=/opt/sw/spack/0.16.0/lib/spack/env/gcc/g++ --enable-libxcb --enable-libxcb-shape --enable-libxcb-shm --enable-libxcb-xfixes --enable-xlib --disable-libfontconfig --disable-libfreetype --disable-libfribidi --enable-bzlib --enable-libmp3lame --enable-libopenjpeg --disable-libopus --enable-libspeex --enable-libvorbis --enable-avresample --enable-openssl --enable-shared --disable-libzmq --disable-libssh --disable-libwebp --disable-lzma --disable-libsnappy --enable-sdl2 --enable-libaom\n libavutil 56. 31.100 / 56. 31.100\n libavcodec 58. 54.100 / 58. 54.100\n libavformat 58. 29.100 / 58. 29.100\n libavdevice 58. 8.100 / 58. 8.100\n libavfilter 7. 57.100 / 7. 57.100\n libavresample 4. 0. 0 / 4. 0. 0\n libswscale 5. 5.100 / 5. 5.100\n libswresample 3. 5.100 / 3. 
5.100\n\u001B[1;31m/groups/ESS/aalnaim/cmaq/predctionAirNow.mp4: No such file or directory\n\u001B[0mrm: cannot remove '/groups/ESS/aalnaim/cmaq/predctionAirNow.mp4': No such file or directory\n", + "history_begin_time" : 1654612249570, + "history_end_time" : 1654616699120, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "jWtjlHGcemUf", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 25\nsubprocess.Popen([\"module load ffmpeg\"], shell=True)\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(home + \"/cmaq/prediction_maps/\"):\n for file in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\n\n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave('/groups/ESS/aalnaim/cmaq/prediction.mp4', images, fps=10)\n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave(\"/groups/ESS/aalnaim/cmaq/predctionAirNow.mp4\", images, fps=10)\n\n# create .gif from .mp4 using FFmpeg\nos.system('ffmpeg -i /groups/ESS/aalnaim/cmaq/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/prediction.mp4')\n\n\n# create .gif from .mp4 using FFmpeg\nos.system('ffmpeg -i /groups/ESS/aalnaim/cmaq/predctionAirNow.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/predctionAirNow.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/predctionAirNow.mp4')\n", + "history_output" : 
"/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052600.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052601.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052602.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052603.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052604.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052605.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052606.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052607.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052608.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052609.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052610.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052611.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052612.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052613.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052614.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052615.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052616.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052617.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052618.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052619.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052620.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052621.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052622.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052623.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052700.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052701.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052702.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052703.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052704.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052705.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052706.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052707.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052708.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052709.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052710.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052711.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052712.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052713.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052714.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052715.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052716.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052717.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052718.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052719.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052720.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052721.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052722.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052723.tif\nIMAGEIO FFMPEG_WRITER WARNING: input image is not divisible by macro_block_size=16, resizing from (2000, 1800) to (2000, 1808) to ensure video compatibility with most codecs and players. 
To prevent resizing, make your input image divisible by the macro_block_size or set the macro_block_size to 1 (risking incompatibility).\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052700.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052701.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052702.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052703.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052704.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052705.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052706.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052707.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052708.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052709.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052710.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052711.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052712.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052713.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052714.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052715.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052716.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052717.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052718.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052719.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052720.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052721.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052722.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052723.tif\nIMAGEIO FFMPEG_WRITER WARNING: input image is not divisible by macro_block_size=16, resizing from (2000, 1800) to (2000, 1808) to ensure video compatibility with most codecs and players. 
To prevent resizing, make your input image divisible by the macro_block_size or set the macro_block_size to 1 (risking incompatibility).\nsh: ffmpeg: command not found\nsh: ffmpeg: command not found\n", + "history_begin_time" : 1654543238091, + "history_end_time" : 1654612056260, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "AeZ2UVbaKt7h", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 25\nsubprocess.run([\"module load ffmpeg\"], shell=True)\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(home + \"/cmaq/prediction_maps/\"):\n for file in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\n\n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave('/groups/ESS/aalnaim/cmaq/prediction.mp4', images, fps=10)\n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave(\"/groups/ESS/aalnaim/cmaq/predctionAirNow.mp4\", images, fps=10)\n\n# create .gif from .mp4 using FFmpeg\nos.system('ffmpeg -i /groups/ESS/aalnaim/cmaq/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/prediction.mp4')\n\n\n# create .gif from .mp4 using FFmpeg\nos.system('ffmpeg -i /groups/ESS/aalnaim/cmaq/predctionAirNow.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/predctionAirNow.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/predctionAirNow.mp4')\n", + "history_output" : 
"/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052600.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052601.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052602.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052603.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052604.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052605.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052606.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052607.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052608.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052609.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052610.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052611.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052612.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052613.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052614.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052615.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052616.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052617.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052618.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052619.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052620.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052621.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052622.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052623.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052700.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052701.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052702.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052703.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052704.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052705.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052706.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052707.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052708.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052709.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052710.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052711.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052712.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052713.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052714.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052715.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052716.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052717.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052718.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052719.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052720.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052721.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052722.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052723.tif\nIMAGEIO FFMPEG_WRITER WARNING: input image is not divisible by macro_block_size=16, resizing from (2000, 1800) to (2000, 1808) to ensure video compatibility with most codecs and players. 
To prevent resizing, make your input image divisible by the macro_block_size or set the macro_block_size to 1 (risking incompatibility).\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052700.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052701.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052702.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052703.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052704.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052705.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052706.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052707.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052708.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052709.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052710.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052711.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052712.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052713.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052714.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052715.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052716.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052717.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052718.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052719.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052720.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052721.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052722.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052723.tif\nIMAGEIO FFMPEG_WRITER WARNING: input image is not divisible by macro_block_size=16, resizing from (2000, 1800) to (2000, 1808) to ensure video compatibility with most codecs and players. 
To prevent resizing, make your input image divisible by the macro_block_size or set the macro_block_size to 1 (risking incompatibility).\nsh: ffmpeg: command not found\nsh: ffmpeg: command not found\n", + "history_begin_time" : 1654543129781, + "history_end_time" : 1654612055692, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "nFQHSJwfBoGv", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 25\nsubprocess.run([\"module load ffmpeg\"])\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(home + \"/cmaq/prediction_maps/\"):\n for file in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\n\n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave('/groups/ESS/aalnaim/cmaq/prediction.mp4', images, fps=10)\n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave(\"/groups/ESS/aalnaim/cmaq/predctionAirNow.mp4\", images, fps=10)\n\n# create .gif from .mp4 using FFmpeg\nos.system('ffmpeg -i /groups/ESS/aalnaim/cmaq/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/prediction.mp4')\n\n\n# create .gif from .mp4 using FFmpeg\nos.system('ffmpeg -i /groups/ESS/aalnaim/cmaq/predctionAirNow.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/predctionAirNow.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/predctionAirNow.mp4')\n", + "history_output" : "Traceback (most recent call last):\n File \"prediction_maps.py\", line 18, in \n subprocess.run([\"module load ffmpeg\"])\n File \"/opt/sw/spack/apps/linux-centos8-x86_64/gcc-9.3.0/python-3.8.6-ff/lib/python3.8/subprocess.py\", line 489, in run\n with Popen(*popenargs, **kwargs) as process:\n File \"/opt/sw/spack/apps/linux-centos8-x86_64/gcc-9.3.0/python-3.8.6-ff/lib/python3.8/subprocess.py\", line 854, in __init__\n self._execute_child(args, executable, preexec_fn, close_fds,\n File 
\"/opt/sw/spack/apps/linux-centos8-x86_64/gcc-9.3.0/python-3.8.6-ff/lib/python3.8/subprocess.py\", line 1702, in _execute_child\n raise child_exception_type(errno_num, err_msg, err_filename)\nFileNotFoundError: [Errno 2] No such file or directory: 'module load ffmpeg'\n", + "history_begin_time" : 1654543056266, + "history_end_time" : 1654612054724, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "XAuF3xRdZNul", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\nimport subprocess\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 25\nsubprocess.run([\"module\", \"load\", \"ffmpeg\"])\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(home + \"/cmaq/prediction_maps/\"):\n for file in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\n\n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave('/groups/ESS/aalnaim/cmaq/prediction.mp4', images, fps=10)\n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave(\"/groups/ESS/aalnaim/cmaq/predctionAirNow.mp4\", images, fps=10)\n\n# create .gif from .mp4 using FFmpeg\nos.system('ffmpeg -i /groups/ESS/aalnaim/cmaq/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/prediction.mp4')\n\n\n# create .gif from .mp4 using FFmpeg\nos.system('ffmpeg -i /groups/ESS/aalnaim/cmaq/predctionAirNow.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/predctionAirNow.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/predctionAirNow.mp4')\n", + "history_output" : "Traceback (most recent call last):\n File \"prediction_maps.py\", line 18, in \n subprocess.run([\"module\", \"load\", \"ffmpeg\"])\n File \"/opt/sw/spack/apps/linux-centos8-x86_64/gcc-9.3.0/python-3.8.6-ff/lib/python3.8/subprocess.py\", line 489, in run\n with Popen(*popenargs, **kwargs) as process:\n File \"/opt/sw/spack/apps/linux-centos8-x86_64/gcc-9.3.0/python-3.8.6-ff/lib/python3.8/subprocess.py\", line 854, in __init__\n self._execute_child(args, executable, 
preexec_fn, close_fds,\n File \"/opt/sw/spack/apps/linux-centos8-x86_64/gcc-9.3.0/python-3.8.6-ff/lib/python3.8/subprocess.py\", line 1702, in _execute_child\n raise child_exception_type(errno_num, err_msg, err_filename)\nFileNotFoundError: [Errno 2] No such file or directory: 'module'\n", + "history_begin_time" : 1654542992068, + "history_end_time" : 1654612054306, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "7S2WHoRQempe", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 25\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(home + \"/cmaq/prediction_maps/\"):\n for file in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\n\n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave('/groups/ESS/aalnaim/cmaq/prediction.mp4', images, fps=10)\n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave(\"/groups/ESS/aalnaim/cmaq/predctionAirNow.mp4\", images, fps=10)\n\n# create .gif from .mp4 using FFmpeg\nos.system('module load ffmpeg -i /groups/ESS/aalnaim/cmaq/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/prediction.mp4')\n\n\n# create .gif from .mp4 using FFmpeg\nos.system('module load ffmpeg -i /groups/ESS/aalnaim/cmaq/predctionAirNow.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/predctionAirNow.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/predctionAirNow.mp4')\n", + "history_output" : 
"/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052600.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052601.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052602.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052603.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052604.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052605.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052606.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052607.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052608.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052609.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052610.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052611.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052612.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052613.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052614.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052615.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052616.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052617.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052618.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052619.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052620.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052621.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052622.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052623.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052700.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052701.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052702.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052703.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052704.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052705.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052706.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052707.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052708.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052709.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052710.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052711.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052712.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052713.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052714.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052715.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052716.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052717.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052718.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052719.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052720.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052721.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052722.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052723.tif\nIMAGEIO FFMPEG_WRITER WARNING: input image is not divisible by macro_block_size=16, resizing from (2000, 1800) to (2000, 1808) to ensure video compatibility with most codecs and players. 
To prevent resizing, make your input image divisible by the macro_block_size or set the macro_block_size to 1 (risking incompatibility).\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052700.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052701.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052702.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052703.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052704.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052705.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052706.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052707.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052708.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052709.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052710.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052711.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052712.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052713.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052714.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052715.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052716.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052717.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052718.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052719.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052720.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052721.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052722.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052723.tif\nIMAGEIO FFMPEG_WRITER WARNING: input image is not divisible by macro_block_size=16, resizing from (2000, 1800) to (2000, 1808) to ensure video compatibility with most codecs and players. To prevent resizing, make your input image divisible by the macro_block_size or set the macro_block_size to 1 (risking incompatibility).\nLmod has detected the following error: The following module(s) are unknown:\n\"/groups/ESS/aalnaim/cmaq/prediction.mp4\"\n\"/groups/ESS/aalnaim/cmaq/prediction.gif\"\n\"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse,\nfps=10\"\n\nPlease check the spelling or version number. Also try \"module spider ...\"\nIt is also possible your cache file is out-of-date; it may help to try:\n $ module --ignore-cache load \"/groups/ESS/aalnaim/cmaq/prediction.mp4\"\n\"/groups/ESS/aalnaim/cmaq/prediction.gif\"\n\"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse,\nfps=10\"\n\nAlso make sure that all modulefiles written in TCL start with the string\n#%Module\n\n\n\nLmod has detected the following error: The following module(s) are unknown:\n\"/groups/ESS/aalnaim/cmaq/predctionAirNow.mp4\"\n\"/groups/ESS/aalnaim/cmaq/predctionAirNow.gif\"\n\"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse,\nfps=10\"\n\nPlease check the spelling or version number. 
Also try \"module spider ...\"\nIt is also possible your cache file is out-of-date; it may help to try:\n $ module --ignore-cache load \"/groups/ESS/aalnaim/cmaq/predctionAirNow.mp4\"\n\"/groups/ESS/aalnaim/cmaq/predctionAirNow.gif\"\n\"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse,\nfps=10\"\n\nAlso make sure that all modulefiles written in TCL start with the string\n#%Module\n\n\n\n", + "history_begin_time" : 1654542816465, + "history_end_time" : 1654612053775, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "n6gawnwlgjz", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 25\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(home + \"/cmaq/prediction_maps/\"):\n for file in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\n\n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave('/groups/ESS/aalnaim/cmaq/prediction.mp4', images, fps=10)\n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave(\"/groups/ESS/aalnaim/cmaq/predctionAirNow.mp4\", images, fps=10)\n\n# create .gif from .mp4 using FFmpeg\nos.system('ffmpeg -i /groups/ESS/aalnaim/cmaq/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/prediction.mp4')\n\n\n# create .gif from .mp4 using FFmpeg\nos.system('ffmpeg -i /groups/ESS/aalnaim/cmaq/predctionAirNow.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/predctionAirNow.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/predctionAirNow.mp4')\n", + "history_output" : 
"/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052600.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052601.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052602.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052603.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052604.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052605.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052606.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052607.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052608.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052609.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052610.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052611.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052612.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052613.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052614.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052615.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052616.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052617.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052618.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052619.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052620.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052621.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052622.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052623.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052700.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052701.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052702.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052703.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052704.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052705.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052706.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052707.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052708.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052709.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052710.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052711.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052712.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052713.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052714.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052715.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052716.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052717.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052718.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052719.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052720.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052721.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052722.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052723.tif\nIMAGEIO FFMPEG_WRITER WARNING: input image is not divisible by macro_block_size=16, resizing from (2000, 1800) to (2000, 1808) to ensure video compatibility with most codecs and players. 
To prevent resizing, make your input image divisible by the macro_block_size or set the macro_block_size to 1 (risking incompatibility).\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052700.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052701.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052702.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052703.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052704.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052705.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052706.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052707.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052708.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052709.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052710.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052711.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052712.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052713.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052714.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052715.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052716.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052717.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052718.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052719.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052720.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052721.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052722.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052723.tif\nIMAGEIO FFMPEG_WRITER WARNING: input image is not divisible by macro_block_size=16, resizing from (2000, 1800) to (2000, 1808) to ensure video compatibility with most codecs and players. 
To prevent resizing, make your input image divisible by the macro_block_size or set the macro_block_size to 1 (risking incompatibility).\nsh: ffmpeg: command not found\nsh: ffmpeg: command not found\n", + "history_begin_time" : 1654542716355, + "history_end_time" : 1654542762828, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "n7D7FuOYZu5v", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 25\n\nos.system('module load ffmpeg') # Uncomment if running on HOPPER\nos.system('ffmpeg')\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(home + \"/cmaq/prediction_maps/\"):\n for file in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\n\n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave('/groups/ESS/aalnaim/cmaq/prediction.mp4', images, fps=10)\n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave(\"/groups/ESS/aalnaim/cmaq/predctionAirNow.mp4\", images, fps=10)\n\n# create .gif from .mp4 using FFmpeg\nos.system('ffmpeg -i /groups/ESS/aalnaim/cmaq/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/prediction.mp4')\n\n\n# create .gif from .mp4 using FFmpeg\nos.system('ffmpeg -i /groups/ESS/aalnaim/cmaq/predctionAirNow.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/predctionAirNow.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/predctionAirNow.mp4')\n", + "history_output" : "sh: ffmpeg: command not 
found\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052600.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052601.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052602.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052603.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052604.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052605.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052606.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052607.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052608.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052609.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052610.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052611.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052612.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052613.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052614.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052615.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052616.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052617.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052618.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052619.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052620.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052621.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052622.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052623.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052700.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052701.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052702.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052703.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052704.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052705.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052706.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052707.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052708.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052709.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052710.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052711.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052712.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052713.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052714.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052715.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052716.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052717.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052718.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052719.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052720.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052721.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052722.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052723.tif\nIMAGEIO FFMPEG_WRITER WARNING: input image is not divisible by macro_block_size=16, resizing from (2000, 1800) to (2000, 1808) to ensure video compatibility with most codecs and players. 
To prevent resizing, make your input image divisible by the macro_block_size or set the macro_block_size to 1 (risking incompatibility).\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052700.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052701.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052702.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052703.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052704.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052705.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052706.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052707.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052708.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052709.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052710.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052711.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052712.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052713.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052714.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052715.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052716.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052717.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052718.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052719.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052720.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052721.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052722.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052723.tif\nIMAGEIO FFMPEG_WRITER WARNING: input image is not divisible by macro_block_size=16, resizing from (2000, 1800) to (2000, 1808) to ensure video compatibility with most codecs and players. 
To prevent resizing, make your input image divisible by the macro_block_size or set the macro_block_size to 1 (risking incompatibility).\nsh: ffmpeg: command not found\nsh: ffmpeg: command not found\n", + "history_begin_time" : 1654542260336, + "history_end_time" : 1654616701091, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "ruL054c6Kkyh", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 25\nos.system('module load ffmpeg') # Uncomment if running on HOPPER\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(home + \"/cmaq/prediction_maps/\"):\n for file in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave(\"/groups/ESS/aalnaim/cmaq/predctionAirNow.mp4\", images, fps=10)\n\n# create .gif from .mp4 using FFmpeg\nos.system('ffmpeg -i /groups/ESS/aalnaim/cmaq/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/prediction.mp4')\n\n\n# create .gif from .mp4 using FFmpeg\nos.system('ffmpeg -i /groups/ESS/aalnaim/cmaq/predctionAirNow.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/predctionAirNow.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/predctionAirNow.mp4')\n", + "history_output" : 
"/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052700.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052701.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052702.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052703.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052704.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052705.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052706.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052707.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052708.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052709.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052710.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052711.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052712.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052713.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052714.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052715.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052716.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052717.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052718.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052719.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052720.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052721.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052722.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_2022052723.tif\nIMAGEIO FFMPEG_WRITER WARNING: input image is not divisible by macro_block_size=16, resizing from (2000, 1800) to (2000, 1808) to ensure video compatibility with most codecs and players. To prevent resizing, make your input image divisible by the macro_block_size or set the macro_block_size to 1 (risking incompatibility).\nsh: ffmpeg: command not found\nsh: ffmpeg: command not found\n", + "history_begin_time" : 1654541264666, + "history_end_time" : 1654616701806, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "5RTy9WcP8aXY", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 25\nos.system('module load ffmpeg') # Uncomment if running on HOPPER\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(home + \"/cmaq/prediction_maps/\"):\n for file in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Airnow_*.tif\")\n#files = sorted(files)\nfiles = 
sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave(\"/groups/ESS/aalnaim/cmaq/predctionAirNow.mp4\", images, fps=10)\n\n# create .gif from .mp4 using FFmpeg\nos.system('ffmpeg -i /groups/ESS/aalnaim/cmaq/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/prediction.mp4')\n\n\n# create .gif from .mp4 using FFmpeg\nos.system('ffmpeg -i /groups/ESS/aalnaim/cmaq/predctionAirNow.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/predctionAirNow.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/predctionAirNow.mp4')\n", + "history_output" : "Traceback (most recent call last):\n File \"prediction_maps.py\", line 53, in \n imageio.mimsave(\"/groups/ESS/aalnaim/cmaq/predctionAirNow.mp4\", images, fps=10)\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/imageio/v2.py\", line 331, in mimwrite\n return file.write(ims, **kwargs)\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/imageio/core/legacy_plugin_wrapper.py\", line 187, in write\n raise RuntimeError(\"Zero images were written.\")\nRuntimeError: Zero images were written.\n", + "history_begin_time" : 1654541069223, + "history_end_time" : 1655141009325, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "OsNeV3ihkRmE", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 25\nos.system('module load ffmpeg') # Uncomment if running on HOPPER\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(home + \"/cmaq/prediction_maps/\"):\n for file in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\n\n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave('/groups/ESS/aalnaim/cmaq/prediction.mp4', images, fps=10)\n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Airnow_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave(\"/groups/ESS/aalnaim/cmaq/predctionAirNow.mp4\", images, fps=10)\n\n# create .gif from .mp4 using 
FFmpeg\nos.system('ffmpeg -i /groups/ESS/aalnaim/cmaq/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/prediction.mp4')\n\n\n# create .gif from .mp4 using FFmpeg\nos.system('ffmpeg -i /groups/ESS/aalnaim/cmaq/predctionAirNow.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/predctionAirNow.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/predctionAirNow.mp4')\n", + "history_output" : "/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052600.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052601.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052602.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052603.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052604.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052605.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052606.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052607.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052608.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052609.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052610.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052611.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052612.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052613.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052614.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052615.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052616.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052617.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052618.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052619.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052620.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052621.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052622.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052623.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052700.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052701.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052702.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052703.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052704.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052705.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052706.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052707.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052708.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052709.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052710.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052711.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052712.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052713.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052714.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052715.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052716.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052717.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052718.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052719.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052720.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052721.tif\n/groups/ESS/aalnaim/cmaq/prediction
_maps/CMAQ_2022052722.tif\n/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_2022052723.tif\nIMAGEIO FFMPEG_WRITER WARNING: input image is not divisible by macro_block_size=16, resizing from (2000, 1800) to (2000, 1808) to ensure video compatibility with most codecs and players. To prevent resizing, make your input image divisible by the macro_block_size or set the macro_block_size to 1 (risking incompatibility).\nTraceback (most recent call last):\n File \"prediction_maps.py\", line 67, in \n imageio.mimsave(\"/groups/ESS/aalnaim/cmaq/predctionAirNow.mp4\", images, fps=10)\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/imageio/v2.py\", line 331, in mimwrite\n return file.write(ims, **kwargs)\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/imageio/core/legacy_plugin_wrapper.py\", line 187, in write\n raise RuntimeError(\"Zero images were written.\")\nRuntimeError: Zero images were written.\n", + "history_begin_time" : 1654540922740, + "history_end_time" : 1655141010052, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "uokU0G6ljr7D", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 25\nos.system('module load ffmpeg') # Uncomment if running on HOPPER\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(home + \"/cmaq/prediction_maps/\"):\n for file in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\n\n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave('/groups/ESS/aalnaim/cmaq/prediction.mp4', images, fps=10)\n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Airnow_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave(\"/groups/ESS/aalnaim/cmaq/predctionAirNow.mp4\", images, fps=10)\n\n# create .gif from .mp4 using FFmpeg\nos.system('ffmpeg -i /groups/ESS/aalnaim/cmaq/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/prediction.mp4')\n\n\n# create .gif from .mp4 using FFmpeg\nos.system('ffmpeg -i /groups/ESS/aalnaim/cmaq/predctionAirNow.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, 
fps=10\" /groups/ESS/aalnaim/cmaq/predctionAirNow.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/predctionAirNow.mp4')\n", + "history_output" : null, + "history_begin_time" : 1654540891652, + "history_end_time" : null, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : null +},{ + "history_id" : "CGQzJQjhEbCj", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 25\nos.system('module load ffmpeg') # Uncomment if running on HOPPER\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(home + \"/cmaq/prediction_maps/\"):\n for file in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\n\n\nfor t in time_:\n \n df = dfs[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = gdf.set_crs(\"EPSG:4326\")\n #gdf = gdf.to_crs(\"+proj=lcc +lat_1=2 +lat_2=33.000 +lat_0=45.000 +lon_0=-97.000 +x_0=-97.000 +y_0=40.000 +datum=NAD83 +units=m +no_defs\")\n\n gdf.plot(column='prediction', legend=False, figsize=(20, 18),\n cmap=cmap, vmin=0,vmax=80 )\n #cb = plt.colorbar(sm, ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n plt.title(plotTitle, fontdict={'fontsize': 35})\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_\" + str(t) + \".tif\")\n\n \nfor t_Airnow, t in zip(time_Airnow, time_):\n \n df = dfs[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = gdf.set_crs(\"EPSG:4326\")\n \n \n predDf = gdf.plot(column='prediction', legend=False, figsize=(20, 18),\n cmap=cmap, vmin=0,vmax=84 )\n \n df_Airnow = dfs_Airnow[t_Airnow]\n # Add individual AirNow stations in US with color representation.\n \n gdfAirnow = gpd.GeoDataFrame(df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n\n gdfAirnow = gdfAirnow.set_crs(\"EPSG:4326\")\n gdfAirnow.plot(ax=predDf, column='AirNOW_O3', marker='o', markersize=65, cmap=cmap, figsize=(20, 15), linewidths=1, edgecolors=\"black\")\n \n \n dateObj = datetime.strptime(str(t), 
\"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n plt.title(\"AirNow Stations: \"+plotTitle, fontdict={'fontsize': 35})\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_\" + str(t) + \".tif\")\n \nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave('/groups/ESS/aalnaim/cmaq/prediction.mp4', images, fps=10)\n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Airnow_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave(\"/groups/ESS/aalnaim/cmaq/predctionAirNow.mp4\", images, fps=10)\n\n# create .gif from .mp4 using FFmpeg\nos.system('ffmpeg -i /groups/ESS/aalnaim/cmaq/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/prediction.mp4')\n\n\n# create .gif from .mp4 using FFmpeg\nos.system('ffmpeg -i /groups/ESS/aalnaim/cmaq/predctionAirNow.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/predctionAirNow.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/predctionAirNow.mp4')\n", + "history_output" : "/home/aalnaim/CMAQAI/lib/python3.8/site-packages/geopandas/plotting.py:661: RuntimeWarning: More than 20 figures have been opened. Figures created through the pyplot interface (`matplotlib.pyplot.figure`) are retained until explicitly closed and may consume too much memory. 
(To control this warning, see the rcParam `figure.max_open_warning`).\n fig, ax = plt.subplots(figsize=figsize)\n", + "history_begin_time" : 1654537256684, + "history_end_time" : 1654540891689, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Failed" +},{ + "history_id" : "btjgotj4IVOm", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 25\nos.system('module load ffmpeg') # Uncomment if running on HOPPER\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(home + \"/cmaq/prediction_maps/\"):\n for file in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\n\n\nfor t in time_:\n \n df = dfs[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = gdf.set_crs(\"EPSG:4326\")\n #gdf = gdf.to_crs(\"+proj=lcc +lat_1=2 +lat_2=33.000 +lat_0=45.000 +lon_0=-97.000 +x_0=-97.000 +y_0=40.000 +datum=NAD83 +units=m +no_defs\")\n\n gdf.plot(column='prediction', legend=False, figsize=(20, 18),\n cmap=cmap, vmin=0,vmax=80 )\n #cb = plt.colorbar(sm, ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n plt.title(plotTitle, fontdict={'fontsize': 35})\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_\" + str(t) + \".tif\")\n\n \nfor t_Airnow, t in zip(time_Airnow, time_):\n \n df = dfs[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = gdf.set_crs(\"EPSG:4326\")\n \n \n predDf = gdf.plot(column='prediction', legend=False, figsize=(20, 18),\n cmap=cmap, vmin=0,vmax=84 )\n \n df_Airnow = dfs_Airnow[t_Airnow]\n # Add individual AirNow stations in US with color representation.\n \n gdfAirnow = gpd.GeoDataFrame(df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n\n gdfAirnow = gdfAirnow.set_crs(\"EPSG:4326\")\n gdfAirnow.plot(ax=predDf, column='AirNOW_O3', marker='o', markersize=65, cmap=cmap, figsize=(20, 15), linewidths=1, edgecolors=\"black\")\n \n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, 
\"%Y-%m-%d (Time: %-H)\")\n plt.title(\"AirNow Stations: \"+plotTitle, fontdict={'fontsize': 35})\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_\" + str(t) + \".tif\")\n \nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave(\"/groups/ESS/aalnaim/cmaq/prediction.mp4', images, fps=10)\n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Airnow_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave(\"/groups/ESS/aalnaim/cmaq/predctionAirNow.mp4\", images, fps=10)\n\n# create .gif from .mp4 using FFmpeg\nos.system('ffmpeg -i /groups/ESS/aalnaim/cmaq/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/prediction.mp4')\n\n\n# create .gif from .mp4 using FFmpeg\nos.system('ffmpeg -i /groups/ESS/aalnaim/cmaq/predctionAirNow.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/predctionAirNow.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/predctionAirNow.mp4')\n", + "history_output" : " File \"prediction_maps.py\", line 110\n imageio.mimsave(\"/groups/ESS/aalnaim/cmaq/prediction.mp4', images, fps=10)\n ^\nSyntaxError: EOL while scanning string literal\n", + "history_begin_time" : 1654536959895, + "history_end_time" : 1654537090230, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "2x6aznc7yzz", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 25\nos.system('module load ffmpeg') # Uncomment if running on HOPPER\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(home + \"/cmaq/prediction_maps/\"):\n for file in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv')\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\n\n\nfor t in time_:\n \n df = dfs[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = gdf.set_crs(\"EPSG:4326\")\n #gdf = gdf.to_crs(\"+proj=lcc +lat_1=2 +lat_2=33.000 +lat_0=45.000 +lon_0=-97.000 +x_0=-97.000 +y_0=40.000 +datum=NAD83 +units=m +no_defs\")\n\n gdf.plot(column='prediction', legend=False, figsize=(20, 18),\n cmap=cmap, 
vmin=0,vmax=80 )\n #cb = plt.colorbar(sm, ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n plt.title(plotTitle, fontdict={'fontsize': 35})\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_\" + str(t) + \".tif\")\n\n \nfor t_Airnow, t in zip(time_Airnow, time_):\n \n df = dfs[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = gdf.set_crs(\"EPSG:4326\")\n \n \n predDf = gdf.plot(column='prediction', legend=False, figsize=(20, 18),\n cmap=cmap, vmin=0,vmax=84 )\n \n df_Airnow = dfs_Airnow[t_Airnow]\n # Add individual AirNow stations in US with color representation.\n \n gdfAirnow = gpd.GeoDataFrame(df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n\n gdfAirnow = gdfAirnow.set_crs(\"EPSG:4326\")\n gdfAirnow.plot(ax=predDf, column='AirNOW_O3', marker='o', markersize=65, cmap=cmap, figsize=(20, 15), linewidths=1, edgecolors=\"black\")\n \n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n plt.title(\"AirNow Stations: \"+plotTitle, fontdict={'fontsize': 35})\n plt.savefig(\"/groups/ESS/aalnaim/cmaq/prediction_maps/AirNow_\" + str(t) + \".tif\")\n \nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/CMAQ_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave(\"/groups/ESS/aalnaim/cmaq/prediction.mp4', images, fps=10)\n\n\nfiles=glob.glob(\"/groups/ESS/aalnaim/cmaq/prediction_maps/Airnow_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave(\"/groups/ESS/aalnaim/cmaq/predctionAirNow.mp4\", images, fps=10)\n\n# create .gif from .mp4 using FFmpeg\nos.system('ffmpeg -i /groups/ESS/aalnaim/cmaq/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/prediction.mp4')\n\n\n# create .gif from .mp4 using FFmpeg\nos.system('ffmpeg -i /groups/ESS/aalnaim/cmaq/predctionAirNow.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" /groups/ESS/aalnaim/cmaq/predctionAirNow.gif')\n\n# remove the created .mp4 file\nos.system('rm /groups/ESS/aalnaim/cmaq/predctionAirNow.mp4')\n", + "history_output" : " File \"prediction_maps.py\", line 110\n imageio.mimsave(\"/groups/ESS/aalnaim/cmaq/prediction.mp4', images, fps=10)\n ^\nSyntaxError: EOL while scanning string literal\n", + "history_begin_time" : 1654500507944, + "history_end_time" : 1654500512174, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : "p6wvf2", + "indicator" : "Failed" +},{ + "history_id" : "qyxg89sb7zq", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as 
plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 25\n#os.system('module load ffmpeg') # Uncomment if running on HOPPER\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(home + \"/cmaq/prediction_maps/\"):\n for file in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv(home + '/cmaq/prediction_files/prediction_rf.csv')\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\n\n\nfor t in time_:\n \n df = dfs[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = gdf.set_crs(\"EPSG:4326\")\n #gdf = gdf.to_crs(\"+proj=lcc +lat_1=2 +lat_2=33.000 +lat_0=45.000 +lon_0=-97.000 +x_0=-97.000 +y_0=40.000 +datum=NAD83 +units=m +no_defs\")\n\n gdf.plot(column='prediction', legend=False, figsize=(20, 18),\n cmap=cmap, vmin=0,vmax=80 )\n #cb = plt.colorbar(sm, ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n plt.title(plotTitle, fontdict={'fontsize': 35})\n plt.savefig(home + \"/cmaq/prediction_maps/CMAQ_\" + str(t) + \".tif\")\n\n \nfor t_Airnow, t in zip(time_Airnow, time_):\n \n df = dfs[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = gdf.set_crs(\"EPSG:4326\")\n \n \n predDf = gdf.plot(column='prediction', legend=False, figsize=(20, 18),\n cmap=cmap, vmin=0,vmax=84 )\n \n df_Airnow = dfs_Airnow[t_Airnow]\n # Add individual AirNow stations in US with color representation.\n \n gdfAirnow = gpd.GeoDataFrame(df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n\n gdfAirnow = gdfAirnow.set_crs(\"EPSG:4326\")\n gdfAirnow.plot(ax=predDf, column='AirNOW_O3', marker='o', markersize=65, cmap=cmap, figsize=(20, 15), linewidths=1, edgecolors=\"black\")\n \n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n plt.title(\"AirNow Stations: \"+plotTitle, fontdict={'fontsize': 35})\n plt.savefig(home + \"/cmaq/prediction_maps/AirNow_\" + str(t) + \".tif\")\n \nfiles=glob.glob(home + \"/cmaq/prediction_maps/CMAQ_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave(home+'/prediction.mp4', images, fps=10)\n\n\nfiles=glob.glob(\"/Users/uhhmed/prediction_maps/Airnow_*.tif\")\n#files = 
sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave(\"/Users/uhhmed/predctionAirNow.mp4\", images, fps=10)\n\n# create .gif from .mp4 using FFmpeg\nos.system('ffmpeg -i '+ home + '/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" '+ home + '/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm '+ home + '/prediction.mp4')\n\n\n# create .gif from .mp4 using FFmpeg\nos.system('ffmpeg -i '+ home + '/predctionAirNow.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" '+ home + '/predctionAirNow.gif')\n\n# remove the created .mp4 file\nos.system('rm '+ home + '/predctionAirNow.mp4')\n", + "history_output" : "Traceback (most recent call last):\n File \"prediction_maps.py\", line 29, in \n df = pd.read_csv(home + '/cmaq/prediction_files/prediction_rf.csv')\n File \"/home/zsun/anaconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 686, in read_csv\n return _read(filepath_or_buffer, kwds)\n File \"/home/zsun/anaconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 452, in _read\n parser = TextFileReader(fp_or_buf, **kwds)\n File \"/home/zsun/anaconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 946, in __init__\n self._make_engine(self.engine)\n File \"/home/zsun/anaconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 1178, in _make_engine\n self._engine = CParserWrapper(self.f, **self.options)\n File \"/home/zsun/anaconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 2008, in __init__\n self._reader = parsers.TextReader(src, **kwds)\n File \"pandas/_libs/parsers.pyx\", line 382, in pandas._libs.parsers.TextReader.__cinit__\n File \"pandas/_libs/parsers.pyx\", line 674, in pandas._libs.parsers.TextReader._setup_parser_source\nFileNotFoundError: [Errno 2] No such file or directory: '/home/zsun/cmaq/prediction_files/prediction_rf.csv'\n", + "history_begin_time" : 1654477566675, + "history_end_time" : 1654477601258, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : "roeaa3", + "indicator" : "Failed" +},{ + "history_id" : "iqmp8lo8hbd", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 25\n#os.system('module load ffmpeg') # Uncomment if running on HOPPER\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(home + \"/cmaq/prediction_maps/\"):\n for file in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv(home + '/cmaq/prediction_files/prediction_rf.csv')\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\n\n\nfor t in time_:\n \n 
df = dfs[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = gdf.set_crs(\"EPSG:4326\")\n #gdf = gdf.to_crs(\"+proj=lcc +lat_1=2 +lat_2=33.000 +lat_0=45.000 +lon_0=-97.000 +x_0=-97.000 +y_0=40.000 +datum=NAD83 +units=m +no_defs\")\n\n gdf.plot(column='prediction', legend=False, figsize=(20, 18),\n cmap=cmap, vmin=0,vmax=80 )\n #cb = plt.colorbar(sm, ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n plt.title(plotTitle, fontdict={'fontsize': 35})\n plt.savefig(home + \"/cmaq/prediction_maps/CMAQ_\" + str(t) + \".tif\")\n\n \nfor t_Airnow, t in zip(time_Airnow, time_):\n \n df = dfs[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = gdf.set_crs(\"EPSG:4326\")\n \n \n predDf = gdf.plot(column='prediction', legend=False, figsize=(20, 18),\n cmap=cmap, vmin=0,vmax=84 )\n \n df_Airnow = dfs_Airnow[t_Airnow]\n # Add individual AirNow stations in US with color representation.\n \n gdfAirnow = gpd.GeoDataFrame(df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n\n gdfAirnow = gdfAirnow.set_crs(\"EPSG:4326\")\n gdfAirnow.plot(ax=predDf, column='AirNOW_O3', marker='o', markersize=65, cmap=cmap, figsize=(20, 15), linewidths=1, edgecolors=\"black\")\n \n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n plt.title(\"AirNow Stations: \"+plotTitle, fontdict={'fontsize': 35})\n plt.savefig(home + \"/cmaq/prediction_maps/AirNow_\" + str(t) + \".tif\")\n \nfiles=glob.glob(home + \"/cmaq/prediction_maps/CMAQ_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave(home+'/prediction.mp4', images, fps=10)\n\n\nfiles=glob.glob(\"/Users/uhhmed/prediction_maps/Airnow_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave(\"/Users/uhhmed/predctionAirNow.mp4\", images, fps=10)\n\n# create .gif from .mp4 using FFmpeg\nos.system('ffmpeg -i '+ home + '/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" '+ home + '/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm '+ home + '/prediction.mp4')\n\n\n# create .gif from .mp4 using FFmpeg\nos.system('ffmpeg -i '+ home + '/predctionAirNow.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" '+ home + '/predctionAirNow.gif')\n\n# remove the created .mp4 file\nos.system('rm '+ home + '/predctionAirNow.mp4')\n", + "history_output" : "Traceback (most recent call last):\n File \"prediction_maps.py\", line 7, in \n import geopandas as gpd\nModuleNotFoundError: No module named 'geopandas'\n", + "history_begin_time" : 1654466004552, + "history_end_time" : 1654466005321, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : "100001", + "indicator" : 
"Failed" +},{ + "history_id" : "6fvj9bybla5", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 25\n#os.system('module load ffmpeg') # Uncomment if running on HOPPER\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(home + \"/cmaq/prediction_maps/\"):\n for file in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv(home + '/cmaq/prediction_files/prediction_rf.csv')\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\n\n\nfor t in time_:\n \n df = dfs[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = gdf.set_crs(\"EPSG:4326\")\n #gdf = gdf.to_crs(\"+proj=lcc +lat_1=2 +lat_2=33.000 +lat_0=45.000 +lon_0=-97.000 +x_0=-97.000 +y_0=40.000 +datum=NAD83 +units=m +no_defs\")\n\n gdf.plot(column='prediction', legend=False, figsize=(20, 18),\n cmap=cmap, vmin=0,vmax=80 )\n #cb = plt.colorbar(sm, ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n plt.title(plotTitle, fontdict={'fontsize': 35})\n plt.savefig(home + \"/cmaq/prediction_maps/CMAQ_\" + str(t) + \".tif\")\n\n \nfor t_Airnow, t in zip(time_Airnow, time_):\n \n df = dfs[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = gdf.set_crs(\"EPSG:4326\")\n \n \n predDf = gdf.plot(column='prediction', legend=False, figsize=(20, 18),\n cmap=cmap, vmin=0,vmax=84 )\n \n df_Airnow = dfs_Airnow[t_Airnow]\n # Add individual AirNow stations in US with color representation.\n \n gdfAirnow = gpd.GeoDataFrame(df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n\n gdfAirnow = gdfAirnow.set_crs(\"EPSG:4326\")\n gdfAirnow.plot(ax=predDf, column='AirNOW_O3', marker='o', markersize=65, cmap=cmap, figsize=(20, 15), linewidths=1, edgecolors=\"black\")\n \n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n plt.title(\"AirNow Stations: \"+plotTitle, fontdict={'fontsize': 35})\n plt.savefig(home + \"/cmaq/prediction_maps/AirNow_\" + str(t) + \".tif\")\n \nfiles=glob.glob(home + \"/cmaq/prediction_maps/CMAQ_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n 
print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave(home+'/prediction.mp4', images, fps=10)\n\n\nfiles=glob.glob(\"/Users/uhhmed/prediction_maps/Airnow_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave(\"/Users/uhhmed/predctionAirNow.mp4\", images, fps=10)\n\n# create .gif from .mp4 using FFmpeg\nos.system('ffmpeg -i '+ home + '/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" '+ home + '/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm '+ home + '/prediction.mp4')\n\n\n# create .gif from .mp4 using FFmpeg\nos.system('ffmpeg -i '+ home + '/predctionAirNow.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" '+ home + '/predctionAirNow.gif')\n\n# remove the created .mp4 file\nos.system('rm '+ home + '/predctionAirNow.mp4')\n", + "history_output" : "Traceback (most recent call last):\n File \"prediction_maps.py\", line 29, in \n df = pd.read_csv(home + '/cmaq/prediction_files/prediction_rf.csv')\n File \"/home/zsun/anaconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 686, in read_csv\n return _read(filepath_or_buffer, kwds)\n File \"/home/zsun/anaconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 452, in _read\n parser = TextFileReader(fp_or_buf, **kwds)\n File \"/home/zsun/anaconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 946, in __init__\n self._make_engine(self.engine)\n File \"/home/zsun/anaconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 1178, in _make_engine\n self._engine = CParserWrapper(self.f, **self.options)\n File \"/home/zsun/anaconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 2008, in __init__\n self._reader = parsers.TextReader(src, **kwds)\n File \"pandas/_libs/parsers.pyx\", line 382, in pandas._libs.parsers.TextReader.__cinit__\n File \"pandas/_libs/parsers.pyx\", line 674, in pandas._libs.parsers.TextReader._setup_parser_source\nFileNotFoundError: [Errno 2] No such file or directory: '/home/zsun/cmaq/prediction_files/prediction_rf.csv'\n", + "history_begin_time" : 1654456176250, + "history_end_time" : 1654456208123, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : "roeaa3", + "indicator" : "Failed" +},{ + "history_id" : "uz46x5chifo", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 25\n#os.system('module load ffmpeg') # Uncomment if running on HOPPER\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(home + \"/cmaq/prediction_maps/\"):\n for file in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv(home + '/cmaq/prediction_files/prediction_rf.csv')\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = 
AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\n\n\nfor t in time_:\n \n df = dfs[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = gdf.set_crs(\"EPSG:4326\")\n #gdf = gdf.to_crs(\"+proj=lcc +lat_1=2 +lat_2=33.000 +lat_0=45.000 +lon_0=-97.000 +x_0=-97.000 +y_0=40.000 +datum=NAD83 +units=m +no_defs\")\n\n gdf.plot(column='prediction', legend=False, figsize=(20, 18),\n cmap=cmap, vmin=0,vmax=80 )\n #cb = plt.colorbar(sm, ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n plt.title(plotTitle, fontdict={'fontsize': 35})\n plt.savefig(home + \"/cmaq/prediction_maps/CMAQ_\" + str(t) + \".tif\")\n\n \nfor t_Airnow, t in zip(time_Airnow, time_):\n \n df = dfs[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = gdf.set_crs(\"EPSG:4326\")\n \n \n predDf = gdf.plot(column='prediction', legend=False, figsize=(20, 18),\n cmap=cmap, vmin=0,vmax=84 )\n \n df_Airnow = dfs_Airnow[t_Airnow]\n # Add individual AirNow stations in US with color representation.\n \n gdfAirnow = gpd.GeoDataFrame(df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n\n gdfAirnow = gdfAirnow.set_crs(\"EPSG:4326\")\n gdfAirnow.plot(ax=predDf, column='AirNOW_O3', marker='o', markersize=65, cmap=cmap, figsize=(20, 15), linewidths=1, edgecolors=\"black\")\n \n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n plt.title(\"AirNow Stations: \"+plotTitle, fontdict={'fontsize': 35})\n plt.savefig(home + \"/cmaq/prediction_maps/AirNow_\" + str(t) + \".tif\")\n \nfiles=glob.glob(home + \"/cmaq/prediction_maps/CMAQ_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave(home+'/prediction.mp4', images, fps=10)\n\n\nfiles=glob.glob(\"/Users/uhhmed/prediction_maps/Airnow_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave(\"/Users/uhhmed/predctionAirNow.mp4\", images, fps=10)\n\n# create .gif from .mp4 using FFmpeg\nos.system('ffmpeg -i '+ home + '/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" '+ home + '/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm '+ home + '/prediction.mp4')\n\n\n# create .gif from .mp4 using FFmpeg\nos.system('ffmpeg -i '+ home + '/predctionAirNow.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" '+ home + '/predctionAirNow.gif')\n\n# remove the created .mp4 file\nos.system('rm '+ home + '/predctionAirNow.mp4')\n", + "history_output" : "Running", + "history_begin_time" : 1654319972952, + "history_end_time" : 1654537091394, + "history_notes" : null, + "history_process" : 
"6oifw4", + "host_id" : "p6wvf2", + "indicator" : "Stopped" +},{ + "history_id" : "zl0dxas1fen", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 25\n#os.system('module load ffmpeg') # Uncomment if running on HOPPER\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(home + \"/cmaq/prediction_maps/\"):\n for file in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv(home + '/cmaq/prediction_files/prediction_rf.csv')\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\n\n\nfor t in time_:\n \n df = dfs[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = gdf.set_crs(\"EPSG:4326\")\n #gdf = gdf.to_crs(\"+proj=lcc +lat_1=2 +lat_2=33.000 +lat_0=45.000 +lon_0=-97.000 +x_0=-97.000 +y_0=40.000 +datum=NAD83 +units=m +no_defs\")\n\n gdf.plot(column='prediction', legend=False, figsize=(20, 18),\n cmap=cmap, vmin=0,vmax=80 )\n #cb = plt.colorbar(sm, ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n plt.title(plotTitle, fontdict={'fontsize': 35})\n plt.savefig(home + \"/cmaq/prediction_maps/CMAQ_\" + str(t) + \".tif\")\n\n \nfor t_Airnow, t in zip(time_Airnow, time_):\n \n df = dfs[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = gdf.set_crs(\"EPSG:4326\")\n \n \n predDf = gdf.plot(column='prediction', legend=False, figsize=(20, 18),\n cmap=cmap, vmin=0,vmax=84 )\n \n df_Airnow = dfs_Airnow[t_Airnow]\n # Add individual AirNow stations in US with color representation.\n \n gdfAirnow = gpd.GeoDataFrame(df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n\n gdfAirnow = gdfAirnow.set_crs(\"EPSG:4326\")\n gdfAirnow.plot(ax=predDf, column='AirNOW_O3', marker='o', markersize=65, cmap=cmap, figsize=(20, 15), linewidths=1, edgecolors=\"black\")\n \n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n plt.title(\"AirNow Stations: \"+plotTitle, fontdict={'fontsize': 35})\n plt.savefig(home + \"/cmaq/prediction_maps/AirNow_\" + str(t) + \".tif\")\n \nfiles=glob.glob(home + \"/cmaq/prediction_maps/CMAQ_*.tif\")\n#files = 
sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave(home+'/prediction.mp4', images, fps=10)\n\n\nfiles=glob.glob(\"/Users/uhhmed/prediction_maps/Airnow_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave(\"/Users/uhhmed/predctionAirNow.mp4\", images, fps=10)\n\n# create .gif from .mp4 using FFmpeg\nos.system('ffmpeg -i '+ home + '/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" '+ home + '/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm '+ home + '/prediction.mp4')\n\n\n# create .gif from .mp4 using FFmpeg\nos.system('ffmpeg -i '+ home + '/predctionAirNow.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" '+ home + '/predctionAirNow.gif')\n\n# remove the created .mp4 file\nos.system('rm '+ home + '/predctionAirNow.mp4')\n", + "history_output" : "Traceback (most recent call last):\n File \"prediction_maps.py\", line 8, in \n import cmaps\nModuleNotFoundError: No module named 'cmaps'\n", + "history_begin_time" : 1654318014269, + "history_end_time" : 1654318045679, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : "roeaa3", + "indicator" : "Done" +},{ + "history_id" : "zcneduczmek", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 25\n#os.system('module load ffmpeg') # Uncomment if running on HOPPER\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(home + \"/cmaq/prediction_maps/\"):\n for file in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv(home + '/cmaq/prediction_files/prediction_rf.csv')\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\nAirnowObs = AirnowObs.loc[AirnowObs['AirNOW_O3'] != -999]\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_Airnow = AirnowObs['YYYYMMDDHH'].unique()\ndfs_Airnow = dict(tuple(AirnowObs.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\n\n\n\nfor t in time_:\n \n df = dfs[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = gdf.set_crs(\"EPSG:4326\")\n #gdf = gdf.to_crs(\"+proj=lcc +lat_1=2 +lat_2=33.000 +lat_0=45.000 +lon_0=-97.000 +x_0=-97.000 +y_0=40.000 +datum=NAD83 +units=m +no_defs\")\n\n gdf.plot(column='prediction', legend=False, figsize=(20, 18),\n cmap=cmap, vmin=0,vmax=80 )\n #cb = plt.colorbar(sm, ticks=list(range(0, 84, 4)), location='bottom', \t\n # format='%.0f', boundaries=np.arange(0,84,4), \n # spacing='uniform', drawedges=True, pad=0.05)\n\n #cb.outline.set_linewidth(2)\n #cb.dividers.set_color('black')\n #cb.dividers.set_linewidth(2)\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# 
states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n plt.title(plotTitle, fontdict={'fontsize': 35})\n plt.savefig(home + \"/cmaq/prediction_maps/CMAQ_\" + str(t) + \".tif\")\n\n \nfor t_Airnow, t in zip(time_Airnow, time_):\n \n df = dfs[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = gdf.set_crs(\"EPSG:4326\")\n \n \n predDf = gdf.plot(column='prediction', legend=False, figsize=(20, 18),\n cmap=cmap, vmin=0,vmax=84 )\n \n df_Airnow = dfs_Airnow[t_Airnow]\n # Add individual AirNow stations in US with color representation.\n \n gdfAirnow = gpd.GeoDataFrame(df_Airnow, geometry=gpd.points_from_xy(df_Airnow.Longitude,df_Airnow.Latitude))\n\n gdfAirnow = gdfAirnow.set_crs(\"EPSG:4326\")\n gdfAirnow.plot(ax=predDf, column='AirNOW_O3', marker='o', markersize=65, cmap=cmap, figsize=(20, 15), linewidths=1, edgecolors=\"black\")\n \n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n plt.title(\"AirNow Stations: \"+plotTitle, fontdict={'fontsize': 35})\n plt.savefig(home + \"/cmaq/prediction_maps/AirNow_\" + str(t) + \".tif\")\n \nfiles=glob.glob(home + \"/cmaq/prediction_maps/CMAQ_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave(home+'/prediction.mp4', images, fps=10)\n\n\nfiles=glob.glob(\"/Users/uhhmed/prediction_maps/Airnow_*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave(\"/Users/uhhmed/predctionAirNow.mp4\", images, fps=10)\n\n# create .gif from .mp4 using FFmpeg\nos.system('ffmpeg -i '+ home + '/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" '+ home + '/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm '+ home + '/prediction.mp4')\n\n\n# create .gif from .mp4 using FFmpeg\nos.system('ffmpeg -i '+ home + '/predctionAirNow.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" '+ home + '/predctionAirNow.gif')\n\n# remove the created .mp4 file\nos.system('rm '+ home + '/predctionAirNow.mp4')\n", + "history_output" : "Traceback (most recent call last):\n File \"prediction_maps.py\", line 8, in \n import cmaps\nModuleNotFoundError: No module named 'cmaps'\n", + "history_begin_time" : 1654314798230, + "history_end_time" : 1654314830034, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : "roeaa3", + "indicator" : "Done" +},{ + "history_id" : "vtvaz3j99z7", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 25\nos.system('module load ffmpeg') # Uncomment if running on HOPPER\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(home + \"/cmaq/prediction_maps/\"):\n for file in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# 
importing data\ndf = pd.read_csv(home + '/cmaq/prediction_files/prediction_rf.csv')\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\nnorm = mpl.colors.Normalize(vmin=0,vmax=84)\nsm = plt.cm.ScalarMappable(cmap=cmap, norm=norm)\n\n\n\nfor t in time_:\n \n df = dfs[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = gdf.set_crs(\"EPSG:4326\")\n gdf = gdf.to_crs(\"+proj=lcc +lat_1=2 +lat_2=33.000 +lat_0=45.000 +lon_0=-97.000 +x_0=-97.000 +y_0=40.000 +datum=NAD83 +units=m +no_defs\")\n\n gdf.plot(column='prediction', legend=False, figsize=(20, 18),\n cmap=cmap )\n cb = plt.colorbar(sm, ticks=list(range(0, 84, 4)), location='bottom', \t\n format='%.0f', boundaries=np.arange(0,84,4), \n spacing='uniform', drawedges=True, pad=0.05)\n\n cb.outline.set_linewidth(2)\n cb.dividers.set_color('black')\n cb.dividers.set_linewidth(2)\n \n # Add individual AirNow stations in US with color representation.\n \n # gdfAirnow = gpd.GeoDataFrame(\n # AirnowObs, geometry=gpd.points_from_xy(AirnowObs.Longitude, AirnowObs.Latitude))\n # gdfAirnow.plot(ax=predDF, column='AirNOW_O3', marker='o', cmap=cmaps.WhiteBlueGreenYellowRed, figsize=(20, 20))\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n plt.title(plotTitle, fontdict={'fontsize': 35})\n plt.savefig(home + \"/cmaq/prediction_maps/\" + str(t) + \".tif\")\n\n \nfiles=glob.glob(home + \"/cmaq/prediction_maps/*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave(home+'/prediction.mp4', images, fps=10)\n\n\n# create .gif from .mp4 using FFmpeg\nos.system('ffmpeg -i '+ home + '/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" '+ home + '/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm '+ home + '/prediction.mp4')\n", + "history_output" : "", + "history_begin_time" : 1653698755259, + "history_end_time" : 1653700889963, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : "p6wvf2", + "indicator" : "Failed" +},{ + "history_id" : "YhalkCwcoFfg", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\n\nimport imageio\nimport glob\nimport os\n\n\nmpl.rcParams['font.size'] = 25\nos.system('module load ffmpeg') # Uncomment if running on HOPPER\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(home + \"/cmaq/prediction_maps/\"):\n for file in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv(home + '/cmaq/prediction_files/prediction_rf.csv')\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = 
dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ncmap = cmaps.WhiteBlueGreenYellowRed[0:262:12]\nnorm = mpl.colors.Normalize(vmin=0,vmax=84)\nsm = plt.cm.ScalarMappable(cmap=cmap, norm=norm)\n\n\n\nfor t in time_:\n \n df = dfs[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = gdf.set_crs(\"EPSG:3762\", allow_override=True)\n #gdf = gdf.set_crs(\"EPSG:4326\")\n gdf.plot(column='prediction', legend=False, figsize=(20, 18),\n cmap=cmap )\n cb = plt.colorbar(sm, ticks=list(range(0, 84, 4)), location='bottom', \t\n format='%.0f', boundaries=np.arange(0,84,4), \n spacing='uniform', drawedges=True, pad=0.05)\n\n cb.outline.set_linewidth(2)\n cb.dividers.set_color('black')\n cb.dividers.set_linewidth(2)\n \n # Add individual AirNow stations in US with color representation.\n \n # gdfAirnow = gpd.GeoDataFrame(\n # AirnowObs, geometry=gpd.points_from_xy(AirnowObs.Longitude, AirnowObs.Latitude))\n # gdfAirnow.plot(ax=predDF, column='AirNOW_O3', marker='o', cmap=cmaps.WhiteBlueGreenYellowRed, figsize=(20, 20))\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n plt.title(plotTitle, fontdict={'fontsize': 35})\n plt.savefig(home + \"/cmaq/prediction_maps/\" + str(t) + \".tif\")\n\n \nfiles=glob.glob(home + \"/cmaq/prediction_maps/*.tif\")\n#files = sorted(files)\nfiles = sorted(files)\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave(home+'/prediction.mp4', images, fps=10)\n\n\n# create .gif from .mp4 using FFmpeg\nos.system('ffmpeg -i '+ home + '/prediction.mp4 -vf \"scale=2000:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse, fps=10\" '+ home + '/prediction.gif')\n\n# remove the created .mp4 file\nos.system('rm '+ home + '/prediction.mp4')\n", + "history_output" : "/home/aalnaim/CMAQAI/lib/python3.8/site-packages/geopandas/plotting.py:661: RuntimeWarning: More than 20 figures have been opened. Figures created through the pyplot interface (`matplotlib.pyplot.figure`) are retained until explicitly closed and may consume too much memory. 
(To control this warning, see the rcParam `figure.max_open_warning`).\n fig, ax = plt.subplots(figsize=figsize)\n", + "history_begin_time" : 1653399059064, + "history_end_time" : 1653402790984, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Failed" +},{ + "history_id" : "r4961ogi5rp", + "history_input" : "# importing necessary libraries\nimport os\nimport pandas as pd\nimport matplotlib as m\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\n\nimport imageio\nimport glob\n\nimport random\nimport string\n\n\n# printing lowercase\nletters = string.ascii_lowercase\nfolder_name = ''.join(random.choice(letters) for i in range(5))\n\n\n#m.rcParams['font.size'] = 25\n#norm= m.colors.Normalize(vmin=0,vmax=80)\ncdict = {\n 'red' : ( (0.0, 0.25, .25), (0.02, .59, .59), (1., 1., 1.)),\n 'green': ( (0.0, 0.0, 0.0), (0.02, .45, .45), (1., .97, .97)),\n 'blue' : ( (0.0, 1.0, 1.0), (0.02, .75, .75), (1., 0.45, 0.45))\n}\n\ncm = m.colors.LinearSegmentedColormap('my_colormap', cdict, 1024)\n\n\n# home directory\nhome = \"D:/data/\"#str(Path.home())\ncmaq_folder = f\"{home}/cmaq/\"\ninput_folder = f\"{home}/cmaq/prediction_files/\"\nresult_folder = f\"{home}/cmaq/prediction_maps_{folder_name}/\"\nos.makedirs(cmaq_folder, exist_ok=True)\nos.makedirs(input_folder, exist_ok=True)\nos.makedirs(result_folder, exist_ok=True)\n\n# importing data\ndf = pd.read_csv(f'D:/data/prediction_rf.csv')\nAirnowObs = pd.read_csv(f\"D:/data/observation.csv\")\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\ntime_ = time_[:5]\n\nfor t in time_:\n \n df = dfs[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n #gdf = gdf.set_crs(\"EPSG:3762\", allow_override=True)\n #gdf = gdf.set_crs(\"EPSG:4326\")\n #print(cmaps.WhiteBlueGreenYellowRed())\n \n gdf.plot(column='prediction', \n legend=True, \n figsize=(20, 25),\n cmap=cm,\n #categorical=True,\n #cmap=cmaps.WhiteBlueGreenYellowRed, \n legend_kwds={'orientation': \"horizontal\", \"pad\": 0.05}, \n vmin=0, \n vmax=80 )\n \n # Add individual AirNow stations in US with color representation.\n \n # gdfAirnow = gpd.GeoDataFrame(\n # AirnowObs, geometry=gpd.points_from_xy(AirnowObs.Longitude, AirnowObs.Latitude))\n # gdfAirnow.plot(ax=predDF, column='AirNOW_O3', marker='o', cmap=cmaps.WhiteBlueGreenYellowRed, figsize=(20, 20))\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n print(t)\n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %H)\")\n plt.title(plotTitle, fontdict={'fontsize': 35})\n plt.savefig(f\"{result_folder}/{str(t)}.tif\")\n\n \nfiles=glob.glob(f\"{result_folder}/*.tif\")\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave(f'{cmaq_folder}/prediction_{folder_name}.gif', images)", + "history_output" : "Traceback (most recent call last):\n File \"prediction_maps.py\", line 7, in \n import geopandas as gpd\nModuleNotFoundError: No module named 'geopandas'\n", + "history_begin_time" : 1652934721295, + "history_end_time" : 1652934722164, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : "100001", + "indicator" : "Done" +},{ + "history_id" : "2hWdUd61m31u", + "history_input" : "# 
importing necessary libraries\nimport pandas as pd\nimport matplotlib\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\n\nimport imageio\nimport glob\n\n\nmatplotlib.rcParams['font.size'] = 25\nnorm= matplotlib.colors.Normalize(vmin=0,vmax=80)\n\n\n# home directory\nhome = str(Path.home())\n\n# Delete previous .tif images only (not folder) to reduce space if folder and files exist already.\nif Path(home + \"/cmaq/prediction_maps/\"):\n for file in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\"):\n if file.is_file():\n [f.unlink() for f in Path(home + \"/cmaq/prediction_maps/\").glob(\"*\") if f.is_file()] \n\n# importing data\ndf = pd.read_csv(home + '/cmaq/prediction_files/prediction_rf.csv')\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\n\n\nfor t in time_:\n \n df = dfs[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n #gdf = gdf.set_crs(\"EPSG:3762\", allow_override=True)\n #gdf = gdf.set_crs(\"EPSG:4326\")\n gdf.plot(column='prediction', legend=True, figsize=(20, 25),\n cmap=cmaps.WhiteBlueGreenYellowRed, legend_kwds={'orientation': \"horizontal\", \"pad\": 0.05}, vmin=0, vmax=80 )\n \n # Add individual AirNow stations in US with color representation.\n \n # gdfAirnow = gpd.GeoDataFrame(\n # AirnowObs, geometry=gpd.points_from_xy(AirnowObs.Longitude, AirnowObs.Latitude))\n # gdfAirnow.plot(ax=predDF, column='AirNOW_O3', marker='o', cmap=cmaps.WhiteBlueGreenYellowRed, figsize=(20, 20))\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n plt.title(plotTitle, fontdict={'fontsize': 35})\n plt.savefig(home + \"/cmaq/prediction_maps/\" + str(t) + \".tif\")\n\n \nfiles=glob.glob(home + \"/cmaq/prediction_maps/*.tif\")\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave(home+'/prediction.gif', images)", + "history_output" : "Running", + "history_begin_time" : 1652835729699, + "history_end_time" : 1653400545931, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "Q7aByEWbhBW2", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport matplotlib\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\n\nimport imageio\nimport glob\n\n\nmatplotlib.rcParams['font.size'] = 25\nnorm= matplotlib.colors.Normalize(vmin=0,vmax=80)\n\n\n# home directory\nhome = str(Path.home())\n\n# importing data\ndf = pd.read_csv(home + '/cmaq/prediction_files/prediction_rf.csv')\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\n#fig, ax = plt.subplots(figsize=(20, 25))\n\n\n\nfor t in time_:\n \n df = dfs[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n #gdf = gdf.set_crs(\"EPSG:3762\", allow_override=True)\n #gdf = gdf.set_crs(\"EPSG:4326\")\n gdf.plot(column='prediction', legend=True, figsize=(20, 25),\n 
cmap=cmaps.WhiteBlueGreenYellowRed, legend_kwds={'orientation': \"horizontal\", \"pad\": 0.05}, vmin=0, vmax=80 )\n \n # Add individual AirNow stations in US with color representation.\n \n # gdfAirnow = gpd.GeoDataFrame(\n # AirnowObs, geometry=gpd.points_from_xy(AirnowObs.Longitude, AirnowObs.Latitude))\n # gdfAirnow.plot(ax=predDF, column='AirNOW_O3', marker='o', cmap=cmaps.WhiteBlueGreenYellowRed, figsize=(20, 20))\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n plt.title(plotTitle, fontdict={'fontsize': 35})\n plt.savefig(home + \"/cmaq/prediction_maps/\" + str(t) + \".tif\")\n\n \nfiles=glob.glob(home + \"/cmaq/prediction_maps/*.tif\")\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave(home+'/prediction.gif', images)", + "history_output" : "Running", + "history_begin_time" : 1652833318699, + "history_end_time" : 1652835575643, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "6j67tfndue7", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport matplotlib\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\n\nimport imageio\nimport glob\n\n\nmatplotlib.rcParams['font.size'] = 25\nnorm= matplotlib.colors.Normalize(vmin=0,vmax=80)\n\n\n# home directory\nhome = str(Path.home())\n\n# importing data\ndf = pd.read_csv(home + '/cmaq/prediction_files/prediction_rf.csv')\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\n#fig, ax = plt.subplots(figsize=(20, 25))\n\n\n\nfor t in time_:\n \n df = dfs[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n #gdf = gdf.set_crs(\"EPSG:3762\", allow_override=True)\n #gdf = gdf.set_crs(\"EPSG:4326\")\n gdf.plot(column='prediction', legend=True, figsize=(20, 25),\n cmap=cmaps.WhiteBlueGreenYellowRed, legend_kwds={'orientation': \"horizontal\", \"pad\": 0.05}, vmin=0, vmax=80 )\n \n # Add individual AirNow stations in US with color representation.\n \n # gdfAirnow = gpd.GeoDataFrame(\n # AirnowObs, geometry=gpd.points_from_xy(AirnowObs.Longitude, AirnowObs.Latitude))\n # gdfAirnow.plot(ax=predDF, column='AirNOW_O3', marker='o', cmap=cmaps.WhiteBlueGreenYellowRed, figsize=(20, 20))\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n plt.title(plotTitle, fontdict={'fontsize': 35})\n plt.savefig(home + \"/cmaq/prediction_maps/\" + str(t) + \".tif\")\n\n \nfiles=glob.glob(home + \"/cmaq/prediction_maps/*.tif\")\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave(home+'/prediction.gif', images)", + "history_output" : "/home/aalnaim/CMAQAI/lib/python3.8/site-packages/geopandas/plotting.py:661: RuntimeWarning: More than 20 figures have been opened. 
Figures created through the pyplot interface (`matplotlib.pyplot.figure`) are retained until explicitly closed and may consume too much memory. (To control this warning, see the rcParam `figure.max_open_warning`).\n fig, ax = plt.subplots(figsize=figsize)\nTraceback (most recent call last):\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/PIL/Image.py\", line 2212, in save\n save_handler(self, fp, filename)\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/PIL/TiffImagePlugin.py\", line 1769, in _save\n ImageFile._save(\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/PIL/ImageFile.py\", line 496, in _save\n fp.flush()\nOSError: [Errno 122] Disk quota exceeded\n\nDuring handling of the above exception, another exception occurred:\n\nTraceback (most recent call last):\n File \"prediction_maps.py\", line 58, in \n plt.savefig(home + \"/cmaq/prediction_maps/\" + str(t) + \".tif\")\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/matplotlib/pyplot.py\", line 958, in savefig\n res = fig.savefig(*args, **kwargs)\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/matplotlib/figure.py\", line 3019, in savefig\n self.canvas.print_figure(fname, **kwargs)\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/matplotlib/backend_bases.py\", line 2319, in print_figure\n result = print_method(\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/matplotlib/backend_bases.py\", line 1648, in wrapper\n return func(*args, **kwargs)\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/matplotlib/backends/backend_agg.py\", line 594, in print_tif\n return (Image.fromarray(np.asarray(self.buffer_rgba()))\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/PIL/Image.py\", line 2216, in save\n fp.close()\nOSError: [Errno 122] Disk quota exceeded\n", + "history_begin_time" : 1652832428259, + "history_end_time" : 1652832957060, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "atc9piiq4bd", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport matplotlib\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\n\nimport imageio\nimport glob\n\n\nmatplotlib.rcParams['font.size'] = 25\nnorm= matplotlib.colors.Normalize(vmin=0,vmax=80)\n\n\n# home directory\nhome = str(Path.home())\n\n# importing data\ndf = pd.read_csv(home + '/cmaq/prediction_files/prediction_rf.csv')\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\n#fig, ax = plt.subplots(figsize=(20, 25))\n\ntime_ = time_[:3]\n\nfor t in time_:\n \n df = dfs[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n #gdf = gdf.set_crs(\"EPSG:3762\", allow_override=True)\n #gdf = gdf.set_crs(\"EPSG:4326\")\n gdf.plot(column='prediction', legend=True, figsize=(20, 25),\n cmap=cmaps.WhiteBlueGreenYellowRed, legend_kwds={'orientation': \"horizontal\", \"pad\": 0.05}, vmin=0, vmax=80 )\n \n # Add individual AirNow stations in US with color representation.\n \n # gdfAirnow = gpd.GeoDataFrame(\n # AirnowObs, geometry=gpd.points_from_xy(AirnowObs.Longitude, AirnowObs.Latitude))\n # gdfAirnow.plot(ax=predDF, column='AirNOW_O3', marker='o', cmap=cmaps.WhiteBlueGreenYellowRed, figsize=(20, 20))\n \n # Add US states boundries.\n \n # states = 
gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n plt.title(plotTitle, fontdict={'fontsize': 35})\n plt.savefig(home + \"/cmaq/prediction_maps/\" + str(t) + \".tif\")\n\n \nfiles=glob.glob(home + \"/cmaq/prediction_maps/*.tif\")\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave(home+'/prediction_min.gif', images)", + "history_output" : "/home/aalnaim/cmaq/prediction_maps/2022050912.tif\n/home/aalnaim/cmaq/prediction_maps/2022050913.tif\n/home/aalnaim/cmaq/prediction_maps/2022050914.tif\n/home/aalnaim/cmaq/prediction_maps/2022050915.tif\n/home/aalnaim/cmaq/prediction_maps/2022050916.tif\n/home/aalnaim/cmaq/prediction_maps/2022051712.tif\n/home/aalnaim/cmaq/prediction_maps/2022051713.tif\n/home/aalnaim/cmaq/prediction_maps/2022051714.tif\n", + "history_begin_time" : 1652831564484, + "history_end_time" : 1652831640120, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "IrZ4cMXvW77L", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport matplotlib\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\n\nimport imageio\nimport glob\n\n\nmatplotlib.rcParams['font.size'] = 25\nnorm= matplotlib.colors.Normalize(vmin=0,vmax=80)\n\n\n# home directory\nhome = str(Path.home())\n\n# importing data\ndf = pd.read_csv(home + '/cmaq/prediction_files/prediction_xgboost.csv')\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\n# fig, ax = plt.subplots(figsize=(20, 25))\n\nfor t in time_:\n \n df = dfs[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = gdf.set_crs(\"EPSG:4326\", allow_override=True)\n #gdf = gdf.set_crs(\"EPSG:4326\")\n gdf.plot(column='prediction', legend=True, figsize=(20, 25),\n cmap=cmaps.WhiteBlueGreenYellowRed, norm=norm, legend_kwds={'orientation': \"horizontal\", \"pad\": 0.05})\n \n # Add individual AirNow stations in US with color representation.\n \n # gdfAirnow = gpd.GeoDataFrame(\n # AirnowObs, geometry=gpd.points_from_xy(AirnowObs.Longitude, AirnowObs.Latitude))\n # gdfAirnow.plot(ax=predDF, column='AirNOW_O3', marker='o', cmap=cmaps.WhiteBlueGreenYellowRed, figsize=(20, 20))\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n plt.title(plotTitle, fontdict={'fontsize': 35})\n plt.savefig(home + \"/cmaq/prediction_maps/\" + str(t) + \".tif\")\n\n \nfiles=glob.glob(home + \"/cmaq/prediction_maps/*.tif\")\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave(home+'/prediction.gif', images)", + "history_output" : null, + "history_begin_time" : 1652795640186, + "history_end_time" : null, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : null +},{ + "history_id" : "EHRAUeY2Am5e", + "history_input" : "# importing necessary libraries\nimport 
pandas as pd\nimport matplotlib\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\n\nimport imageio\nimport glob\n\n\nmatplotlib.rcParams['font.size'] = 25\nnorm= matplotlib.colors.Normalize(vmin=0,vmax=80)\n\n\n# home directory\nhome = str(Path.home())\n\n# importing data\ndf = pd.read_csv(home + '/cmaq/prediction_files/prediction_xgboost.csv')\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\n# fig, ax = plt.subplots(figsize=(20, 25))\n\nfor t in time_:\n \n df = dfs[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = gdf.set_crs(\"EPSG:4326\", allow_override=True)\n #gdf = gdf.set_crs(\"EPSG:4326\")\n gdf.plot(column='prediction', legend=True, figsize=(20, 25),\n cmap=cmaps.WhiteBlueGreenYellowRed, norm=norm, legend_kwds={'orientation': \"horizontal\", \"pad\": 0.05})\n \n # Add individual AirNow stations in US with color representation.\n \n # gdfAirnow = gpd.GeoDataFrame(\n # AirnowObs, geometry=gpd.points_from_xy(AirnowObs.Longitude, AirnowObs.Latitude))\n # gdfAirnow.plot(ax=predDF, column='AirNOW_O3', marker='o', cmap=cmaps.WhiteBlueGreenYellowRed, figsize=(20, 20))\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n plt.title(plotTitle, fontdict={'fontsize': 35})\n plt.savefig(home + \"/cmaq/prediction_maps/\" + str(t) + \".tif\")\n\n \nfiles=glob.glob(home + \"/cmaq/prediction_maps/*.tif\")\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave(home+'/prediction.gif', images)", + "history_output" : "", + "history_begin_time" : 1652795412335, + "history_end_time" : 1652795640225, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : "Done" +},{ + "history_id" : "0a6oMvVMQkch", + "history_input" : "# importing necessary libraries\nimport pandas as pd\nimport matplotlib\nimport matplotlib.pyplot as plt\nfrom pathlib import Path\nimport geopandas as gpd\nimport cmaps\nfrom datetime import datetime\n\nimport imageio\nimport glob\n\n\nmatplotlib.rcParams['font.size'] = 25\nnorm= matplotlib.colors.Normalize(vmin=0,vmax=80)\n\n\n# home directory\nhome = str(Path.home())\n\n# importing data\ndf = pd.read_csv(home + '/cmaq/prediction_files/prediction_xgboost.csv')\nAirnowObs = pd.read_csv(home + \"/cmaq/observation.csv\")\n\ntime_ = df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\n# fig, ax = plt.subplots(figsize=(20, 25))\n\nfor t in time_:\n \n df = dfs[t]\n \n gdf = gpd.GeoDataFrame(\n df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = gdf.set_crs(\"EPSG:4326\", allow_override=True)\n #gdf = gdf.set_crs(\"EPSG:4326\")\n gdf.plot(column='prediction', legend=True, figsize=(20, 25),\n cmap=cmaps.WhiteBlueGreenYellowRed, norm=norm, legend_kwds={'orientation': \"horizontal\", \"pad\": 0.05})\n \n # Add individual AirNow stations in US with color representation.\n \n # gdfAirnow = gpd.GeoDataFrame(\n # AirnowObs, geometry=gpd.points_from_xy(AirnowObs.Longitude, AirnowObs.Latitude))\n # gdfAirnow.plot(ax=predDF, column='AirNOW_O3', 
marker='o', cmap=cmaps.WhiteBlueGreenYellowRed, figsize=(20, 20))\n \n # Add US states boundries.\n \n # states = gpd.read_file('usStates/cb_2018_us_state_500k.shp')\n\t# states = states.to_crs(\"EPSG:4326\")\n\t# states.boundary.plot(ax=ax)\n \n dateObj = datetime.strptime(str(t), \"%Y%m%d%H\")\n plotTitle = datetime.strftime(dateObj, \"%Y-%m-%d (Time: %-H)\")\n plt.title(plotTitle, fontdict={'fontsize': 35})\n plt.savefig(home + \"/cmaq/prediction_maps/\" + str(t) + \".tif\")\n\n \nfiles=glob.glob(home + \"/cmaq/prediction_maps/*.tif\")\nimages=[]\n\nfor i in files:\n print(i)\n img=imageio.imread(i)\n images.append(img)\n \nimageio.mimsave(home+'/prediction.gif', images)", + "history_output" : null, + "history_begin_time" : 1652795403314, + "history_end_time" : null, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : null, + "indicator" : null +},{ + "history_id" : "bzla5a7gju4", + "history_input" : null, + "history_output" : "Received unknown response code", + "history_begin_time" : 1652786146413, + "history_end_time" : 1652786147149, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : "p6wvf2", + "indicator" : "Failed" +},{ + "history_id" : "1suhj06qxi7", + "history_input" : "\n## importing necessary libraries\nimport pandas as pd\nimport matplotlib.pyplot as plt\nimport plotly.express as px\nfrom pathlib import Path\nimport geopandas as gpd\nimport shapely\n#from osgeo import gdal\n# home directory\nhome = str(Path.home())\n\n# importing data\ndf=pd.read_csv(home+'/cmaq/prediction_files/prediction_xgboost.csv')\n\ntime_=df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\nfor t in time_:\n df=dfs[t]\n gdf = gpd.GeoDataFrame(df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = gdf.set_crs(\"EPSG:4326\",allow_override=True)\n #gdf = gdf.set_crs(\"EPSG:4326\")\n gdf.plot(column='prediction',cmap='bwr')\n plt.savefig(home+\"/cmaq/prediction_maps/\"+str(t)+\".tif\")\n \n\n \n \n \n \n\n# total area for the grid\n# xmin, ymin, xmax, ymax= gdf.geometry.total_bounds\n# gdf=gdal.OpenEx(gdf.to_json(), gdal.OF_VECTOR)\n# how many cells across and down\n# xsize=422\n# ysize=265\n# gdal.Grid(home+\"/cmaq/prediction_maps/\"+str(t)+\".tif\", gdf, zfield=\"prediction\",outputSRS =\"EPSG:4326\", algorithm=\"linear\", outputBounds=[xmax,ymax,xmin,ymin], width=xsize, height=ysize)\n", + "history_output" : "/home/aalnaim/CMAQAI/lib/python3.8/site-packages/geopandas/plotting.py:661: RuntimeWarning: More than 20 figures have been opened. Figures created through the pyplot interface (`matplotlib.pyplot.figure`) are retained until explicitly closed and may consume too much memory. 
(To control this warning, see the rcParam `figure.max_open_warning`).\n fig, ax = plt.subplots(figsize=figsize)\n", + "history_begin_time" : 1652055417930, + "history_end_time" : 1652786159490, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : "p6wvf2", + "indicator" : "Failed" +},{ + "history_id" : "31n0woa3ywm", + "history_input" : "\n## importing necessary libraries\nimport pandas as pd\nimport matplotlib.pyplot as plt\nimport plotly.express as px\nfrom pathlib import Path\nimport geopandas as gpd\nimport shapely\nfrom osgeo import gdal\n# home directory\nhome = str(Path.home())\n\n# importing data\ndf=pd.read_csv(home+'/cmaq/prediction_files/prediction_xgboost.csv')\n\ntime_=df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\nfor t in time_:\n df=dfs[t]\n gdf = gpd.GeoDataFrame(df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = gdf.set_crs(\"EPSG:4326\",allow_override=True)\n #gdf = gdf.set_crs(\"EPSG:4326\")\n gdf.plot(column='prediction',cmap='bwr')\n plt.savefig(home+\"/cmaq/prediction_maps/\"+str(t)+\".tif\")\n \n\n \n \n \n \n\n# total area for the grid\n# xmin, ymin, xmax, ymax= gdf.geometry.total_bounds\n# gdf=gdal.OpenEx(gdf.to_json(), gdal.OF_VECTOR)\n# how many cells across and down\n# xsize=422\n# ysize=265\n# gdal.Grid(home+\"/cmaq/prediction_maps/\"+str(t)+\".tif\", gdf, zfield=\"prediction\",outputSRS =\"EPSG:4326\", algorithm=\"linear\", outputBounds=[xmax,ymax,xmin,ymin], width=xsize, height=ysize)\n", + "history_output" : "Traceback (most recent call last):\n File \"prediction_maps.py\", line 5, in \n import plotly.express as px\nModuleNotFoundError: No module named 'plotly'\n", + "history_begin_time" : 1652048263170, + "history_end_time" : 1652048283203, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "e9a0atiwzvg", + "history_input" : "\n## importing necessary libraries\nimport pandas as pd\nimport matplotlib.pyplot as plt\nimport plotly.express as px\nfrom pathlib import Path\nimport geopandas as gpd\nimport shapely\nfrom osgeo import gdal\n# home directory\nhome = str(Path.home())\n\n# importing data\ndf=pd.read_csv(home+'/cmaq/prediction_files/prediction_xgboost.csv')\n\ntime_=df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\nfor t in time_:\n df=dfs[t]\n gdf = gpd.GeoDataFrame(df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = gdf.set_crs(\"EPSG:4326\",allow_override=True)\n #gdf = gdf.set_crs(\"EPSG:4326\")\n gdf.plot(column='prediction',cmap='bwr')\n plt.savefig(home+\"/cmaq/prediction_maps/maps/\"+str(t)+\".tif\")\n \n\n \n \n \n \n\n# total area for the grid\n# xmin, ymin, xmax, ymax= gdf.geometry.total_bounds\n# gdf=gdal.OpenEx(gdf.to_json(), gdal.OF_VECTOR)\n# how many cells across and down\n# xsize=422\n# ysize=265\n# gdal.Grid(home+\"/cmaq/prediction_maps/\"+str(t)+\".tif\", gdf, zfield=\"prediction\",outputSRS =\"EPSG:4326\", algorithm=\"linear\", outputBounds=[xmax,ymax,xmin,ymin], width=xsize, height=ysize)\n", + "history_output" : "", + "history_begin_time" : 1650481223744, + "history_end_time" : 1650481409083, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : "3wgogh", + "indicator" : "Done" +},{ + "history_id" : "ui5x2vpwlrt", + "history_input" : "\n## importing necessary libraries\nimport pandas as pd\nimport matplotlib.pyplot as plt\nimport plotly.express as px\nfrom pathlib import 
Path\nimport geopandas as gpd\nimport shapely\nfrom osgeo import gdal\n# home directory\nhome = str(Path.home())\n\n# importing data\ndf=pd.read_csv(home+'/cmaq/prediction_files/prediction_xgboost.csv')\n\ntime_=df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\nfor t in time_:\n df=dfs[t]\n gdf = gpd.GeoDataFrame(df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = gdf.set_crs(\"EPSG:4326\",allow_override=True)\n #gdf = gdf.set_crs(\"EPSG:4326\")\n gdf.plot(column='prediction',cmap='bwr')\n plt.savefig(home+\"/cmaq/prediction_maps/maps/\"+str(t)+\".tif\")\n \n\n \n \n \n \n\n# total area for the grid\n# xmin, ymin, xmax, ymax= gdf.geometry.total_bounds\n# gdf=gdal.OpenEx(gdf.to_json(), gdal.OF_VECTOR)\n# how many cells across and down\n# xsize=422\n# ysize=265\n# gdal.Grid(home+\"/cmaq/prediction_maps/\"+str(t)+\".tif\", gdf, zfield=\"prediction\",outputSRS =\"EPSG:4326\", algorithm=\"linear\", outputBounds=[xmax,ymax,xmin,ymin], width=xsize, height=ysize)\n", + "history_output" : "Traceback (most recent call last):\n File \"/home/mislam25/gw-workspace/ui5x2vpwlrt/prediction_maps.py\", line 14, in \n df=pd.read_csv(home+'/cmaq/prediction_files/prediction_xgboost.csv')\n File \"/home/mislam25/anaconda3/lib/python3.9/site-packages/pandas/util/_decorators.py\", line 311, in wrapper\n return func(*args, **kwargs)\n File \"/home/mislam25/anaconda3/lib/python3.9/site-packages/pandas/io/parsers/readers.py\", line 586, in read_csv\n return _read(filepath_or_buffer, kwds)\n File \"/home/mislam25/anaconda3/lib/python3.9/site-packages/pandas/io/parsers/readers.py\", line 482, in _read\n parser = TextFileReader(filepath_or_buffer, **kwds)\n File \"/home/mislam25/anaconda3/lib/python3.9/site-packages/pandas/io/parsers/readers.py\", line 811, in __init__\n self._engine = self._make_engine(self.engine)\n File \"/home/mislam25/anaconda3/lib/python3.9/site-packages/pandas/io/parsers/readers.py\", line 1040, in _make_engine\n return mapping[engine](self.f, **self.options) # type: ignore[call-arg]\n File \"/home/mislam25/anaconda3/lib/python3.9/site-packages/pandas/io/parsers/c_parser_wrapper.py\", line 51, in __init__\n self._open_handles(src, kwds)\n File \"/home/mislam25/anaconda3/lib/python3.9/site-packages/pandas/io/parsers/base_parser.py\", line 222, in _open_handles\n self.handles = get_handle(\n File \"/home/mislam25/anaconda3/lib/python3.9/site-packages/pandas/io/common.py\", line 702, in get_handle\n handle = open(\nFileNotFoundError: [Errno 2] No such file or directory: '/home/mislam25/cmaq/prediction_files/prediction_xgboost.csv'\n", + "history_begin_time" : 1650474160742, + "history_end_time" : 1650474371342, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : "3wgogh", + "indicator" : "Done" +},{ + "history_id" : "p20m01wlgz3", + "history_input" : "\n## importing necessary libraries\nimport pandas as pd\nimport matplotlib.pyplot as plt\nimport plotly.express as px\nfrom pathlib import Path\nimport geopandas as gpd\nimport shapely\nfrom osgeo import gdal\n# home directory\nhome = str(Path.home())\n\n# importing data\ndf=pd.read_csv(home+'/cmaq/prediction_files/prediction_xgboost.csv')\n\ntime_=df['YYYYMMDDHH'].unique()\ndfs = dict(tuple(df.groupby('YYYYMMDDHH'))) # grouping the data by YYMMDDHH\n\nfor t in time_:\n df=dfs[t]\n gdf = gpd.GeoDataFrame(df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))\n gdf = gdf.set_crs(\"EPSG:4326\",allow_override=True)\n #gdf = gdf.set_crs(\"EPSG:4326\")\n 
gdf.plot(column='prediction')\n plt.savefig(home+\"/cmaq/prediction_maps/maps/\"+str(t)+\".tif\")\n \n\n \n \n \n \n\n# total area for the grid\n# xmin, ymin, xmax, ymax= gdf.geometry.total_bounds\n# gdf=gdal.OpenEx(gdf.to_json(), gdal.OF_VECTOR)\n# how many cells across and down\n# xsize=422\n# ysize=265\n# gdal.Grid(home+\"/cmaq/prediction_maps/\"+str(t)+\".tif\", gdf, zfield=\"prediction\",outputSRS =\"EPSG:4326\", algorithm=\"linear\", outputBounds=[xmax,ymax,xmin,ymin], width=xsize, height=ysize)\n", + "history_output" : "/home/mislam25/anaconda3/lib/python3.9/site-packages/geopandas/plotting.py:661: RuntimeWarning: More than 20 figures have been opened. Figures created through the pyplot interface (`matplotlib.pyplot.figure`) are retained until explicitly closed and may consume too much memory. (To control this warning, see the rcParam `figure.max_open_warning`).\n fig, ax = plt.subplots(figsize=figsize)\n", + "history_begin_time" : 1650252772225, + "history_end_time" : 1650253523466, + "history_notes" : null, + "history_process" : "6oifw4", + "host_id" : "3wgogh", + "indicator" : "Done" +},] diff --git a/history/process_ah91af.json b/history/process_ah91af.json new file mode 100644 index 0000000..c635902 --- /dev/null +++ b/history/process_ah91af.json @@ -0,0 +1,11 @@ +[{ + "history_id" : "9eib9x5vep4", + "history_input" : "#!/bin/bash\nmodule load ffmpeg", + "history_output" : "Running", + "history_begin_time" : 1654542710848, + "history_end_time" : 1654542714378, + "history_notes" : null, + "history_process" : "ah91af", + "host_id" : "p6wvf2", + "indicator" : "Done" +},] diff --git a/history/process_b8uv5z.json b/history/process_b8uv5z.json new file mode 100644 index 0000000..ac55914 --- /dev/null +++ b/history/process_b8uv5z.json @@ -0,0 +1,241 @@ +[{ + "history_id" : "h2sxykdhd89", + "history_input" : "import pandas as pd\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\ncmaq=pd.read_csv(\"/groups/ESS/aalnaim/cmaq/test_data.csv\")\n\n# dropping unnecessary variables\ncmaq['YYYYMMDDHH'] = cmaq['YYYYMMDDHH'].map(str)\ncmaq['month'] = cmaq['YYYYMMDDHH'].str[4:6]\ncmaq['day'] = cmaq['YYYYMMDDHH'].str[6:8]\ncmaq['hours'] = cmaq['YYYYMMDDHH'].str[8:10]\n\n#new_df=cmaq.drop(['YYYYMMDDHH'],axis=1)\ncmaq.to_csv(\"/groups/ESS/aalnaim/cmaq/testing.csv\",index=False)", + "history_output" : "Running", + "history_begin_time" : 1656428784905, + "history_end_time" : 1656450968069, + "history_notes" : null, + "history_process" : "b8uv5z", + "host_id" : "p6wvf2", + "indicator" : "Stopped" +},{ + "history_id" : "gs063ewii5p", + "history_input" : "import pandas as pd\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\ncmaq=pd.read_csv(\"/groups/ESS/aalnaim/cmaq/test_data.csv\")\n\n# dropping unnecessary variables\ncmaq['YYYYMMDDHH'] = cmaq['YYYYMMDDHH'].map(str)\ncmaq['month'] = cmaq['YYYYMMDDHH'].str[4:6]\ncmaq['day'] = cmaq['YYYYMMDDHH'].str[6:8]\ncmaq['hours'] = cmaq['YYYYMMDDHH'].str[8:10]\n\n#new_df=cmaq.drop(['YYYYMMDDHH'],axis=1)\ncmaq.to_csv(\"/groups/ESS/aalnaim/cmaq/testing.csv\",index=False)", + "history_output" : "bash: python: command not found\n", + "history_begin_time" : 1656428417899, + "history_end_time" : 1656428429386, + "history_notes" : null, + "history_process" : "b8uv5z", + "host_id" : "p6wvf2", + "indicator" : "Stopped" +},{ + "history_id" : "z2OTrDQfbKG1", + "history_input" : "import pandas as pd\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\ncmaq=pd.read_csv(\"/groups/ESS/aalnaim/cmaq/test_data.csv\")\n\n# 
dropping unnecessary variables\ncmaq['YYYYMMDDHH'] = cmaq['YYYYMMDDHH'].map(str)\ncmaq['month'] = cmaq['YYYYMMDDHH'].str[4:6]\ncmaq['day'] = cmaq['YYYYMMDDHH'].str[6:8]\ncmaq['hours'] = cmaq['YYYYMMDDHH'].str[8:10]\n\n#new_df=cmaq.drop(['YYYYMMDDHH'],axis=1)\ncmaq.to_csv(\"/groups/ESS/aalnaim/cmaq/testing.csv\",index=False)", + "history_output" : "Running", + "history_begin_time" : 1655490516867, + "history_end_time" : 1656318988073, + "history_notes" : null, + "history_process" : "b8uv5z", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "vmsgxwkDgAX9", + "history_input" : "import pandas as pd\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\ncmaq=pd.read_csv(\"/groups/ESS/aalnaim/cmaq/test_data.csv\")\n\n# dropping unnecessary variables\ncmaq['YYYYMMDDHH'] = cmaq['YYYYMMDDHH'].map(str)\ncmaq['month'] = cmaq['YYYYMMDDHH'].str[4:6]\ncmaq['day'] = cmaq['YYYYMMDDHH'].str[6:8]\ncmaq['hours'] = cmaq['YYYYMMDDHH'].str[8:10]\n\n#new_df=cmaq.drop(['YYYYMMDDHH'],axis=1)\ncmaq.to_csv(\"/groups/ESS/aalnaim/cmaq/testing.csv\",index=False)", + "history_output" : "Running", + "history_begin_time" : 1655276958434, + "history_end_time" : 1656318987456, + "history_notes" : null, + "history_process" : "b8uv5z", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "4UMZNmWINDCi", + "history_input" : "import pandas as pd\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\ncmaq=pd.read_csv(home+\"/cmaq/test_data.csv\")\n\n# dropping unnecessary variables\ncmaq['YYYYMMDDHH'] = cmaq['YYYYMMDDHH'].map(str)\ncmaq['month'] = cmaq['YYYYMMDDHH'].str[4:6]\ncmaq['day'] = cmaq['YYYYMMDDHH'].str[6:8]\ncmaq['hours'] = cmaq['YYYYMMDDHH'].str[8:10]\n\n#new_df=cmaq.drop(['YYYYMMDDHH'],axis=1)\ncmaq.to_csv(home+\"/cmaq/testing.csv\",index=False)", + "history_output" : "Running", + "history_begin_time" : 1655078033924, + "history_end_time" : 1656318986937, + "history_notes" : null, + "history_process" : "b8uv5z", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "c3xunsstl48", + "history_input" : "import pandas as pd\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\ncmaq=pd.read_csv(home+\"/cmaq/test_data.csv\")\n\n# dropping unnecessary variables\ncmaq['YYYYMMDDHH'] = cmaq['YYYYMMDDHH'].map(str)\ncmaq['month'] = cmaq['YYYYMMDDHH'].str[4:6]\ncmaq['day'] = cmaq['YYYYMMDDHH'].str[6:8]\ncmaq['hours'] = cmaq['YYYYMMDDHH'].str[8:10]\n\n#new_df=cmaq.drop(['YYYYMMDDHH'],axis=1)\ncmaq.to_csv(home+\"/cmaq/testing.csv\",index=False)", + "history_output" : "Running", + "history_begin_time" : 1655073498802, + "history_end_time" : 1655073601651, + "history_notes" : null, + "history_process" : "b8uv5z", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "87p1pyxeyia", + "history_input" : "import pandas as pd\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\ncmaq=pd.read_csv(home+\"/cmaq/test_data.csv\")\n\n# dropping unnecessary variables\ncmaq['YYYYMMDDHH'] = cmaq['YYYYMMDDHH'].map(str)\ncmaq['month'] = cmaq['YYYYMMDDHH'].str[4:6]\ncmaq['day'] = cmaq['YYYYMMDDHH'].str[6:8]\ncmaq['hours'] = cmaq['YYYYMMDDHH'].str[8:10]\n\n#new_df=cmaq.drop(['YYYYMMDDHH'],axis=1)\ncmaq.to_csv(home+\"/cmaq/testing.csv\",index=False)", + "history_output" : "Traceback (most recent call last):\n File \"test_data.py\", line 6, in \n cmaq=pd.read_csv(home+\"/cmaq/test_data.csv\")\n File \"/home/zsun/anaconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 686, in read_csv\n return 
_read(filepath_or_buffer, kwds)\n File \"/home/zsun/anaconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 452, in _read\n parser = TextFileReader(fp_or_buf, **kwds)\n File \"/home/zsun/anaconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 946, in __init__\n self._make_engine(self.engine)\n File \"/home/zsun/anaconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 1178, in _make_engine\n self._engine = CParserWrapper(self.f, **self.options)\n File \"/home/zsun/anaconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 2008, in __init__\n self._reader = parsers.TextReader(src, **kwds)\n File \"pandas/_libs/parsers.pyx\", line 382, in pandas._libs.parsers.TextReader.__cinit__\n File \"pandas/_libs/parsers.pyx\", line 674, in pandas._libs.parsers.TextReader._setup_parser_source\nFileNotFoundError: [Errno 2] No such file or directory: '/home/zsun/cmaq/test_data.csv'\n", + "history_begin_time" : 1654477449165, + "history_end_time" : 1654477477629, + "history_notes" : null, + "history_process" : "b8uv5z", + "host_id" : "roeaa3", + "indicator" : "Failed" +},{ + "history_id" : "dr9onztcfzr", + "history_input" : "import pandas as pd\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\ncmaq=pd.read_csv(home+\"/cmaq/test_data.csv\")\n\n# dropping unnecessary variables\ncmaq['YYYYMMDDHH'] = cmaq['YYYYMMDDHH'].map(str)\ncmaq['month'] = cmaq['YYYYMMDDHH'].str[4:6]\ncmaq['day'] = cmaq['YYYYMMDDHH'].str[6:8]\ncmaq['hours'] = cmaq['YYYYMMDDHH'].str[8:10]\n\n#new_df=cmaq.drop(['YYYYMMDDHH'],axis=1)\ncmaq.to_csv(home+\"/cmaq/testing.csv\",index=False)", + "history_output" : "Traceback (most recent call last):\n File \"test_data.py\", line 6, in \n cmaq=pd.read_csv(home+\"/cmaq/test_data.csv\")\n File \"C:\\Python37\\lib\\site-packages\\pandas\\io\\parsers.py\", line 676, in parser_f\n return _read(filepath_or_buffer, kwds)\n File \"C:\\Python37\\lib\\site-packages\\pandas\\io\\parsers.py\", line 448, in _read\n parser = TextFileReader(fp_or_buf, **kwds)\n File \"C:\\Python37\\lib\\site-packages\\pandas\\io\\parsers.py\", line 880, in __init__\n self._make_engine(self.engine)\n File \"C:\\Python37\\lib\\site-packages\\pandas\\io\\parsers.py\", line 1114, in _make_engine\n self._engine = CParserWrapper(self.f, **self.options)\n File \"C:\\Python37\\lib\\site-packages\\pandas\\io\\parsers.py\", line 1891, in __init__\n self._reader = parsers.TextReader(src, **kwds)\n File \"pandas\\_libs\\parsers.pyx\", line 374, in pandas._libs.parsers.TextReader.__cinit__\n File \"pandas\\_libs\\parsers.pyx\", line 674, in pandas._libs.parsers.TextReader._setup_parser_source\nFileNotFoundError: [Errno 2] File C:\\Users\\JensenSun/cmaq/test_data.csv does not exist: 'C:\\\\Users\\\\JensenSun/cmaq/test_data.csv'\n", + "history_begin_time" : 1654465999479, + "history_end_time" : 1654466000025, + "history_notes" : null, + "history_process" : "b8uv5z", + "host_id" : "100001", + "indicator" : "Failed" +},{ + "history_id" : "kg1t6le7j3s", + "history_input" : "import pandas as pd\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\ncmaq=pd.read_csv(home+\"/cmaq/test_data.csv\")\n\n# dropping unnecessary variables\ncmaq['YYYYMMDDHH'] = cmaq['YYYYMMDDHH'].map(str)\ncmaq['month'] = cmaq['YYYYMMDDHH'].str[4:6]\ncmaq['day'] = cmaq['YYYYMMDDHH'].str[6:8]\ncmaq['hours'] = cmaq['YYYYMMDDHH'].str[8:10]\n\n#new_df=cmaq.drop(['YYYYMMDDHH'],axis=1)\ncmaq.to_csv(home+\"/cmaq/testing.csv\",index=False)", + "history_output" : "Traceback (most recent call last):\n File 
\"test_data.py\", line 6, in \n cmaq=pd.read_csv(home+\"/cmaq/test_data.csv\")\n File \"/home/zsun/anaconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 686, in read_csv\n return _read(filepath_or_buffer, kwds)\n File \"/home/zsun/anaconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 452, in _read\n parser = TextFileReader(fp_or_buf, **kwds)\n File \"/home/zsun/anaconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 946, in __init__\n self._make_engine(self.engine)\n File \"/home/zsun/anaconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 1178, in _make_engine\n self._engine = CParserWrapper(self.f, **self.options)\n File \"/home/zsun/anaconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 2008, in __init__\n self._reader = parsers.TextReader(src, **kwds)\n File \"pandas/_libs/parsers.pyx\", line 382, in pandas._libs.parsers.TextReader.__cinit__\n File \"pandas/_libs/parsers.pyx\", line 674, in pandas._libs.parsers.TextReader._setup_parser_source\nFileNotFoundError: [Errno 2] No such file or directory: '/home/zsun/cmaq/test_data.csv'\n", + "history_begin_time" : 1654456069764, + "history_end_time" : 1654456095482, + "history_notes" : null, + "history_process" : "b8uv5z", + "host_id" : "roeaa3", + "indicator" : "Failed" +},{ + "history_id" : "79k5igd3lip", + "history_input" : "import pandas as pd\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\ncmaq=pd.read_csv(home+\"/cmaq/test_data.csv\")\n\n# dropping unnecessary variables\ncmaq['YYYYMMDDHH'] = cmaq['YYYYMMDDHH'].map(str)\ncmaq['month'] = cmaq['YYYYMMDDHH'].str[4:6]\ncmaq['day'] = cmaq['YYYYMMDDHH'].str[6:8]\ncmaq['hours'] = cmaq['YYYYMMDDHH'].str[8:10]\n\n#new_df=cmaq.drop(['YYYYMMDDHH'],axis=1)\ncmaq.to_csv(home+\"/cmaq/testing.csv\",index=False)", + "history_output" : "", + "history_begin_time" : 1654319745196, + "history_end_time" : 1654319833416, + "history_notes" : null, + "history_process" : "b8uv5z", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "43g0l0snuv0", + "history_input" : "import pandas as pd\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\ncmaq=pd.read_csv(home+\"/cmaq/test_data.csv\")\n\n# dropping unnecessary variables\ncmaq['YYYYMMDDHH'] = cmaq['YYYYMMDDHH'].map(str)\ncmaq['month'] = cmaq['YYYYMMDDHH'].str[4:6]\ncmaq['day'] = cmaq['YYYYMMDDHH'].str[6:8]\ncmaq['hours'] = cmaq['YYYYMMDDHH'].str[8:10]\n\n#new_df=cmaq.drop(['YYYYMMDDHH'],axis=1)\ncmaq.to_csv(home+\"/cmaq/testing.csv\",index=False)", + "history_output" : "Traceback (most recent call last):\n File \"test_data.py\", line 6, in \n cmaq=pd.read_csv(home+\"/cmaq/test_data.csv\")\n File \"/home/zsun/anaconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 686, in read_csv\n return _read(filepath_or_buffer, kwds)\n File \"/home/zsun/anaconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 452, in _read\n parser = TextFileReader(fp_or_buf, **kwds)\n File \"/home/zsun/anaconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 946, in __init__\n self._make_engine(self.engine)\n File \"/home/zsun/anaconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 1178, in _make_engine\n self._engine = CParserWrapper(self.f, **self.options)\n File \"/home/zsun/anaconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 2008, in __init__\n self._reader = parsers.TextReader(src, **kwds)\n File \"pandas/_libs/parsers.pyx\", line 382, in pandas._libs.parsers.TextReader.__cinit__\n File 
\"pandas/_libs/parsers.pyx\", line 674, in pandas._libs.parsers.TextReader._setup_parser_source\nFileNotFoundError: [Errno 2] No such file or directory: '/home/zsun/cmaq/test_data.csv'\n", + "history_begin_time" : 1654317918359, + "history_end_time" : 1654317950935, + "history_notes" : null, + "history_process" : "b8uv5z", + "host_id" : "roeaa3", + "indicator" : "Done" +},{ + "history_id" : "uaf4sbot356", + "history_input" : "import pandas as pd\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\ncmaq=pd.read_csv(home+\"/cmaq/test_data.csv\")\n\n# dropping unnecessary variables\ncmaq['YYYYMMDDHH'] = cmaq['YYYYMMDDHH'].map(str)\ncmaq['month'] = cmaq['YYYYMMDDHH'].str[4:6]\ncmaq['day'] = cmaq['YYYYMMDDHH'].str[6:8]\ncmaq['hours'] = cmaq['YYYYMMDDHH'].str[8:10]\n\n#new_df=cmaq.drop(['YYYYMMDDHH'],axis=1)\ncmaq.to_csv(home+\"/cmaq/testing.csv\",index=False)", + "history_output" : "Traceback (most recent call last):\n File \"test_data.py\", line 6, in \n cmaq=pd.read_csv(home+\"/cmaq/test_data.csv\")\n File \"/home/zsun/anaconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 686, in read_csv\n return _read(filepath_or_buffer, kwds)\n File \"/home/zsun/anaconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 452, in _read\n parser = TextFileReader(fp_or_buf, **kwds)\n File \"/home/zsun/anaconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 946, in __init__\n self._make_engine(self.engine)\n File \"/home/zsun/anaconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 1178, in _make_engine\n self._engine = CParserWrapper(self.f, **self.options)\n File \"/home/zsun/anaconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 2008, in __init__\n self._reader = parsers.TextReader(src, **kwds)\n File \"pandas/_libs/parsers.pyx\", line 382, in pandas._libs.parsers.TextReader.__cinit__\n File \"pandas/_libs/parsers.pyx\", line 674, in pandas._libs.parsers.TextReader._setup_parser_source\nFileNotFoundError: [Errno 2] No such file or directory: '/home/zsun/cmaq/test_data.csv'\n", + "history_begin_time" : 1654314692985, + "history_end_time" : 1654314735195, + "history_notes" : null, + "history_process" : "b8uv5z", + "host_id" : "roeaa3", + "indicator" : "Done" +},{ + "history_id" : "vgq2339nmrc", + "history_input" : "import pandas as pd\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\ncmaq=pd.read_csv(home+\"/cmaq/test_data.csv\")\n\n# dropping unnecessary variables\ncmaq['YYYYMMDDHH'] = cmaq['YYYYMMDDHH'].map(str)\ncmaq['month'] = cmaq['YYYYMMDDHH'].str[4:6]\ncmaq['day'] = cmaq['YYYYMMDDHH'].str[6:8]\ncmaq['hours'] = cmaq['YYYYMMDDHH'].str[8:10]\n\n#new_df=cmaq.drop(['YYYYMMDDHH'],axis=1)\ncmaq.to_csv(home+\"/cmaq/testing.csv\",index=False)", + "history_output" : "", + "history_begin_time" : 1653698490119, + "history_end_time" : 1653698615352, + "history_notes" : null, + "history_process" : "b8uv5z", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "qz78x01107q", + "history_input" : "import pandas as pd\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\ncmaq=pd.read_csv(home+\"/cmaq/test_data.csv\")\n\n# dropping unnecessary variables\ncmaq['YYYYMMDDHH'] = cmaq['YYYYMMDDHH'].map(str)\ncmaq['month'] = cmaq['YYYYMMDDHH'].str[4:6]\ncmaq['day'] = cmaq['YYYYMMDDHH'].str[6:8]\ncmaq['hours'] = cmaq['YYYYMMDDHH'].str[8:10]\n\n#new_df=cmaq.drop(['YYYYMMDDHH'],axis=1)\ncmaq.to_csv(home+\"/cmaq/testing.csv\",index=False)", + "history_output" : "", + "history_begin_time" : 1653698392955, 
+ "history_end_time" : 1653698475052, + "history_notes" : null, + "history_process" : "b8uv5z", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "etusm6ishr9", + "history_input" : "import pandas as pd\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\ncmaq=pd.read_csv(home+\"/cmaq/test_data.csv\")\n\n# dropping unnecessary variables\ncmaq['YYYYMMDDHH'] = cmaq['YYYYMMDDHH'].map(str)\ncmaq['month'] = cmaq['YYYYMMDDHH'].str[4:6]\ncmaq['day'] = cmaq['YYYYMMDDHH'].str[6:8]\ncmaq['hours'] = cmaq['YYYYMMDDHH'].str[8:10]\n\n#new_df=cmaq.drop(['YYYYMMDDHH'],axis=1)\ncmaq.to_csv(home+\"/cmaq/testing.csv\",index=False)", + "history_output" : "Traceback (most recent call last):\n File \"test_data.py\", line 6, in \n cmaq=pd.read_csv(home+\"/cmaq/test_data.csv\")\n File \"C:\\Python37\\lib\\site-packages\\pandas\\io\\parsers.py\", line 676, in parser_f\n return _read(filepath_or_buffer, kwds)\n File \"C:\\Python37\\lib\\site-packages\\pandas\\io\\parsers.py\", line 448, in _read\n parser = TextFileReader(fp_or_buf, **kwds)\n File \"C:\\Python37\\lib\\site-packages\\pandas\\io\\parsers.py\", line 880, in __init__\n self._make_engine(self.engine)\n File \"C:\\Python37\\lib\\site-packages\\pandas\\io\\parsers.py\", line 1114, in _make_engine\n self._engine = CParserWrapper(self.f, **self.options)\n File \"C:\\Python37\\lib\\site-packages\\pandas\\io\\parsers.py\", line 1891, in __init__\n self._reader = parsers.TextReader(src, **kwds)\n File \"pandas\\_libs\\parsers.pyx\", line 374, in pandas._libs.parsers.TextReader.__cinit__\n File \"pandas\\_libs\\parsers.pyx\", line 674, in pandas._libs.parsers.TextReader._setup_parser_source\nFileNotFoundError: [Errno 2] File C:\\Users\\JensenSun/cmaq/test_data.csv does not exist: 'C:\\\\Users\\\\JensenSun/cmaq/test_data.csv'\n", + "history_begin_time" : 1652934716546, + "history_end_time" : 1652934717102, + "history_notes" : null, + "history_process" : "b8uv5z", + "host_id" : "100001", + "indicator" : "Done" +},{ + "history_id" : "pqwcp469fyi", + "history_input" : "import pandas as pd\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\ncmaq=pd.read_csv(home+\"/cmaq/test_data.csv\")\n\n# dropping unnecessary variables\ncmaq['YYYYMMDDHH'] = cmaq['YYYYMMDDHH'].map(str)\ncmaq['month'] = cmaq['YYYYMMDDHH'].str[4:6]\ncmaq['day'] = cmaq['YYYYMMDDHH'].str[6:8]\ncmaq['hours'] = cmaq['YYYYMMDDHH'].str[8:10]\n\n#new_df=cmaq.drop(['YYYYMMDDHH'],axis=1)\ncmaq.to_csv(home+\"/cmaq/testing.csv\",index=False)", + "history_output" : "", + "history_begin_time" : 1652832229164, + "history_end_time" : 1652832313323, + "history_notes" : null, + "history_process" : "b8uv5z", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "z6ke7lqrm2q", + "history_input" : "import pandas as pd\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\ncmaq=pd.read_csv(home+\"/cmaq/test_data.csv\")\n\n# dropping unnecessary variables\ncmaq['YYYYMMDDHH'] = cmaq['YYYYMMDDHH'].map(str)\ncmaq['month'] = cmaq['YYYYMMDDHH'].str[4:6]\ncmaq['day'] = cmaq['YYYYMMDDHH'].str[6:8]\ncmaq['hours'] = cmaq['YYYYMMDDHH'].str[8:10]\n\n#new_df=cmaq.drop(['YYYYMMDDHH'],axis=1)\ncmaq.to_csv(home+\"/cmaq/testing.csv\",index=False)", + "history_output" : "", + "history_begin_time" : 1652831359623, + "history_end_time" : 1652831447653, + "history_notes" : null, + "history_process" : "b8uv5z", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "65isind7el0", + "history_input" : null, + "history_output" : "Remote SCP 
command had error: scp: 65isind7el0.tar: Disk quota exceeded", + "history_begin_time" : 1652786139673, + "history_end_time" : 1652786141679, + "history_notes" : null, + "history_process" : "b8uv5z", + "host_id" : "p6wvf2", + "indicator" : "Failed" +},{ + "history_id" : "f8b1qxzw08d", + "history_input" : "import pandas as pd\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\ncmaq=pd.read_csv(home+\"/cmaq/test_data.csv\")\n\n# dropping unnecessary variables\ncmaq['YYYYMMDDHH'] = cmaq['YYYYMMDDHH'].map(str)\ncmaq['month'] = cmaq['YYYYMMDDHH'].str[4:6]\ncmaq['day'] = cmaq['YYYYMMDDHH'].str[6:8]\ncmaq['hours'] = cmaq['YYYYMMDDHH'].str[8:10]\n\n#new_df=cmaq.drop(['YYYYMMDDHH'],axis=1)\ncmaq.to_csv(home+\"/cmaq/testing.csv\",index=False)", + "history_output" : "", + "history_begin_time" : 1652055212953, + "history_end_time" : 1652055297109, + "history_notes" : null, + "history_process" : "b8uv5z", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "tk97p0j88kq", + "history_input" : "import pandas as pd\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\ncmaq=pd.read_csv(home+\"/cmaq/test_data.csv\")\n\n# dropping unnecessary variables\ncmaq['YYYYMMDDHH'] = cmaq['YYYYMMDDHH'].map(str)\ncmaq['month'] = cmaq['YYYYMMDDHH'].str[4:6]\ncmaq['day'] = cmaq['YYYYMMDDHH'].str[6:8]\ncmaq['hours'] = cmaq['YYYYMMDDHH'].str[8:10]\n\n#new_df=cmaq.drop(['YYYYMMDDHH'],axis=1)\ncmaq.to_csv(home+\"/cmaq/testing.csv\",index=False)", + "history_output" : "", + "history_begin_time" : 1652048118351, + "history_end_time" : 1652048197065, + "history_notes" : null, + "history_process" : "b8uv5z", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "9jnwki7e9s7", + "history_input" : "import pandas as pd\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\ncmaq=pd.read_csv(home+\"/cmaq/test_data.csv\")\n\n# dropping unnecessary variables\ncmaq['YYYYMMDDHH'] = cmaq['YYYYMMDDHH'].map(str)\ncmaq['month'] = cmaq['YYYYMMDDHH'].str[4:6]\ncmaq['day'] = cmaq['YYYYMMDDHH'].str[6:8]\ncmaq['hours'] = cmaq['YYYYMMDDHH'].str[8:10]\n\n#new_df=cmaq.drop(['YYYYMMDDHH'],axis=1)\ncmaq.to_csv(home+\"/cmaq/testing.csv\",index=False)", + "history_output" : "", + "history_begin_time" : 1650480846339, + "history_end_time" : 1650480978174, + "history_notes" : null, + "history_process" : "b8uv5z", + "host_id" : "3wgogh", + "indicator" : "Done" +},{ + "history_id" : "mk7uon7ya8f", + "history_input" : "import pandas as pd\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\ncmaq=pd.read_csv(home+\"/cmaq/test_data.csv\")\n\n# dropping unnecessary variables\ncmaq['YYYYMMDDHH'] = cmaq['YYYYMMDDHH'].map(str)\ncmaq['month'] = cmaq['YYYYMMDDHH'].str[4:6]\ncmaq['day'] = cmaq['YYYYMMDDHH'].str[6:8]\ncmaq['hours'] = cmaq['YYYYMMDDHH'].str[8:10]\n\n#new_df=cmaq.drop(['YYYYMMDDHH'],axis=1)\ncmaq.to_csv(home+\"/cmaq/testing.csv\",index=False)", + "history_output" : "", + "history_begin_time" : 1650473789759, + "history_end_time" : 1650473915149, + "history_notes" : null, + "history_process" : "b8uv5z", + "host_id" : "3wgogh", + "indicator" : "Done" +},{ + "history_id" : "z20aw1llntk", + "history_input" : "import pandas as pd\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\ncmaq=pd.read_csv(home+\"/cmaq/test_data.csv\")\n\n# dropping unnecessary variables\ncmaq['YYYYMMDDHH'] = cmaq['YYYYMMDDHH'].map(str)\ncmaq['month'] = cmaq['YYYYMMDDHH'].str[4:6]\ncmaq['day'] = cmaq['YYYYMMDDHH'].str[6:8]\ncmaq['hours'] = 
cmaq['YYYYMMDDHH'].str[8:10]\n\n#new_df=cmaq.drop(['YYYYMMDDHH'],axis=1)\ncmaq.to_csv(home+\"/cmaq/testing.csv\",index=False)", + "history_output" : "", + "history_begin_time" : 1650252523102, + "history_end_time" : 1650252619157, + "history_notes" : null, + "history_process" : "b8uv5z", + "host_id" : "3wgogh", + "indicator" : "Done" +},{ + "history_id" : "aosgbkzntki", + "history_input" : "import pandas as pd\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\ncmaq=pd.read_csv(home+\"/cmaq/test_data.csv\")\n\n# dropping unnecessary variables\ncmaq['YYYYMMDDHH'] = cmaq['YYYYMMDDHH'].map(str)\ncmaq['month'] = cmaq['YYYYMMDDHH'].str[4:6]\ncmaq['day'] = cmaq['YYYYMMDDHH'].str[6:8]\ncmaq['hours'] = cmaq['YYYYMMDDHH'].str[8:10]\n\n#new_df=cmaq.drop(['YYYYMMDDHH'],axis=1)\ncmaq.to_csv(home+\"/cmaq/testing.csv\",index=False)", + "history_output" : "", + "history_begin_time" : 1650215097103, + "history_end_time" : 1650215191122, + "history_notes" : null, + "history_process" : "b8uv5z", + "host_id" : "3wgogh", + "indicator" : "Done" +},] diff --git a/history/process_ex3vh9.json b/history/process_ex3vh9.json new file mode 100644 index 0000000..791682f --- /dev/null +++ b/history/process_ex3vh9.json @@ -0,0 +1,511 @@ +[{ + "history_id" : "orxddctze9j", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\nfrom datetime import timedelta\n# home directory\nhome = str(Path.home())\n\nbase = datetime.datetime.today() - timedelta(days=2)\ndate_list = [base - timedelta(days=x) for x in range(2)]\ndays = [date.strftime('%Y%m%d') for date in date_list]\n\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# O3 variable\n \t# O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n \n # read emission results\n # old files before 20210315 are not in diractory. 
must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220303:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313 and int(i)<=20220331:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ff.append(o3tp)\n\n # NO\n# oo=df.variables['NO'][:].values[k,0]\n# oo3=np.ravel(oo)\n# o3tp=np.transpose(oo3)\n# o3tp=np.round(o3tp)\n# hh.append(o3tp) \n \n# read mcip results \n# date must be later of 20210101\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\n for j in files:\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n \t# NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n\n \t# NO2\n oo=df.variables['CFRAC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n \n \ncmaq_O3=list(np.concatenate(aa).flat) \nprint(len(cmaq_O3))\ndel aa\ncmaq_NO2=list(np.concatenate(bb).flat) \nprint(len(cmaq_NO2))\ndel bb\ncmaq_CO=list(np.concatenate(cc).flat) \nprint(len(cmaq_CO))\ndel cc\n\ncmaq_PM25_CO=list(np.concatenate(ee).flat)\n\ndel ee\nCO_emi=list(np.concatenate(ff).flat) \nprint(len(CO_emi))\ndel ff\n\n#NO_emi=list(np.concatenate(hh).flat) \n#del hh\nPRSFC=list(np.concatenate(ii).flat) \ndel ii\nPBL=list(np.concatenate(jj).flat) \ndel jj\nTEMP2=list(np.concatenate(kk).flat) \ndel kk\nWSPD10=list(np.concatenate(ll).flat) \ndel ll\nWDIR10=list(np.concatenate(mm).flat)\ndel mm\n\nRGRND=list(np.concatenate(oo1).flat) \ndel oo1\n#RN=list(np.concatenate(pp).flat)\n#del pp\n#RC=list(np.concatenate(qq).flat)\n#del qq\nCFRAC=list(np.concatenate(rr).flat)\nprint(len(CFRAC))\ndel rr\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 values if the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime 
date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH,'CMAQ12KM_O3(ppb)':cmaq_O3,'CMAQ12KM_NO2(ppb)':cmaq_NO2,'CMAQ12KM_CO(ppm)':cmaq_CO,'CMAQ_OC(ug/m3)':cmaq_PM25_CO,'CO(moles/s)':CO_emi,'PRSFC(Pa)':PRSFC,'PBL(m)':PBL,'TEMP2(K)':TEMP2,'WSPD10(m/s)':WSPD10,'WDIR10(degree)':WDIR10,'RGRND(W/m2)':RGRND,'CFRAC':CFRAC})\nprint(dat.head())\ndat.to_csv('/groups/ESS/aalnaim/cmaq/test_data.csv',index=False)\n\n\n", + "history_output" : "20220626\n20220625\n5622240\n5622240\n5622240\n5622240\n5622240\n5622240\n5622240\n5622240\n Latitude Longitude YYYYMMDDHH ... WDIR10(degree) RGRND(W/m2) CFRAC\n0 21.829086 -120.620789 2022062612 ... 9.0 592.0 0.0\n1 21.855751 -120.512497 2022062612 ... 9.0 590.0 0.0\n2 21.882309 -120.404144 2022062612 ... 9.0 589.0 0.0\n3 21.908745 -120.295715 2022062612 ... 9.0 587.0 0.0\n4 21.935051 -120.187225 2022062612 ... 8.0 585.0 0.0\n\n[5 rows x 15 columns]\n", + "history_begin_time" : 1656428452856, + "history_end_time" : 1656450968066, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : "p6wvf2", + "indicator" : "Stopped" +},{ + "history_id" : "vwnsae06o0m", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\nfrom datetime import timedelta\n# home directory\nhome = str(Path.home())\n\nbase = datetime.datetime.today() - timedelta(days=2)\ndate_list = [base - timedelta(days=x) for x in range(2)]\ndays = [date.strftime('%Y%m%d') for date in date_list]\n\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# O3 variable\n \t# O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n \n # read emission results\n # old files before 20210315 are not in diractory. 
must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220303:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313 and int(i)<=20220331:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ff.append(o3tp)\n\n # NO\n# oo=df.variables['NO'][:].values[k,0]\n# oo3=np.ravel(oo)\n# o3tp=np.transpose(oo3)\n# o3tp=np.round(o3tp)\n# hh.append(o3tp) \n \n# read mcip results \n# date must be later of 20210101\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\n for j in files:\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n \t# NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n\n \t# NO2\n oo=df.variables['CFRAC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n \n \ncmaq_O3=list(np.concatenate(aa).flat) \nprint(len(cmaq_O3))\ndel aa\ncmaq_NO2=list(np.concatenate(bb).flat) \nprint(len(cmaq_NO2))\ndel bb\ncmaq_CO=list(np.concatenate(cc).flat) \nprint(len(cmaq_CO))\ndel cc\n\ncmaq_PM25_CO=list(np.concatenate(ee).flat)\n\ndel ee\nCO_emi=list(np.concatenate(ff).flat) \nprint(len(CO_emi))\ndel ff\n\n#NO_emi=list(np.concatenate(hh).flat) \n#del hh\nPRSFC=list(np.concatenate(ii).flat) \ndel ii\nPBL=list(np.concatenate(jj).flat) \ndel jj\nTEMP2=list(np.concatenate(kk).flat) \ndel kk\nWSPD10=list(np.concatenate(ll).flat) \ndel ll\nWDIR10=list(np.concatenate(mm).flat)\ndel mm\n\nRGRND=list(np.concatenate(oo1).flat) \ndel oo1\n#RN=list(np.concatenate(pp).flat)\n#del pp\n#RC=list(np.concatenate(qq).flat)\n#del qq\nCFRAC=list(np.concatenate(rr).flat)\nprint(len(CFRAC))\ndel rr\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 values if the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime 
date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH,'CMAQ12KM_O3(ppb)':cmaq_O3,'CMAQ12KM_NO2(ppb)':cmaq_NO2,'CMAQ12KM_CO(ppm)':cmaq_CO,'CMAQ_OC(ug/m3)':cmaq_PM25_CO,'CO(moles/s)':CO_emi,'PRSFC(Pa)':PRSFC,'PBL(m)':PBL,'TEMP2(K)':TEMP2,'WSPD10(m/s)':WSPD10,'WDIR10(degree)':WDIR10,'RGRND(W/m2)':RGRND,'CFRAC':CFRAC})\nprint(dat.head())\ndat.to_csv('/groups/ESS/aalnaim/cmaq/test_data.csv',index=False)\n\n\n", + "history_output" : "bash: python: command not found\n", + "history_begin_time" : 1656428411868, + "history_end_time" : 1656428429384, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : "p6wvf2", + "indicator" : "Stopped" +},{ + "history_id" : "ra78VXpBhR2N", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\nfrom datetime import timedelta\n# home directory\nhome = str(Path.home())\n\nbase = datetime.datetime.today() - timedelta(days=2)\ndate_list = [base - timedelta(days=x) for x in range(3)]\ndays = [date.strftime('%Y%m%d') for date in date_list]\n\ndays = ['20220615','20220614', '20220613', '20220612']\n\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# O3 variable\n \t# O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n \n # read emission results\n # old files before 20210315 are not in diractory. 
must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220303:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313 and int(i)<=20220331:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ff.append(o3tp)\n\n # NO\n# oo=df.variables['NO'][:].values[k,0]\n# oo3=np.ravel(oo)\n# o3tp=np.transpose(oo3)\n# o3tp=np.round(o3tp)\n# hh.append(o3tp) \n \n# read mcip results \n# date must be later of 20210101\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\n for j in files:\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n \t# NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n\n \t# NO2\n oo=df.variables['CFRAC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n \n \ncmaq_O3=list(np.concatenate(aa).flat) \nprint(len(cmaq_O3))\ndel aa\ncmaq_NO2=list(np.concatenate(bb).flat) \nprint(len(cmaq_NO2))\ndel bb\ncmaq_CO=list(np.concatenate(cc).flat) \nprint(len(cmaq_CO))\ndel cc\n\ncmaq_PM25_CO=list(np.concatenate(ee).flat)\n\ndel ee\nCO_emi=list(np.concatenate(ff).flat) \nprint(len(CO_emi))\ndel ff\n\n#NO_emi=list(np.concatenate(hh).flat) \n#del hh\nPRSFC=list(np.concatenate(ii).flat) \ndel ii\nPBL=list(np.concatenate(jj).flat) \ndel jj\nTEMP2=list(np.concatenate(kk).flat) \ndel kk\nWSPD10=list(np.concatenate(ll).flat) \ndel ll\nWDIR10=list(np.concatenate(mm).flat)\ndel mm\n\nRGRND=list(np.concatenate(oo1).flat) \ndel oo1\n#RN=list(np.concatenate(pp).flat)\n#del pp\n#RC=list(np.concatenate(qq).flat)\n#del qq\nCFRAC=list(np.concatenate(rr).flat)\nprint(len(CFRAC))\ndel rr\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 values if the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime 
date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH,'CMAQ12KM_O3(ppb)':cmaq_O3,'CMAQ12KM_NO2(ppb)':cmaq_NO2,'CMAQ12KM_CO(ppm)':cmaq_CO,'CMAQ_OC(ug/m3)':cmaq_PM25_CO,'CO(moles/s)':CO_emi,'PRSFC(Pa)':PRSFC,'PBL(m)':PBL,'TEMP2(K)':TEMP2,'WSPD10(m/s)':WSPD10,'WDIR10(degree)':WDIR10,'RGRND(W/m2)':RGRND,'CFRAC':CFRAC})\nprint(dat.head())\ndat.to_csv('/groups/ESS/aalnaim/cmaq/test_data.csv',index=False)\n\n\n", + "history_output" : "20220615\n20220614\n20220613\n20220612\n11244480\n11244480\n11244480\n11244480\n11244480\n11244480\n11244480\n11244480\n Latitude Longitude YYYYMMDDHH ... WDIR10(degree) RGRND(W/m2) CFRAC\n0 21.829086 -120.620789 2022061512 ... 342.0 595.0 0.0\n1 21.855751 -120.512497 2022061512 ... 342.0 594.0 0.0\n2 21.882309 -120.404144 2022061512 ... 341.0 592.0 0.0\n3 21.908745 -120.295715 2022061512 ... 341.0 591.0 0.0\n4 21.935051 -120.187225 2022061512 ... 341.0 589.0 0.0\n\n[5 rows x 15 columns]\n", + "history_begin_time" : 1655489193303, + "history_end_time" : 1655490491850, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "kHJiGOZQYjvf", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\nfrom datetime import timedelta\n# home directory\nhome = str(Path.home())\n\nbase = datetime.datetime.today() - timedelta(days=2)\ndate_list = [base - timedelta(days=x) for x in range(3)]\ndays = [date.strftime('%Y%m%d') for date in date_list]\n\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# O3 variable\n \t# O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n \n # read emission results\n # old files before 20210315 are not in diractory. 
must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220303:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313 and int(i)<=20220331:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ff.append(o3tp)\n\n # NO\n# oo=df.variables['NO'][:].values[k,0]\n# oo3=np.ravel(oo)\n# o3tp=np.transpose(oo3)\n# o3tp=np.round(o3tp)\n# hh.append(o3tp) \n \n# read mcip results \n# date must be later of 20210101\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\n for j in files:\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n \t# NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n\n \t# NO2\n oo=df.variables['CFRAC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n \n \ncmaq_O3=list(np.concatenate(aa).flat) \nprint(len(cmaq_O3))\ndel aa\ncmaq_NO2=list(np.concatenate(bb).flat) \nprint(len(cmaq_NO2))\ndel bb\ncmaq_CO=list(np.concatenate(cc).flat) \nprint(len(cmaq_CO))\ndel cc\n\ncmaq_PM25_CO=list(np.concatenate(ee).flat)\n\ndel ee\nCO_emi=list(np.concatenate(ff).flat) \nprint(len(CO_emi))\ndel ff\n\n#NO_emi=list(np.concatenate(hh).flat) \n#del hh\nPRSFC=list(np.concatenate(ii).flat) \ndel ii\nPBL=list(np.concatenate(jj).flat) \ndel jj\nTEMP2=list(np.concatenate(kk).flat) \ndel kk\nWSPD10=list(np.concatenate(ll).flat) \ndel ll\nWDIR10=list(np.concatenate(mm).flat)\ndel mm\n\nRGRND=list(np.concatenate(oo1).flat) \ndel oo1\n#RN=list(np.concatenate(pp).flat)\n#del pp\n#RC=list(np.concatenate(qq).flat)\n#del qq\nCFRAC=list(np.concatenate(rr).flat)\nprint(len(CFRAC))\ndel rr\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 values if the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime 
date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH,'CMAQ12KM_O3(ppb)':cmaq_O3,'CMAQ12KM_NO2(ppb)':cmaq_NO2,'CMAQ12KM_CO(ppm)':cmaq_CO,'CMAQ_OC(ug/m3)':cmaq_PM25_CO,'CO(moles/s)':CO_emi,'PRSFC(Pa)':PRSFC,'PBL(m)':PBL,'TEMP2(K)':TEMP2,'WSPD10(m/s)':WSPD10,'WDIR10(degree)':WDIR10,'RGRND(W/m2)':RGRND,'CFRAC':CFRAC})\nprint(dat.head())\ndat.to_csv('/groups/ESS/aalnaim/cmaq/test_data.csv',index=False)\n\n\n", + "history_output" : "20220613\n20220612\n20220611\n8433360\n8433360\n8433360\n8433360\n8433360\n8433360\n8433360\n8433360\n Latitude Longitude YYYYMMDDHH ... WDIR10(degree) RGRND(W/m2) CFRAC\n0 21.829086 -120.620789 2022061312 ... 347.0 341.0 0.0\n1 21.855751 -120.512497 2022061312 ... 346.0 338.0 0.0\n2 21.882309 -120.404144 2022061312 ... 346.0 311.0 1.0\n3 21.908745 -120.295715 2022061312 ... 347.0 300.0 1.0\n4 21.935051 -120.187225 2022061312 ... 348.0 308.0 1.0\n\n[5 rows x 15 columns]\n", + "history_begin_time" : 1655276511890, + "history_end_time" : 1655490491266, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "X8cRTB8AM4y9", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\nfrom datetime import timedelta\n# home directory\nhome = str(Path.home())\n\nbase = datetime.datetime.today() - timedelta(days=2)\ndate_list = [base - timedelta(days=x) for x in range(7)]\ndays = [date.strftime('%Y%m%d') for date in date_list]\n\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# O3 variable\n \t# O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n \n # read emission results\n # old files before 20210315 are not in diractory. 
must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220303:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313 and int(i)<=20220331:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ff.append(o3tp)\n\n # NO\n# oo=df.variables['NO'][:].values[k,0]\n# oo3=np.ravel(oo)\n# o3tp=np.transpose(oo3)\n# o3tp=np.round(o3tp)\n# hh.append(o3tp) \n \n# read mcip results \n# date must be later of 20210101\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\n for j in files:\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n \t# NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n\n \t# NO2\n oo=df.variables['CFRAC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n \n \ncmaq_O3=list(np.concatenate(aa).flat) \nprint(len(cmaq_O3))\ndel aa\ncmaq_NO2=list(np.concatenate(bb).flat) \nprint(len(cmaq_NO2))\ndel bb\ncmaq_CO=list(np.concatenate(cc).flat) \nprint(len(cmaq_CO))\ndel cc\n\ncmaq_PM25_CO=list(np.concatenate(ee).flat)\n\ndel ee\nCO_emi=list(np.concatenate(ff).flat) \nprint(len(CO_emi))\ndel ff\n\n#NO_emi=list(np.concatenate(hh).flat) \n#del hh\nPRSFC=list(np.concatenate(ii).flat) \ndel ii\nPBL=list(np.concatenate(jj).flat) \ndel jj\nTEMP2=list(np.concatenate(kk).flat) \ndel kk\nWSPD10=list(np.concatenate(ll).flat) \ndel ll\nWDIR10=list(np.concatenate(mm).flat)\ndel mm\n\nRGRND=list(np.concatenate(oo1).flat) \ndel oo1\n#RN=list(np.concatenate(pp).flat)\n#del pp\n#RC=list(np.concatenate(qq).flat)\n#del qq\nCFRAC=list(np.concatenate(rr).flat)\nprint(len(CFRAC))\ndel rr\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 values if the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime 
date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH,'CMAQ12KM_O3(ppb)':cmaq_O3,'CMAQ12KM_NO2(ppb)':cmaq_NO2,'CMAQ12KM_CO(ppm)':cmaq_CO,'CMAQ_OC(ug/m3)':cmaq_PM25_CO,'CO(moles/s)':CO_emi,'PRSFC(Pa)':PRSFC,'PBL(m)':PBL,'TEMP2(K)':TEMP2,'WSPD10(m/s)':WSPD10,'WDIR10(degree)':WDIR10,'RGRND(W/m2)':RGRND,'CFRAC':CFRAC})\nprint(dat.head())\ndat.to_csv(home+'/cmaq/test_data.csv',index=False)\n\n\n", + "history_output" : "20220610\n20220609\n20220608\n20220607\n20220606\n20220605\n20220604\n19677840\n19677840\n19677840\n19677840\n19677840\n19677840\n19677840\n19677840\n Latitude Longitude YYYYMMDDHH ... WDIR10(degree) RGRND(W/m2) CFRAC\n0 21.829086 -120.620789 2022061012 ... 315.0 599.0 0.0\n1 21.855751 -120.512497 2022061012 ... 315.0 597.0 0.0\n2 21.882309 -120.404144 2022061012 ... 315.0 596.0 0.0\n3 21.908745 -120.295715 2022061012 ... 315.0 594.0 0.0\n4 21.935051 -120.187225 2022061012 ... 314.0 593.0 0.0\n\n[5 rows x 15 columns]\n", + "history_begin_time" : 1655077019296, + "history_end_time" : 1655490490336, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "bpsieog903q", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\nfrom datetime import timedelta\n# home directory\nhome = str(Path.home())\n\nbase = datetime.datetime.today() - timedelta(days=2)\ndate_list = [base - timedelta(days=x) for x in range(7)]\ndays = [date.strftime('%Y%m%d') for date in date_list]\n\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# O3 variable\n \t# O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n \n # read emission results\n # old files before 20210315 are not in diractory. 
must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220303:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313 and int(i)<=20220331:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ff.append(o3tp)\n\n # NO\n# oo=df.variables['NO'][:].values[k,0]\n# oo3=np.ravel(oo)\n# o3tp=np.transpose(oo3)\n# o3tp=np.round(o3tp)\n# hh.append(o3tp) \n \n# read mcip results \n# date must be later of 20210101\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\n for j in files:\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n \t# NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n\n \t# NO2\n oo=df.variables['CFRAC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n \n \ncmaq_O3=list(np.concatenate(aa).flat) \nprint(len(cmaq_O3))\ndel aa\ncmaq_NO2=list(np.concatenate(bb).flat) \nprint(len(cmaq_NO2))\ndel bb\ncmaq_CO=list(np.concatenate(cc).flat) \nprint(len(cmaq_CO))\ndel cc\n\ncmaq_PM25_CO=list(np.concatenate(ee).flat)\n\ndel ee\nCO_emi=list(np.concatenate(ff).flat) \nprint(len(CO_emi))\ndel ff\n\n#NO_emi=list(np.concatenate(hh).flat) \n#del hh\nPRSFC=list(np.concatenate(ii).flat) \ndel ii\nPBL=list(np.concatenate(jj).flat) \ndel jj\nTEMP2=list(np.concatenate(kk).flat) \ndel kk\nWSPD10=list(np.concatenate(ll).flat) \ndel ll\nWDIR10=list(np.concatenate(mm).flat)\ndel mm\n\nRGRND=list(np.concatenate(oo1).flat) \ndel oo1\n#RN=list(np.concatenate(pp).flat)\n#del pp\n#RC=list(np.concatenate(qq).flat)\n#del qq\nCFRAC=list(np.concatenate(rr).flat)\nprint(len(CFRAC))\ndel rr\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 values if the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime 
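# Sanity check on the array lengths printed in the run logs above (an illustrative note, not
# part of the recorded history_input): the 12 km grid is 265 x 442 = 117,130 cells and each
# day contributes 24 hourly slices, so
#   3-day window: 117,130 * 24 * 3 =  8,433,360 values
#   7-day window: 117,130 * 24 * 7 = 19,677,840 values
# which matches the lengths reported for cmaq_O3, LAT, LON and YYYYMMDDHH.
print(265 * 442 * 24 * 3)   # 8433360
print(265 * 442 * 24 * 7)   # 19677840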
date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH})\nprint(dat.head())\ndat.to_csv(home+'/cmaq/test_data.csv',index=False)\n\n\n", + "history_output" : "20220610\n20220609\n20220608\n20220607\n20220606\n20220605\n20220604\n19677840\n19677840\n19677840\n19677840\n19677840\n19677840\n19677840\n19677840\n Latitude Longitude YYYYMMDDHH\n0 21.829086 -120.620789 2022061012\n1 21.855751 -120.512497 2022061012\n2 21.882309 -120.404144 2022061012\n3 21.908745 -120.295715 2022061012\n4 21.935051 -120.187225 2022061012\n", + "history_begin_time" : 1655072871942, + "history_end_time" : 1655073496197, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "xDAf45QSyIE9", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\nfrom datetime import timedelta\n# home directory\nhome = str(Path.home())\n\nbase = datetime.datetime.today() - timedelta(days=2)\ndate_list = [base - timedelta(days=x) for x in range(7)]\ndays = [date.strftime('%Y%m%d') for date in date_list]\n\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# O3 variable\n \t# O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n \n # read emission results\n # old files before 20210315 are not in diractory. 
must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220303:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313 and int(i)<=20220331:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ff.append(o3tp)\n\n # NO\n# oo=df.variables['NO'][:].values[k,0]\n# oo3=np.ravel(oo)\n# o3tp=np.transpose(oo3)\n# o3tp=np.round(o3tp)\n# hh.append(o3tp) \n \n# read mcip results \n# date must be later of 20210101\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\n for j in files:\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n \t# NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n\n \t# NO2\n oo=df.variables['CFRAC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n \n \ncmaq_O3=list(np.concatenate(aa).flat) \nprint(len(cmaq_O3))\ndel aa\ncmaq_NO2=list(np.concatenate(bb).flat) \nprint(len(cmaq_NO2))\ndel bb\ncmaq_CO=list(np.concatenate(cc).flat) \nprint(len(cmaq_CO))\ndel cc\n\ncmaq_PM25_CO=list(np.concatenate(ee).flat)\n\ndel ee\nCO_emi=list(np.concatenate(ff).flat) \nprint(len(CO_emi))\ndel ff\n\n#NO_emi=list(np.concatenate(hh).flat) \n#del hh\nPRSFC=list(np.concatenate(ii).flat) \ndel ii\nPBL=list(np.concatenate(jj).flat) \ndel jj\nTEMP2=list(np.concatenate(kk).flat) \ndel kk\nWSPD10=list(np.concatenate(ll).flat) \ndel ll\nWDIR10=list(np.concatenate(mm).flat)\ndel mm\n\nRGRND=list(np.concatenate(oo1).flat) \ndel oo1\n#RN=list(np.concatenate(pp).flat)\n#del pp\n#RC=list(np.concatenate(qq).flat)\n#del qq\nCFRAC=list(np.concatenate(rr).flat)\nprint(len(CFRAC))\ndel rr\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 values if the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime 
date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH})\nprint(dat.head())\ndat.to_csv(home+'/cmaq/test_data.csv',index=False)\n\n\n", + "history_output" : "20220610\n20220609\n20220608\n20220607\n20220606\n20220605\n20220604\n19677840\n19677840\n19677840\n19677840\n19677840\n19677840\n19677840\n19677840\n Latitude Longitude YYYYMMDDHH\n0 21.829086 -120.620789 2022061012\n1 21.855751 -120.512497 2022061012\n2 21.882309 -120.404144 2022061012\n3 21.908745 -120.295715 2022061012\n4 21.935051 -120.187225 2022061012\n", + "history_begin_time" : 1655070684424, + "history_end_time" : 1655076929974, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "kGkFHxwTajuH", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\nfrom datetime import timedelta\n# home directory\nhome = str(Path.home())\n\nbase = datetime.datetime.today()\ndate_list = [base - timedelta(days=x) for x in range(7)]\ndays = [date.strftime('%Y%m%d') for date in date_list]\n\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# O3 variable\n \t# O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n \n # read emission results\n # old files before 20210315 are not in diractory. 
must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220303:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313 and int(i)<=20220331:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ff.append(o3tp)\n\n # NO\n# oo=df.variables['NO'][:].values[k,0]\n# oo3=np.ravel(oo)\n# o3tp=np.transpose(oo3)\n# o3tp=np.round(o3tp)\n# hh.append(o3tp) \n \n# read mcip results \n# date must be later of 20210101\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\n for j in files:\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n \t# NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n\n \t# NO2\n oo=df.variables['CFRAC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n \n \ncmaq_O3=list(np.concatenate(aa).flat) \nprint(len(cmaq_O3))\ndel aa\ncmaq_NO2=list(np.concatenate(bb).flat) \nprint(len(cmaq_NO2))\ndel bb\ncmaq_CO=list(np.concatenate(cc).flat) \nprint(len(cmaq_CO))\ndel cc\n\ncmaq_PM25_CO=list(np.concatenate(ee).flat)\n\ndel ee\nCO_emi=list(np.concatenate(ff).flat) \nprint(len(CO_emi))\ndel ff\n\n#NO_emi=list(np.concatenate(hh).flat) \n#del hh\nPRSFC=list(np.concatenate(ii).flat) \ndel ii\nPBL=list(np.concatenate(jj).flat) \ndel jj\nTEMP2=list(np.concatenate(kk).flat) \ndel kk\nWSPD10=list(np.concatenate(ll).flat) \ndel ll\nWDIR10=list(np.concatenate(mm).flat)\ndel mm\n\nRGRND=list(np.concatenate(oo1).flat) \ndel oo1\n#RN=list(np.concatenate(pp).flat)\n#del pp\n#RC=list(np.concatenate(qq).flat)\n#del qq\nCFRAC=list(np.concatenate(rr).flat)\nprint(len(CFRAC))\ndel rr\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 values if the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime 
date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH})\nprint(dat.head())\ndat.to_csv(home+'/cmaq/test_data.csv',index=False)\n\n\n", + "history_output" : "20220612\n20220611\n20220610\n20220609\n20220608\n20220607\n20220606\n19677840\n19677840\n19677840\n19677840\n19677840\n19677840\n19677840\n19677840\n Latitude Longitude YYYYMMDDHH\n0 21.829086 -120.620789 2022061212\n1 21.855751 -120.512497 2022061212\n2 21.882309 -120.404144 2022061212\n3 21.908745 -120.295715 2022061212\n4 21.935051 -120.187225 2022061212\n", + "history_begin_time" : 1655068448119, + "history_end_time" : 1655076930531, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "PveR6QlvcIvf", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\nfrom datetime import timedelta\n# home directory\nhome = str(Path.home())\n\nbase = datetime.datetime.today()\ndate_list = [base - timedelta(days=x) for x in range(7)]\ndays = [date.strftime('%Y%m%d') for date in date_list]\n\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# O3 variable\n \t# O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n \n # read emission results\n # old files before 20210315 are not in diractory. 
must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220303:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313 and int(i)<=20220331:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ff.append(o3tp)\n\n # NO\n# oo=df.variables['NO'][:].values[k,0]\n# oo3=np.ravel(oo)\n# o3tp=np.transpose(oo3)\n# o3tp=np.round(o3tp)\n# hh.append(o3tp) \n \n# read mcip results \n# date must be later of 20210101\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\n for j in files:\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n \t# NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n\n \t# NO2\n oo=df.variables['CFRAC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n \n \ncmaq_O3=list(np.concatenate(aa).flat) \nprint(len(cmaq_O3))\ndel aa\ncmaq_NO2=list(np.concatenate(bb).flat) \nprint(len(cmaq_NO2))\ndel bb\ncmaq_CO=list(np.concatenate(cc).flat) \nprint(len(cmaq_CO))\ndel cc\n\ncmaq_PM25_CO=list(np.concatenate(ee).flat)\n\ndel ee\nCO_emi=list(np.concatenate(ff).flat) \nprint(len(CO_emi))\ndel ff\n\n#NO_emi=list(np.concatenate(hh).flat) \n#del hh\nPRSFC=list(np.concatenate(ii).flat) \ndel ii\nPBL=list(np.concatenate(jj).flat) \ndel jj\nTEMP2=list(np.concatenate(kk).flat) \ndel kk\nWSPD10=list(np.concatenate(ll).flat) \ndel ll\nWDIR10=list(np.concatenate(mm).flat)\ndel mm\n\nRGRND=list(np.concatenate(oo1).flat) \ndel oo1\n#RN=list(np.concatenate(pp).flat)\n#del pp\n#RC=list(np.concatenate(qq).flat)\n#del qq\nCFRAC=list(np.concatenate(rr).flat)\nprint(len(CFRAC))\ndel rr\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 values if the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime 
date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH})\nprint(dat.head())\ndat.to_csv(home+'/cmaq/test_data.csv',index=False)\n\n\n", + "history_output" : "20220607\n20220606\n20220605\n20220604\n20220603\n20220602\n20220601\n2811120\n2811120\n2811120\n2811120\n14055600\n19677840\n19677840\n19677840\n Latitude Longitude YYYYMMDDHH\n0 21.829086 -120.620789 2022060712\n1 21.855751 -120.512497 2022060712\n2 21.882309 -120.404144 2022060712\n3 21.908745 -120.295715 2022060712\n4 21.935051 -120.187225 2022060712\n", + "history_begin_time" : 1654616977749, + "history_end_time" : 1654620106124, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "YrQh3bYU76XA", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\nfrom datetime import timedelta\n# home directory\nhome = str(Path.home())\n\nbase = datetime.datetime.today()\ndate_list = [base - timedelta(days=x) for x in range(7)]\ndate_list = [date.strftime('%Y%m%d') for date in date_list]\n\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# O3 variable\n \t# O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n \n # read emission results\n # old files before 20210315 are not in diractory. 
must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220303:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313 and int(i)<=20220331:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ff.append(o3tp)\n\n # NO\n# oo=df.variables['NO'][:].values[k,0]\n# oo3=np.ravel(oo)\n# o3tp=np.transpose(oo3)\n# o3tp=np.round(o3tp)\n# hh.append(o3tp) \n \n# read mcip results \n# date must be later of 20210101\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\n for j in files:\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n \t# NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n\n \t# NO2\n oo=df.variables['CFRAC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n \n \ncmaq_O3=list(np.concatenate(aa).flat) \nprint(len(cmaq_O3))\ndel aa\ncmaq_NO2=list(np.concatenate(bb).flat) \nprint(len(cmaq_NO2))\ndel bb\ncmaq_CO=list(np.concatenate(cc).flat) \nprint(len(cmaq_CO))\ndel cc\n\ncmaq_PM25_CO=list(np.concatenate(ee).flat)\n\ndel ee\nCO_emi=list(np.concatenate(ff).flat) \nprint(len(CO_emi))\ndel ff\n\n#NO_emi=list(np.concatenate(hh).flat) \n#del hh\nPRSFC=list(np.concatenate(ii).flat) \ndel ii\nPBL=list(np.concatenate(jj).flat) \ndel jj\nTEMP2=list(np.concatenate(kk).flat) \ndel kk\nWSPD10=list(np.concatenate(ll).flat) \ndel ll\nWDIR10=list(np.concatenate(mm).flat)\ndel mm\n\nRGRND=list(np.concatenate(oo1).flat) \ndel oo1\n#RN=list(np.concatenate(pp).flat)\n#del pp\n#RC=list(np.concatenate(qq).flat)\n#del qq\nCFRAC=list(np.concatenate(rr).flat)\nprint(len(CFRAC))\ndel rr\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 values if the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime 
date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH})\nprint(dat.head())\ndat.to_csv(home+'/cmaq/test_data.csv',index=False)\n\n\n", + "history_output" : "Traceback (most recent call last):\n File \"processing_test_data.py\", line 19, in \n for i in days:\nNameError: name 'days' is not defined\n", + "history_begin_time" : 1654611118035, + "history_end_time" : 1654611601419, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "yszMq4eHJEZq", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\nfrom datetime import timedelta\n# home directory\nhome = str(Path.home())\n\ndt = datetime.datetime.today()\nstart = (dt - timedelta(days = (dt.weekday() + 2) % 7)) - timedelta(days=7)\nend = (start + timedelta(days=6))\nstart = start.strftime('%Y%m%d')\nend = end.strftime('%Y%m%d')\ndays=[start, end]\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# O3 variable\n \t# O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n \n # read emission results\n # old files before 20210315 are not in diractory. 
must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220303:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313 and int(i)<=20220331:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ff.append(o3tp)\n\n # NO\n# oo=df.variables['NO'][:].values[k,0]\n# oo3=np.ravel(oo)\n# o3tp=np.transpose(oo3)\n# o3tp=np.round(o3tp)\n# hh.append(o3tp) \n \n# read mcip results \n# date must be later of 20210101\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\n for j in files:\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n \t# NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n\n \t# NO2\n oo=df.variables['CFRAC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n \n \ncmaq_O3=list(np.concatenate(aa).flat) \nprint(len(cmaq_O3))\ndel aa\ncmaq_NO2=list(np.concatenate(bb).flat) \nprint(len(cmaq_NO2))\ndel bb\ncmaq_CO=list(np.concatenate(cc).flat) \nprint(len(cmaq_CO))\ndel cc\n\ncmaq_PM25_CO=list(np.concatenate(ee).flat)\n\ndel ee\nCO_emi=list(np.concatenate(ff).flat) \nprint(len(CO_emi))\ndel ff\n\n#NO_emi=list(np.concatenate(hh).flat) \n#del hh\nPRSFC=list(np.concatenate(ii).flat) \ndel ii\nPBL=list(np.concatenate(jj).flat) \ndel jj\nTEMP2=list(np.concatenate(kk).flat) \ndel kk\nWSPD10=list(np.concatenate(ll).flat) \ndel ll\nWDIR10=list(np.concatenate(mm).flat)\ndel mm\n\nRGRND=list(np.concatenate(oo1).flat) \ndel oo1\n#RN=list(np.concatenate(pp).flat)\n#del pp\n#RC=list(np.concatenate(qq).flat)\n#del qq\nCFRAC=list(np.concatenate(rr).flat)\nprint(len(CFRAC))\ndel rr\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 values if the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime 
date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH})\nprint(dat.head())\ndat.to_csv(home+'/cmaq/test_data.csv',index=False)\n\n\n", + "history_output" : "20220528\n20220603\n2811120\n2811120\n2811120\n2811120\n5622240\n5622240\n5622240\n5622240\n Latitude Longitude YYYYMMDDHH\n0 21.829086 -120.620789 2022052812\n1 21.855751 -120.512497 2022052812\n2 21.882309 -120.404144 2022052812\n3 21.908745 -120.295715 2022052812\n4 21.935051 -120.187225 2022052812\n", + "history_begin_time" : 1654610368754, + "history_end_time" : 1654611597790, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "0kFrv2SwqWbK", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\n# home directory\nhome = str(Path.home())\n\ndt = datetime.datetime.today()\nstart = (dt - timedelta(days = (dt.weekday() + 2) % 7)) - timedelta(days=7)\nend = (start + timedelta(days=6))\nstart = start.strftime('%Y%m%d')\nend = end.strftime('%Y%m%d')\ndays=[start, end]\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# O3 variable\n \t# O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n \n # read emission results\n # old files before 20210315 are not in diractory. 
must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220303:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313 and int(i)<=20220331:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ff.append(o3tp)\n\n # NO\n# oo=df.variables['NO'][:].values[k,0]\n# oo3=np.ravel(oo)\n# o3tp=np.transpose(oo3)\n# o3tp=np.round(o3tp)\n# hh.append(o3tp) \n \n# read mcip results \n# date must be later of 20210101\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\n for j in files:\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n \t# NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n\n \t# NO2\n oo=df.variables['CFRAC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n \n \ncmaq_O3=list(np.concatenate(aa).flat) \nprint(len(cmaq_O3))\ndel aa\ncmaq_NO2=list(np.concatenate(bb).flat) \nprint(len(cmaq_NO2))\ndel bb\ncmaq_CO=list(np.concatenate(cc).flat) \nprint(len(cmaq_CO))\ndel cc\n\ncmaq_PM25_CO=list(np.concatenate(ee).flat)\n\ndel ee\nCO_emi=list(np.concatenate(ff).flat) \nprint(len(CO_emi))\ndel ff\n\n#NO_emi=list(np.concatenate(hh).flat) \n#del hh\nPRSFC=list(np.concatenate(ii).flat) \ndel ii\nPBL=list(np.concatenate(jj).flat) \ndel jj\nTEMP2=list(np.concatenate(kk).flat) \ndel kk\nWSPD10=list(np.concatenate(ll).flat) \ndel ll\nWDIR10=list(np.concatenate(mm).flat)\ndel mm\n\nRGRND=list(np.concatenate(oo1).flat) \ndel oo1\n#RN=list(np.concatenate(pp).flat)\n#del pp\n#RC=list(np.concatenate(qq).flat)\n#del qq\nCFRAC=list(np.concatenate(rr).flat)\nprint(len(CFRAC))\ndel rr\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 values if the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime 
date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH})\nprint(dat.head())\ndat.to_csv(home+'/cmaq/test_data.csv',index=False)\n\n\n", + "history_output" : "Traceback (most recent call last):\n File \"processing_test_data.py\", line 11, in \n start = (dt - timedelta(days = (dt.weekday() + 2) % 7)) - timedelta(days=7)\nNameError: name 'timedelta' is not defined\n", + "history_begin_time" : 1654610330395, + "history_end_time" : 1654611597294, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "GihqxlfgqqKe", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\n# home directory\nhome = str(Path.home())\n\ndt = datetime.today()\nstart = (dt - timedelta(days = (dt.weekday() + 2) % 7)) - timedelta(days=7)\nend = (start + timedelta(days=6))\nstart = start.strftime('%Y%m%d')\nend = end.strftime('%Y%m%d')\ndays=[start, end]\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# O3 variable\n \t# O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n \n # read emission results\n # old files before 20210315 are not in diractory. 
must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220303:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313 and int(i)<=20220331:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ff.append(o3tp)\n\n # NO\n# oo=df.variables['NO'][:].values[k,0]\n# oo3=np.ravel(oo)\n# o3tp=np.transpose(oo3)\n# o3tp=np.round(o3tp)\n# hh.append(o3tp) \n \n# read mcip results \n# date must be later of 20210101\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\n for j in files:\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n \t# NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n\n \t# NO2\n oo=df.variables['CFRAC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n \n \ncmaq_O3=list(np.concatenate(aa).flat) \nprint(len(cmaq_O3))\ndel aa\ncmaq_NO2=list(np.concatenate(bb).flat) \nprint(len(cmaq_NO2))\ndel bb\ncmaq_CO=list(np.concatenate(cc).flat) \nprint(len(cmaq_CO))\ndel cc\n\ncmaq_PM25_CO=list(np.concatenate(ee).flat)\n\ndel ee\nCO_emi=list(np.concatenate(ff).flat) \nprint(len(CO_emi))\ndel ff\n\n#NO_emi=list(np.concatenate(hh).flat) \n#del hh\nPRSFC=list(np.concatenate(ii).flat) \ndel ii\nPBL=list(np.concatenate(jj).flat) \ndel jj\nTEMP2=list(np.concatenate(kk).flat) \ndel kk\nWSPD10=list(np.concatenate(ll).flat) \ndel ll\nWDIR10=list(np.concatenate(mm).flat)\ndel mm\n\nRGRND=list(np.concatenate(oo1).flat) \ndel oo1\n#RN=list(np.concatenate(pp).flat)\n#del pp\n#RC=list(np.concatenate(qq).flat)\n#del qq\nCFRAC=list(np.concatenate(rr).flat)\nprint(len(CFRAC))\ndel rr\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 values if the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime 
date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH})\nprint(dat.head())\ndat.to_csv(home+'/cmaq/test_data.csv',index=False)\n\n\n", + "history_output" : "Traceback (most recent call last):\n File \"processing_test_data.py\", line 10, in \n dt = datetime.today()\nAttributeError: module 'datetime' has no attribute 'today'\n", + "history_begin_time" : 1654610243184, + "history_end_time" : 1654611596701, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "jPLF1AvFF9R4", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\n# home directory\nhome = str(Path.home())\n\ndt = datetime.today()\nstart = (dt - timedelta(days = (dt.weekday() + 2) % 7)) - timedelta(days=7)\nend = (start + timedelta(days=6))\nstart = start.strftime('%Y%m%d')\nend = end..strftime('%Y%m%d')\ndays=[start, end]\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# O3 variable\n \t# O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n \n # read emission results\n # old files before 20210315 are not in diractory. 
must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220303:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313 and int(i)<=20220331:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ff.append(o3tp)\n\n # NO\n# oo=df.variables['NO'][:].values[k,0]\n# oo3=np.ravel(oo)\n# o3tp=np.transpose(oo3)\n# o3tp=np.round(o3tp)\n# hh.append(o3tp) \n \n# read mcip results \n# date must be later of 20210101\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\n for j in files:\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n \t# NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n\n \t# NO2\n oo=df.variables['CFRAC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n \n \ncmaq_O3=list(np.concatenate(aa).flat) \nprint(len(cmaq_O3))\ndel aa\ncmaq_NO2=list(np.concatenate(bb).flat) \nprint(len(cmaq_NO2))\ndel bb\ncmaq_CO=list(np.concatenate(cc).flat) \nprint(len(cmaq_CO))\ndel cc\n\ncmaq_PM25_CO=list(np.concatenate(ee).flat)\n\ndel ee\nCO_emi=list(np.concatenate(ff).flat) \nprint(len(CO_emi))\ndel ff\n\n#NO_emi=list(np.concatenate(hh).flat) \n#del hh\nPRSFC=list(np.concatenate(ii).flat) \ndel ii\nPBL=list(np.concatenate(jj).flat) \ndel jj\nTEMP2=list(np.concatenate(kk).flat) \ndel kk\nWSPD10=list(np.concatenate(ll).flat) \ndel ll\nWDIR10=list(np.concatenate(mm).flat)\ndel mm\n\nRGRND=list(np.concatenate(oo1).flat) \ndel oo1\n#RN=list(np.concatenate(pp).flat)\n#del pp\n#RC=list(np.concatenate(qq).flat)\n#del qq\nCFRAC=list(np.concatenate(rr).flat)\nprint(len(CFRAC))\ndel rr\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 values if the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime 
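# The extraction loops in these scripts repeat the same pattern for every variable: slice hour k
# at layer 0, ravel, transpose, round, append. A small helper makes the pattern explicit. This is
# an illustrative sketch only (the helper name is hypothetical, not code from the recorded runs).
import numpy as np
import xarray as xr

def extract_hour_slice(ds, var, hour):
    """Return the layer-0 slice of `var` at `hour` as a rounded 1-D array."""
    layer = ds[var].values[hour, 0]      # (ROW, COL) surface grid for one hour
    # np.transpose on a 1-D array is a no-op, so the original transpose step is dropped
    return np.round(np.ravel(layer))

# usage sketch, assuming `j` is one of the COMBINE3D_ACONC_* files found by glob above:
# ds = xr.open_dataset(j)
# aa.append(extract_hour_slice(ds, 'O3', 12))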
date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH})\nprint(dat.head())\ndat.to_csv(home+'/cmaq/test_data.csv',index=False)\n\n\n", + "history_output" : " File \"processing_test_data.py\", line 14\n end = end..strftime('%Y%m%d')\n ^\nSyntaxError: invalid syntax\n", + "history_begin_time" : 1654610053562, + "history_end_time" : 1654610233931, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "OHGom6kjOLXn", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\n# home directory\nhome = str(Path.home())\n\ntoday=datetime.datetime.today().strftime('%Y%m%d')\npday_= datetime.datetime.today() - datetime.timedelta(days=1)\npday=pday_.strftime('%Y%m%d')\nfday_= datetime.datetime.today() + datetime.timedelta(days=1)\nfday=fday_.strftime('%Y%m%d')\ndays=[today, pday]\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# O3 variable\n \t# O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n \n # read emission results\n # old files before 20210315 are not in diractory. 
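# Aside: the SyntaxError recorded above points at "end = end..strftime('%Y%m%d')";
# the doubled dot is a typo, and the same block uses datetime.today()/timedelta()
# without importing them from the datetime module. A hedged sketch of the intended
# weekly window (the recorded weekday-offset arithmetic is kept as-is; whether it
# anchors on the right day is an assumption):
from datetime import datetime, timedelta

dt = datetime.today()
start = dt - timedelta(days=(dt.weekday() + 2) % 7) - timedelta(days=7)
end = start + timedelta(days=6)
days = [start.strftime('%Y%m%d'), end.strftime('%Y%m%d')]
print(days)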
must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220303:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313 and int(i)<=20220331:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ff.append(o3tp)\n\n # NO\n# oo=df.variables['NO'][:].values[k,0]\n# oo3=np.ravel(oo)\n# o3tp=np.transpose(oo3)\n# o3tp=np.round(o3tp)\n# hh.append(o3tp) \n \n# read mcip results \n# date must be later of 20210101\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\n for j in files:\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n \t# NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n\n \t# NO2\n oo=df.variables['CFRAC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n \n \ncmaq_O3=list(np.concatenate(aa).flat) \nprint(len(cmaq_O3))\ndel aa\ncmaq_NO2=list(np.concatenate(bb).flat) \nprint(len(cmaq_NO2))\ndel bb\ncmaq_CO=list(np.concatenate(cc).flat) \nprint(len(cmaq_CO))\ndel cc\n\ncmaq_PM25_CO=list(np.concatenate(ee).flat)\n\ndel ee\nCO_emi=list(np.concatenate(ff).flat) \nprint(len(CO_emi))\ndel ff\n\n#NO_emi=list(np.concatenate(hh).flat) \n#del hh\nPRSFC=list(np.concatenate(ii).flat) \ndel ii\nPBL=list(np.concatenate(jj).flat) \ndel jj\nTEMP2=list(np.concatenate(kk).flat) \ndel kk\nWSPD10=list(np.concatenate(ll).flat) \ndel ll\nWDIR10=list(np.concatenate(mm).flat)\ndel mm\n\nRGRND=list(np.concatenate(oo1).flat) \ndel oo1\n#RN=list(np.concatenate(pp).flat)\n#del pp\n#RC=list(np.concatenate(qq).flat)\n#del qq\nCFRAC=list(np.concatenate(rr).flat)\nprint(len(CFRAC))\ndel rr\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 values if the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime 
date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH})\nprint(dat.head())\ndat.to_csv(home+'/cmaq/test_data.csv',index=False)\n\n\n", + "history_output" : "20220606\n20220605\nTraceback (most recent call last):\n File \"processing_test_data.py\", line 141, in \n cmaq_O3=list(np.concatenate(aa).flat) \n File \"<__array_function__ internals>\", line 180, in concatenate\nValueError: need at least one array to concatenate\n", + "history_begin_time" : 1654534602317, + "history_end_time" : 1654534633990, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "hxxQG0xh7FWU", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\n# home directory\nhome = str(Path.home())\n\ntoday=datetime.datetime.today().strftime('%Y%m%d')\npday_= datetime.datetime.today() - datetime.timedelta(days=5)\npday=pday_.strftime('%Y%m%d')\nfday_= pday_ + datetime.timedelta(days=1)\nfday=fday_.strftime('%Y%m%d')\ndays=[pday, fday]\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# O3 variable\n \t# O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n \n # read emission results\n # old files before 20210315 are not in diractory. 
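# Aside: "ValueError: need at least one array to concatenate" above means the
# species lists stayed empty, i.e. glob.glob() matched no CMAQ POST file for either
# requested date (the files may simply not be on disk yet). A small guard, not from
# the recorded workflow and with an assumed example date list, fails fast with a
# readable message instead of crashing inside np.concatenate():
import glob
import sys

post_dir = "/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/"
days = ["20220605", "20220606"]  # assumed example dates

for day in days:
    files = glob.glob(post_dir + "COMBINE3D_ACONC_v531_gcc_AQF5X_" + day + "_extracted.nc")
    if not files:
        sys.exit("No CMAQ POST file found for " + day + "; choose an earlier date.")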
must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220303:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313 and int(i)<=20220331:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ff.append(o3tp)\n\n # NO\n# oo=df.variables['NO'][:].values[k,0]\n# oo3=np.ravel(oo)\n# o3tp=np.transpose(oo3)\n# o3tp=np.round(o3tp)\n# hh.append(o3tp) \n \n# read mcip results \n# date must be later of 20210101\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\n for j in files:\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n \t# NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n\n \t# NO2\n oo=df.variables['CFRAC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n \n \ncmaq_O3=list(np.concatenate(aa).flat) \nprint(len(cmaq_O3))\ndel aa\ncmaq_NO2=list(np.concatenate(bb).flat) \nprint(len(cmaq_NO2))\ndel bb\ncmaq_CO=list(np.concatenate(cc).flat) \nprint(len(cmaq_CO))\ndel cc\n\ncmaq_PM25_CO=list(np.concatenate(ee).flat)\n\ndel ee\nCO_emi=list(np.concatenate(ff).flat) \nprint(len(CO_emi))\ndel ff\n\n#NO_emi=list(np.concatenate(hh).flat) \n#del hh\nPRSFC=list(np.concatenate(ii).flat) \ndel ii\nPBL=list(np.concatenate(jj).flat) \ndel jj\nTEMP2=list(np.concatenate(kk).flat) \ndel kk\nWSPD10=list(np.concatenate(ll).flat) \ndel ll\nWDIR10=list(np.concatenate(mm).flat)\ndel mm\n\nRGRND=list(np.concatenate(oo1).flat) \ndel oo1\n#RN=list(np.concatenate(pp).flat)\n#del pp\n#RC=list(np.concatenate(qq).flat)\n#del qq\nCFRAC=list(np.concatenate(rr).flat)\nprint(len(CFRAC))\ndel rr\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 values if the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime 
date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH})\nprint(dat.head())\ndat.to_csv(home+'/cmaq/test_data.csv',index=False)\n\n\n", + "history_output" : "20220601\n20220602\n2811120\n2811120\n2811120\n2811120\n5622240\n5622240\n5622240\n5622240\n Latitude Longitude YYYYMMDDHH\n0 21.829086 -120.620789 2022060112\n1 21.855751 -120.512497 2022060112\n2 21.882309 -120.404144 2022060112\n3 21.908745 -120.295715 2022060112\n4 21.935051 -120.187225 2022060112\n", + "history_begin_time" : 1654534255229, + "history_end_time" : 1654534633004, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "oQXGDeARIud0", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\n# home directory\nhome = str(Path.home())\n\ntoday=datetime.datetime.today().strftime('%Y%m%d')\npday_= datetime.datetime.today() - datetime.timedelta(days=5)\npday=pday_.strftime('%Y%m%d')\nfday_= pday_ + datetime.timedelta(days=1)\nfday=fday_.strftime('%Y%m%d')\ndays=[pday, fday]\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# O3 variable\n \t# O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n \n # read emission results\n # old files before 20210315 are not in diractory. 
must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220303:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313 and int(i)<=20220331:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ff.append(o3tp)\n\n # NO\n# oo=df.variables['NO'][:].values[k,0]\n# oo3=np.ravel(oo)\n# o3tp=np.transpose(oo3)\n# o3tp=np.round(o3tp)\n# hh.append(o3tp) \n \n# read mcip results \n# date must be later of 20210101\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\n for j in files:\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n \t# NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n\n \t# NO2\n oo=df.variables['CFRAC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n \n \ncmaq_O3=list(np.concatenate(aa).flat) \nprint(len(cmaq_O3))\ndel aa\ncmaq_NO2=list(np.concatenate(bb).flat) \nprint(len(cmaq_NO2))\ndel bb\ncmaq_CO=list(np.concatenate(cc).flat) \nprint(len(cmaq_CO))\ndel cc\n\ncmaq_PM25_CO=list(np.concatenate(ee).flat)\n\ndel ee\nCO_emi=list(np.concatenate(ff).flat) \nprint(len(CO_emi))\ndel ff\n\n#NO_emi=list(np.concatenate(hh).flat) \n#del hh\nPRSFC=list(np.concatenate(ii).flat) \ndel ii\nPBL=list(np.concatenate(jj).flat) \ndel jj\nTEMP2=list(np.concatenate(kk).flat) \ndel kk\nWSPD10=list(np.concatenate(ll).flat) \ndel ll\nWDIR10=list(np.concatenate(mm).flat)\ndel mm\n\nRGRND=list(np.concatenate(oo1).flat) \ndel oo1\n#RN=list(np.concatenate(pp).flat)\n#del pp\n#RC=list(np.concatenate(qq).flat)\n#del qq\nCFRAC=list(np.concatenate(rr).flat)\nprint(len(CFRAC))\ndel rr\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 values if the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime 
date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH,'CMAQ12KM_O3(ppb)':cmaq_O3,'CMAQ12KM_NO2(ppb)':cmaq_NO2,'CMAQ12KM_CO(ppm)':cmaq_CO,'CMAQ_OC(ug/m3)':cmaq_PM25_CO,'CO(moles/s)':CO_emi,'PRSFC(Pa)':PRSFC,'PBL(m)':PBL,'TEMP2(K)':TEMP2,'WSPD10(m/s)':WSPD10,'WDIR10(degree)':WDIR10,'RGRND(W/m2)':RGRND,'CFRAC':CFRAC})\nprint(dat.head())\ndat.to_csv(home+'/cmaq/test_data.csv',index=False)\n\n\n", + "history_output" : "20220601\n20220602\n2811120\n2811120\n2811120\n2811120\n5622240\n5622240\n5622240\n5622240\nTraceback (most recent call last):\n File \"processing_test_data.py\", line 209, in \n dat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH,'CMAQ12KM_O3(ppb)':cmaq_O3,'CMAQ12KM_NO2(ppb)':cmaq_NO2,'CMAQ12KM_CO(ppm)':cmaq_CO,'CMAQ_OC(ug/m3)':cmaq_PM25_CO,'CO(moles/s)':CO_emi,'PRSFC(Pa)':PRSFC,'PBL(m)':PBL,'TEMP2(K)':TEMP2,'WSPD10(m/s)':WSPD10,'WDIR10(degree)':WDIR10,'RGRND(W/m2)':RGRND,'CFRAC':CFRAC})\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/pandas/core/frame.py\", line 636, in __init__\n mgr = dict_to_mgr(data, index, columns, dtype=dtype, copy=copy, typ=manager)\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/pandas/core/internals/construction.py\", line 502, in dict_to_mgr\n return arrays_to_mgr(arrays, columns, index, dtype=dtype, typ=typ, consolidate=copy)\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/pandas/core/internals/construction.py\", line 120, in arrays_to_mgr\n index = _extract_index(arrays)\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/pandas/core/internals/construction.py\", line 674, in _extract_index\n raise ValueError(\"All arrays must be of the same length\")\nValueError: All arrays must be of the same length\n", + "history_begin_time" : 1654533966876, + "history_end_time" : 1654534632522, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "OswE1bo0Y2hR", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\n# home directory\nhome = str(Path.home())\n\ntoday=datetime.datetime.today().strftime('%Y%m%d')\npday_= datetime.datetime.today() - datetime.timedelta(days=5)\npday=pday_.strftime('%Y%m%d')\nfday_= pday_ + datetime.timedelta(days=1)\nfday=fday_.strftime('%Y%m%d')\ndays=[pday, fday]\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\n\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in diractory. 
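# Aside: in the run above the coordinate/time columns hold len(days)*24*265*442
# (= 5,622,240) values while the CMAQ species lists hold 2,811,120, because input
# files were found for only one of the two days, so pandas raises "All arrays must
# be of the same length". A cheap length check before building the DataFrame makes
# the mismatch obvious; the toy columns below are illustrative only:
import pandas as pd

columns = {
    "Latitude": [1.0, 2.0, 3.0],
    "Longitude": [4.0, 5.0, 6.0],
    "CMAQ12KM_O3(ppb)": [30.0, 31.0],  # deliberately short to trigger the check
}
lengths = {name: len(vals) for name, vals in columns.items()}
if len(set(lengths.values())) != 1:
    print("Column lengths differ, not building DataFrame:", lengths)
else:
    dat = pd.DataFrame(columns)
    print(dat.head())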
must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n \n # read emission results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220303:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\nprint(files)\nfor j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n # O3 variable\n # O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n # NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n\n cmaq_O3=list(np.concatenate(aa).flat) \n print(len(cmaq_O3))\n del aa\n cmaq_NO2=list(np.concatenate(bb).flat) \n print(len(cmaq_NO2))\n del bb\n cmaq_CO=list(np.concatenate(cc).flat) \n print(len(cmaq_CO))\n del cc\n\n cmaq_PM25_CO=list(np.concatenate(ee).flat)\n\n del ee\n \n\n\n\n# read mcip results \n# date must be later of 20210101\nfiles = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\nfor j in files:\n df = xr.open_dataset(j)\n for k in t:\n # CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n # NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n\n # NO2\n oo=df.variables['CFRAC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n\n #NO_emi=list(np.concatenate(hh).flat) \n #del hh\n PRSFC=list(np.concatenate(ii).flat) \n del ii\n PBL=list(np.concatenate(jj).flat) \n del jj\n TEMP2=list(np.concatenate(kk).flat) \n del kk\n WSPD10=list(np.concatenate(ll).flat) \n del ll\n WDIR10=list(np.concatenate(mm).flat)\n del mm\n\n RGRND=list(np.concatenate(oo1).flat) \n 
del oo1\n #RN=list(np.concatenate(pp).flat)\n #del pp\n #RC=list(np.concatenate(qq).flat)\n #del qq\n CFRAC=list(np.concatenate(rr).flat)\n print(len(CFRAC))\n del rr\n \n \n\n# CO_emi=list(np.concatenate(ff).flat) \n# print(len(CO_emi))\n# del ff\n\n\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 values if the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH,'CMAQ12KM_O3(ppb)':cmaq_O3,'CMAQ12KM_NO2(ppb)':cmaq_NO2,'CMAQ12KM_CO(ppm)':cmaq_CO,'CMAQ_OC(ug/m3)':cmaq_PM25_CO,'CO(moles/s)':CO_emi,'PRSFC(Pa)':PRSFC,'PBL(m)':PBL,'TEMP2(K)':TEMP2,'WSPD10(m/s)':WSPD10,'WDIR10(degree)':WDIR10,'RGRND(W/m2)':RGRND,'CFRAC':CFRAC})\nprint(dat.head())\ndat.to_csv(home+'/cmaq/test_data.csv',index=False)\n\nprint(dat)\n\n\n", + "history_output" : "20220601\n20220602\n['/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/emis_mole_all_20220602_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf']\nTraceback (most recent call last):\n File \"processing_test_data.py\", line 49, in \n oo=df.variables['O3'][:].values[k,0]\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/xarray/core/utils.py\", line 457, in __getitem__\n return self.mapping[key]\nKeyError: 'O3'\n", + "history_begin_time" : 1654533897147, + "history_end_time" : 1654534631971, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "N6CXBr90E8kv", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\n# home directory\nhome = str(Path.home())\n\ntoday=datetime.datetime.today().strftime('%Y%m%d')\npday_= datetime.datetime.today() - datetime.timedelta(days=5)\npday=pday_.strftime('%Y%m%d')\nfday_= pday_ + datetime.timedelta(days=1)\nfday=fday_.strftime('%Y%m%d')\ndays=[today,pday]\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\n\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n \n # read emission results\n # old files before 20210315 are not in diractory. 
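# Aside: the KeyError: 'O3' above happens because the concentration glob and the
# emission glob overwrite the same `files` variable inside the day loop, so the
# O3/NO2/CO extraction later opens an emis_mole_all_*.ncf file, which has no 'O3'
# variable. Keeping one list per data source avoids that; paths and dates below
# are assumed for illustration:
import glob

post_dir = "/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/"
emis_dir = "/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/"
days = ["20220601", "20220602"]  # assumed example dates

conc_files, emis_files = [], []
for day in days:
    conc_files += glob.glob(post_dir + "COMBINE3D_ACONC_v531_gcc_AQF5X_" + day + "_extracted.nc")
    emis_files += glob.glob(emis_dir + "emis_mole_all_" + day + "_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf")

# read O3/NO2/CO only from conc_files and emitted CO only from emis_files
print(len(conc_files), "concentration files;", len(emis_files), "emission files")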
must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220303:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\nprint(files)\nfor j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n # O3 variable\n # O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n # NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n\n cmaq_O3=list(np.concatenate(aa).flat) \n print(len(cmaq_O3))\n del aa\n cmaq_NO2=list(np.concatenate(bb).flat) \n print(len(cmaq_NO2))\n del bb\n cmaq_CO=list(np.concatenate(cc).flat) \n print(len(cmaq_CO))\n del cc\n\n cmaq_PM25_CO=list(np.concatenate(ee).flat)\n\n del ee\n \n\n\n\n# read mcip results \n# date must be later of 20210101\nfiles = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\nfor j in files:\n df = xr.open_dataset(j)\n for k in t:\n # CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n # NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n\n # NO2\n oo=df.variables['CFRAC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n\n #NO_emi=list(np.concatenate(hh).flat) \n #del hh\n PRSFC=list(np.concatenate(ii).flat) \n del ii\n PBL=list(np.concatenate(jj).flat) \n del jj\n TEMP2=list(np.concatenate(kk).flat) \n del kk\n WSPD10=list(np.concatenate(ll).flat) \n del ll\n WDIR10=list(np.concatenate(mm).flat)\n del mm\n\n RGRND=list(np.concatenate(oo1).flat) \n del oo1\n #RN=list(np.concatenate(pp).flat)\n #del pp\n #RC=list(np.concatenate(qq).flat)\n #del qq\n CFRAC=list(np.concatenate(rr).flat)\n print(len(CFRAC))\n del rr\n \n \n\n# CO_emi=list(np.concatenate(ff).flat) \n# print(len(CO_emi))\n# del ff\n\n\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 values if 
the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH,'CMAQ12KM_O3(ppb)':cmaq_O3,'CMAQ12KM_NO2(ppb)':cmaq_NO2,'CMAQ12KM_CO(ppm)':cmaq_CO,'CMAQ_OC(ug/m3)':cmaq_PM25_CO,'CO(moles/s)':CO_emi,'PRSFC(Pa)':PRSFC,'PBL(m)':PBL,'TEMP2(K)':TEMP2,'WSPD10(m/s)':WSPD10,'WDIR10(degree)':WDIR10,'RGRND(W/m2)':RGRND,'CFRAC':CFRAC})\nprint(dat.head())\ndat.to_csv(home+'/cmaq/test_data.csv',index=False)\n\nprint(dat)\n\n\n", + "history_output" : "20220606\n20220601\n['/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/emis_mole_all_20220601_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf']\nTraceback (most recent call last):\n File \"processing_test_data.py\", line 49, in \n oo=df.variables['O3'][:].values[k,0]\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/xarray/core/utils.py\", line 457, in __getitem__\n return self.mapping[key]\nKeyError: 'O3'\n", + "history_begin_time" : 1654533778350, + "history_end_time" : 1654534631489, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "XT7Pk2USdvgX", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\n# home directory\nhome = str(Path.home())\n\ntoday=datetime.datetime.today().strftime('%Y%m%d')\npday_= datetime.datetime.today() - datetime.timedelta(days=1)\npday=pday_.strftime('%Y%m%d')\nfday_= datetime.datetime.today() + datetime.timedelta(days=1)\nfday=fday_.strftime('%Y%m%d')\ndays=[today,pday]\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\n\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n \n # read emission results\n # old files before 20210315 are not in diractory. 
must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220303:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\nprint(files)\nfor j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n # O3 variable\n # O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n # NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n\n cmaq_O3=list(np.concatenate(aa).flat) \n print(len(cmaq_O3))\n del aa\n cmaq_NO2=list(np.concatenate(bb).flat) \n print(len(cmaq_NO2))\n del bb\n cmaq_CO=list(np.concatenate(cc).flat) \n print(len(cmaq_CO))\n del cc\n\n cmaq_PM25_CO=list(np.concatenate(ee).flat)\n\n del ee\n \n\n\n\n# read mcip results \n# date must be later of 20210101\nfiles = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\nfor j in files:\n df = xr.open_dataset(j)\n for k in t:\n # CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n # NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n\n # NO2\n oo=df.variables['CFRAC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n\n #NO_emi=list(np.concatenate(hh).flat) \n #del hh\n PRSFC=list(np.concatenate(ii).flat) \n del ii\n PBL=list(np.concatenate(jj).flat) \n del jj\n TEMP2=list(np.concatenate(kk).flat) \n del kk\n WSPD10=list(np.concatenate(ll).flat) \n del ll\n WDIR10=list(np.concatenate(mm).flat)\n del mm\n\n RGRND=list(np.concatenate(oo1).flat) \n del oo1\n #RN=list(np.concatenate(pp).flat)\n #del pp\n #RC=list(np.concatenate(qq).flat)\n #del qq\n CFRAC=list(np.concatenate(rr).flat)\n print(len(CFRAC))\n del rr\n \n \n\n# CO_emi=list(np.concatenate(ff).flat) \n# print(len(CO_emi))\n# del ff\n\n\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 values if 
the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH,'CMAQ12KM_O3(ppb)':cmaq_O3,'CMAQ12KM_NO2(ppb)':cmaq_NO2,'CMAQ12KM_CO(ppm)':cmaq_CO,'CMAQ_OC(ug/m3)':cmaq_PM25_CO,'CO(moles/s)':CO_emi,'PRSFC(Pa)':PRSFC,'PBL(m)':PBL,'TEMP2(K)':TEMP2,'WSPD10(m/s)':WSPD10,'WDIR10(degree)':WDIR10,'RGRND(W/m2)':RGRND,'CFRAC':CFRAC})\nprint(dat.head())\ndat.to_csv(home+'/cmaq/test_data.csv',index=False)\n\nprint(dat)\n\n\n", + "history_output" : "20220606\n20220605\n[]\n5622240\n5622240\n5622240\nTraceback (most recent call last):\n File \"processing_test_data.py\", line 201, in \n dat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH,'CMAQ12KM_O3(ppb)':cmaq_O3,'CMAQ12KM_NO2(ppb)':cmaq_NO2,'CMAQ12KM_CO(ppm)':cmaq_CO,'CMAQ_OC(ug/m3)':cmaq_PM25_CO,'CO(moles/s)':CO_emi,'PRSFC(Pa)':PRSFC,'PBL(m)':PBL,'TEMP2(K)':TEMP2,'WSPD10(m/s)':WSPD10,'WDIR10(degree)':WDIR10,'RGRND(W/m2)':RGRND,'CFRAC':CFRAC})\nNameError: name 'cmaq_O3' is not defined\n", + "history_begin_time" : 1654533456703, + "history_end_time" : 1654534630954, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "gM2EBitWsivj", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\n# home directory\nhome = str(Path.home())\n\ntoday=datetime.datetime.today().strftime('%Y%m%d')\npday_= datetime.datetime.today() - datetime.timedelta(days=1)\npday=pday_.strftime('%Y%m%d')\nfday_= datetime.datetime.today() + datetime.timedelta(days=1)\nfday=fday_.strftime('%Y%m%d')\ndays=[today,pday]\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\n\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n \n # read emission results\n # old files before 20210315 are not in diractory. 
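# Aside: the NameError above ("cmaq_O3 is not defined") is a knock-on effect: the
# file list for the species-reading loop came back empty (no matching file for that
# date, or the hard-coded date window excluded it), so the loop body that creates
# cmaq_O3/cmaq_NO2/... never ran. A sketch, not from the recorded workflow, that
# checks for the expected columns before assembling the DataFrame:
required = ["cmaq_O3", "cmaq_NO2", "cmaq_CO", "cmaq_PM25_CO",
            "PRSFC", "PBL", "TEMP2", "WSPD10", "WDIR10", "RGRND", "CFRAC"]
missing = [name for name in required if name not in globals()]
if missing:
    print("Input files were missing; these columns were never built:", missing)
else:
    print("All expected columns exist; safe to build the DataFrame.")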
must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220303:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313 and int(i)<=20220630:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\nfor j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n # O3 variable\n # O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n # NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n\n cmaq_O3=list(np.concatenate(aa).flat) \n print(len(cmaq_O3))\n del aa\n cmaq_NO2=list(np.concatenate(bb).flat) \n print(len(cmaq_NO2))\n del bb\n cmaq_CO=list(np.concatenate(cc).flat) \n print(len(cmaq_CO))\n del cc\n\n cmaq_PM25_CO=list(np.concatenate(ee).flat)\n\n del ee\n \n\n\n\n# read mcip results \n# date must be later of 20210101\nfiles = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\nfor j in files:\n df = xr.open_dataset(j)\n for k in t:\n # CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n # NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n\n # NO2\n oo=df.variables['CFRAC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n\n #NO_emi=list(np.concatenate(hh).flat) \n #del hh\n PRSFC=list(np.concatenate(ii).flat) \n del ii\n PBL=list(np.concatenate(jj).flat) \n del jj\n TEMP2=list(np.concatenate(kk).flat) \n del kk\n WSPD10=list(np.concatenate(ll).flat) \n del ll\n WDIR10=list(np.concatenate(mm).flat)\n del mm\n\n RGRND=list(np.concatenate(oo1).flat) \n del oo1\n #RN=list(np.concatenate(pp).flat)\n #del pp\n #RC=list(np.concatenate(qq).flat)\n #del qq\n CFRAC=list(np.concatenate(rr).flat)\n print(len(CFRAC))\n del rr\n \n \n\n# CO_emi=list(np.concatenate(ff).flat) \n# print(len(CO_emi))\n# del ff\n\n\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 
values if the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH,'CMAQ12KM_O3(ppb)':cmaq_O3,'CMAQ12KM_NO2(ppb)':cmaq_NO2,'CMAQ12KM_CO(ppm)':cmaq_CO,'CMAQ_OC(ug/m3)':cmaq_PM25_CO,'CO(moles/s)':CO_emi,'PRSFC(Pa)':PRSFC,'PBL(m)':PBL,'TEMP2(K)':TEMP2,'WSPD10(m/s)':WSPD10,'WDIR10(degree)':WDIR10,'RGRND(W/m2)':RGRND,'CFRAC':CFRAC})\nprint(dat.head())\ndat.to_csv(home+'/cmaq/test_data.csv',index=False)\n\nprint(dat)\n\n\n", + "history_output" : "20220606\n20220605\n5622240\n5622240\n5622240\nTraceback (most recent call last):\n File \"processing_test_data.py\", line 201, in \n dat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH,'CMAQ12KM_O3(ppb)':cmaq_O3,'CMAQ12KM_NO2(ppb)':cmaq_NO2,'CMAQ12KM_CO(ppm)':cmaq_CO,'CMAQ_OC(ug/m3)':cmaq_PM25_CO,'CO(moles/s)':CO_emi,'PRSFC(Pa)':PRSFC,'PBL(m)':PBL,'TEMP2(K)':TEMP2,'WSPD10(m/s)':WSPD10,'WDIR10(degree)':WDIR10,'RGRND(W/m2)':RGRND,'CFRAC':CFRAC})\nNameError: name 'cmaq_O3' is not defined\n", + "history_begin_time" : 1654533256095, + "history_end_time" : 1654534630506, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "KhUW39AUfb9B", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\n# home directory\nhome = str(Path.home())\n\ntoday=datetime.datetime.today().strftime('%Y%m%d')\npday_= datetime.datetime.today() - datetime.timedelta(days=1)\npday=pday_.strftime('%Y%m%d')\nfday_= datetime.datetime.today() + datetime.timedelta(days=1)\nfday=fday_.strftime('%Y%m%d')\ndays=[today,pday]\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\n\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n \n # read emission results\n # old files before 20210315 are not in diractory. 
must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220303:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313 and int(i)<=20220331:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\nfor j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n # O3 variable\n # O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n # NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n\n cmaq_O3=list(np.concatenate(aa).flat) \n print(len(cmaq_O3))\n del aa\n cmaq_NO2=list(np.concatenate(bb).flat) \n print(len(cmaq_NO2))\n del bb\n cmaq_CO=list(np.concatenate(cc).flat) \n print(len(cmaq_CO))\n del cc\n\n cmaq_PM25_CO=list(np.concatenate(ee).flat)\n\n del ee\n \n\n\n\n# read mcip results \n# date must be later of 20210101\nfiles = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\nfor j in files:\n df = xr.open_dataset(j)\n for k in t:\n # CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n # NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n\n # NO2\n oo=df.variables['CFRAC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n\n #NO_emi=list(np.concatenate(hh).flat) \n #del hh\n PRSFC=list(np.concatenate(ii).flat) \n del ii\n PBL=list(np.concatenate(jj).flat) \n del jj\n TEMP2=list(np.concatenate(kk).flat) \n del kk\n WSPD10=list(np.concatenate(ll).flat) \n del ll\n WDIR10=list(np.concatenate(mm).flat)\n del mm\n\n RGRND=list(np.concatenate(oo1).flat) \n del oo1\n #RN=list(np.concatenate(pp).flat)\n #del pp\n #RC=list(np.concatenate(qq).flat)\n #del qq\n CFRAC=list(np.concatenate(rr).flat)\n print(len(CFRAC))\n del rr\n \n \n\n# CO_emi=list(np.concatenate(ff).flat) \n# print(len(CO_emi))\n# del ff\n\n\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 
values if the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH,'CMAQ12KM_O3(ppb)':cmaq_O3,'CMAQ12KM_NO2(ppb)':cmaq_NO2,'CMAQ12KM_CO(ppm)':cmaq_CO,'CMAQ_OC(ug/m3)':cmaq_PM25_CO,'CO(moles/s)':CO_emi,'PRSFC(Pa)':PRSFC,'PBL(m)':PBL,'TEMP2(K)':TEMP2,'WSPD10(m/s)':WSPD10,'WDIR10(degree)':WDIR10,'RGRND(W/m2)':RGRND,'CFRAC':CFRAC})\nprint(dat.head())\ndat.to_csv(home+'/cmaq/test_data.csv',index=False)\n\nprint(dat)\n\n\n", + "history_output" : "20220606\n20220605\n5622240\n5622240\n5622240\nTraceback (most recent call last):\n File \"processing_test_data.py\", line 201, in \n dat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH,'CMAQ12KM_O3(ppb)':cmaq_O3,'CMAQ12KM_NO2(ppb)':cmaq_NO2,'CMAQ12KM_CO(ppm)':cmaq_CO,'CMAQ_OC(ug/m3)':cmaq_PM25_CO,'CO(moles/s)':CO_emi,'PRSFC(Pa)':PRSFC,'PBL(m)':PBL,'TEMP2(K)':TEMP2,'WSPD10(m/s)':WSPD10,'WDIR10(degree)':WDIR10,'RGRND(W/m2)':RGRND,'CFRAC':CFRAC})\nNameError: name 'cmaq_O3' is not defined\n", + "history_begin_time" : 1654533177041, + "history_end_time" : 1654534629954, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "VsWzWokYSjVW", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\n# home directory\nhome = str(Path.home())\n\ntoday=datetime.datetime.today().strftime('%Y%m%d')\npday_= datetime.datetime.today() - datetime.timedelta(days=1)\npday=pday_.strftime('%Y%m%d')\nfday_= datetime.datetime.today() + datetime.timedelta(days=1)\nfday=fday_.strftime('%Y%m%d')\ndays=[today,pday]\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in diractory. 
must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n print(\"yes1\")\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# O3 variable\n \t# O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n \n # read emission results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n print(\"yes2\")\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220630:\n print(\"yes4\")\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313 and int(i)<=20220630:\n print(\"yes3\")\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\nfor j in files:\n print(j)\n df = xr.open_dataset(j)\n for k in t:\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n print(aa)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n# read mcip results \n# date must be later of 20210101\nfiles = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\nfor j in files:\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n \t# NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n\n \t# NO2\n 
oo=df.variables['CFRAC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n \n \ncmaq_O3=list(np.concatenate(aa).flat) \nprint(len(cmaq_O3))\ndel aa\ncmaq_NO2=list(np.concatenate(bb).flat) \nprint(len(cmaq_NO2))\ndel bb\ncmaq_CO=list(np.concatenate(cc).flat) \nprint(len(cmaq_CO))\ndel cc\n\ncmaq_PM25_CO=list(np.concatenate(ee).flat)\n\ndel ee\nCO_emi=list(np.concatenate(ff).flat) \nprint(len(CO_emi))\ndel ff\n\n#NO_emi=list(np.concatenate(hh).flat) \n#del hh\nPRSFC=list(np.concatenate(ii).flat) \ndel ii\nPBL=list(np.concatenate(jj).flat) \ndel jj\nTEMP2=list(np.concatenate(kk).flat) \ndel kk\nWSPD10=list(np.concatenate(ll).flat) \ndel ll\nWDIR10=list(np.concatenate(mm).flat)\ndel mm\n\nRGRND=list(np.concatenate(oo1).flat) \ndel oo1\n#RN=list(np.concatenate(pp).flat)\n#del pp\n#RC=list(np.concatenate(qq).flat)\n#del qq\nCFRAC=list(np.concatenate(rr).flat)\nprint(len(CFRAC))\ndel rr\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 values if the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH,'CMAQ12KM_O3(ppb)':cmaq_O3,'CMAQ12KM_NO2(ppb)':cmaq_NO2,'CMAQ12KM_CO(ppm)':cmaq_CO,'CMAQ_OC(ug/m3)':cmaq_PM25_CO,'CO(moles/s)':CO_emi,'PRSFC(Pa)':PRSFC,'PBL(m)':PBL,'TEMP2(K)':TEMP2,'WSPD10(m/s)':WSPD10,'WDIR10(degree)':WDIR10,'RGRND(W/m2)':RGRND,'CFRAC':CFRAC})\nprint(dat.head())\ndat.to_csv(home+'/cmaq/test_data.csv',index=False)\n\n", + "history_output" : "20220606\nyes3\n20220605\nyes3\nTraceback (most recent call last):\n File \"processing_test_data.py\", line 157, in \n cmaq_O3=list(np.concatenate(aa).flat) \n File \"<__array_function__ internals>\", line 180, in concatenate\nValueError: need at least one array to concatenate\n", + "history_begin_time" : 1654532852393, + "history_end_time" : 1654534629505, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "s5MSJsC6Kqdw", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\n# home directory\nhome = str(Path.home())\n\ntoday=datetime.datetime.today().strftime('%Y%m%d')\npday_= datetime.datetime.today() - datetime.timedelta(days=1)\npday=pday_.strftime('%Y%m%d')\nfday_= datetime.datetime.today() + datetime.timedelta(days=1)\nfday=fday_.strftime('%Y%m%d')\ndays=[today,pday]\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in 
diractory. must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n print(\"yes1\")\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# O3 variable\n \t# O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n \n # read emission results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n print(\"yes2\")\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220630:\n print(\"yes4\")\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313 and int(i)<=20220630:\n print(\"yes3\")\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\nfor j in files:\n print(j)\n df = xr.open_dataset(j)\n for k in t:\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n print(aa)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n# read mcip results \n# date must be later of 20210101\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\n for j in files:\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n \t# NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n\n \t# NO2\n 
oo=df.variables['CFRAC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n \n \ncmaq_O3=list(np.concatenate(aa).flat) \nprint(len(cmaq_O3))\ndel aa\ncmaq_NO2=list(np.concatenate(bb).flat) \nprint(len(cmaq_NO2))\ndel bb\ncmaq_CO=list(np.concatenate(cc).flat) \nprint(len(cmaq_CO))\ndel cc\n\ncmaq_PM25_CO=list(np.concatenate(ee).flat)\n\ndel ee\nCO_emi=list(np.concatenate(ff).flat) \nprint(len(CO_emi))\ndel ff\n\n#NO_emi=list(np.concatenate(hh).flat) \n#del hh\nPRSFC=list(np.concatenate(ii).flat) \ndel ii\nPBL=list(np.concatenate(jj).flat) \ndel jj\nTEMP2=list(np.concatenate(kk).flat) \ndel kk\nWSPD10=list(np.concatenate(ll).flat) \ndel ll\nWDIR10=list(np.concatenate(mm).flat)\ndel mm\n\nRGRND=list(np.concatenate(oo1).flat) \ndel oo1\n#RN=list(np.concatenate(pp).flat)\n#del pp\n#RC=list(np.concatenate(qq).flat)\n#del qq\nCFRAC=list(np.concatenate(rr).flat)\nprint(len(CFRAC))\ndel rr\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 values if the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH,'CMAQ12KM_O3(ppb)':cmaq_O3,'CMAQ12KM_NO2(ppb)':cmaq_NO2,'CMAQ12KM_CO(ppm)':cmaq_CO,'CMAQ_OC(ug/m3)':cmaq_PM25_CO,'CO(moles/s)':CO_emi,'PRSFC(Pa)':PRSFC,'PBL(m)':PBL,'TEMP2(K)':TEMP2,'WSPD10(m/s)':WSPD10,'WDIR10(degree)':WDIR10,'RGRND(W/m2)':RGRND,'CFRAC':CFRAC})\nprint(dat.head())\ndat.to_csv(home+'/cmaq/test_data.csv',index=False)\n\n", + "history_output" : " File \"processing_test_data.py\", line 107\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\n ^\nIndentationError: unindent does not match any outer indentation level\n", + "history_begin_time" : 1654532795982, + "history_end_time" : 1654534628923, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "1nrCjjDGdpzc", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\n# home directory\nhome = str(Path.home())\n\ntoday=datetime.datetime.today().strftime('%Y%m%d')\npday_= datetime.datetime.today() - datetime.timedelta(days=1)\npday=pday_.strftime('%Y%m%d')\nfday_= datetime.datetime.today() + datetime.timedelta(days=1)\nfday=fday_.strftime('%Y%m%d')\ndays=[today,pday]\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in diractory. 
must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n print(\"yes1\")\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# O3 variable\n \t# O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n \n # read emission results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n print(\"yes2\")\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220303:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313 and int(i)<=20220630:\n print(\"yes3\")\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n for j in files:\n print(j)\n df = xr.open_dataset(j)\n for k in t:\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n print(aa)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n# read mcip results \n# date must be later of 20210101\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\n for j in files:\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n \t# NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n\n \t# NO2\n oo=df.variables['CFRAC'][:].values[k,0]\n 
oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n \n \ncmaq_O3=list(np.concatenate(aa).flat) \nprint(len(cmaq_O3))\ndel aa\ncmaq_NO2=list(np.concatenate(bb).flat) \nprint(len(cmaq_NO2))\ndel bb\ncmaq_CO=list(np.concatenate(cc).flat) \nprint(len(cmaq_CO))\ndel cc\n\ncmaq_PM25_CO=list(np.concatenate(ee).flat)\n\ndel ee\nCO_emi=list(np.concatenate(ff).flat) \nprint(len(CO_emi))\ndel ff\n\n#NO_emi=list(np.concatenate(hh).flat) \n#del hh\nPRSFC=list(np.concatenate(ii).flat) \ndel ii\nPBL=list(np.concatenate(jj).flat) \ndel jj\nTEMP2=list(np.concatenate(kk).flat) \ndel kk\nWSPD10=list(np.concatenate(ll).flat) \ndel ll\nWDIR10=list(np.concatenate(mm).flat)\ndel mm\n\nRGRND=list(np.concatenate(oo1).flat) \ndel oo1\n#RN=list(np.concatenate(pp).flat)\n#del pp\n#RC=list(np.concatenate(qq).flat)\n#del qq\nCFRAC=list(np.concatenate(rr).flat)\nprint(len(CFRAC))\ndel rr\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 values if the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH,'CMAQ12KM_O3(ppb)':cmaq_O3,'CMAQ12KM_NO2(ppb)':cmaq_NO2,'CMAQ12KM_CO(ppm)':cmaq_CO,'CMAQ_OC(ug/m3)':cmaq_PM25_CO,'CO(moles/s)':CO_emi,'PRSFC(Pa)':PRSFC,'PBL(m)':PBL,'TEMP2(K)':TEMP2,'WSPD10(m/s)':WSPD10,'WDIR10(degree)':WDIR10,'RGRND(W/m2)':RGRND,'CFRAC':CFRAC})\nprint(dat.head())\ndat.to_csv(home+'/cmaq/test_data.csv',index=False)\n\n", + "history_output" : "20220606\nyes3\n20220605\nyes3\nTraceback (most recent call last):\n File \"processing_test_data.py\", line 156, in \n cmaq_O3=list(np.concatenate(aa).flat) \n File \"<__array_function__ internals>\", line 180, in concatenate\nValueError: need at least one array to concatenate\n", + "history_begin_time" : 1654532443644, + "history_end_time" : 1654534638789, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "jpNJloWTR1tk", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\n# home directory\nhome = str(Path.home())\n\ntoday=datetime.datetime.today().strftime('%Y%m%d')\npday_= datetime.datetime.today() - datetime.timedelta(days=1)\npday=pday_.strftime('%Y%m%d')\nfday_= datetime.datetime.today() + datetime.timedelta(days=1)\nfday=fday_.strftime('%Y%m%d')\ndays=[today,pday]\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in diractory. 
must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n print(\"yes1\")\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# O3 variable\n \t# O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n \n # read emission results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n print(\"yes2\")\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220303:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313 and int(i)<=20220630:\n print(\"yes3\")\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n for j in files:\n\tprint(j)\n df = xr.open_dataset(j)\n for k in t:\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n print(aa)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n# read mcip results \n# date must be later of 20210101\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\n for j in files:\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n \t# NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n\n \t# NO2\n oo=df.variables['CFRAC'][:].values[k,0]\n 
oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n \n \ncmaq_O3=list(np.concatenate(aa).flat) \nprint(len(cmaq_O3))\ndel aa\ncmaq_NO2=list(np.concatenate(bb).flat) \nprint(len(cmaq_NO2))\ndel bb\ncmaq_CO=list(np.concatenate(cc).flat) \nprint(len(cmaq_CO))\ndel cc\n\ncmaq_PM25_CO=list(np.concatenate(ee).flat)\n\ndel ee\nCO_emi=list(np.concatenate(ff).flat) \nprint(len(CO_emi))\ndel ff\n\n#NO_emi=list(np.concatenate(hh).flat) \n#del hh\nPRSFC=list(np.concatenate(ii).flat) \ndel ii\nPBL=list(np.concatenate(jj).flat) \ndel jj\nTEMP2=list(np.concatenate(kk).flat) \ndel kk\nWSPD10=list(np.concatenate(ll).flat) \ndel ll\nWDIR10=list(np.concatenate(mm).flat)\ndel mm\n\nRGRND=list(np.concatenate(oo1).flat) \ndel oo1\n#RN=list(np.concatenate(pp).flat)\n#del pp\n#RC=list(np.concatenate(qq).flat)\n#del qq\nCFRAC=list(np.concatenate(rr).flat)\nprint(len(CFRAC))\ndel rr\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 values if the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH,'CMAQ12KM_O3(ppb)':cmaq_O3,'CMAQ12KM_NO2(ppb)':cmaq_NO2,'CMAQ12KM_CO(ppm)':cmaq_CO,'CMAQ_OC(ug/m3)':cmaq_PM25_CO,'CO(moles/s)':CO_emi,'PRSFC(Pa)':PRSFC,'PBL(m)':PBL,'TEMP2(K)':TEMP2,'WSPD10(m/s)':WSPD10,'WDIR10(degree)':WDIR10,'RGRND(W/m2)':RGRND,'CFRAC':CFRAC})\nprint(dat.head())\ndat.to_csv(home+'/cmaq/test_data.csv',index=False)\n\n", + "history_output" : " File \"processing_test_data.py\", line 75\n print(j)\n ^\nTabError: inconsistent use of tabs and spaces in indentation\n", + "history_begin_time" : 1654532420390, + "history_end_time" : 1654534638188, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "xmzhQE1tcFmr", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\n# home directory\nhome = str(Path.home())\n\ntoday=datetime.datetime.today().strftime('%Y%m%d')\npday_= datetime.datetime.today() - datetime.timedelta(days=1)\npday=pday_.strftime('%Y%m%d')\nfday_= datetime.datetime.today() + datetime.timedelta(days=1)\nfday=fday_.strftime('%Y%m%d')\ndays=[today,pday]\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in diractory. 
must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n print(\"yes1\")\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# O3 variable\n \t# O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n \n # read emission results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n print(\"yes2\")\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220303:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313 and int(i)<=20220630:\n print(\"yes3\")\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n print(aa)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n# read mcip results \n# date must be later of 20210101\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\n for j in files:\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n \t# NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n\n \t# NO2\n oo=df.variables['CFRAC'][:].values[k,0]\n oo3=np.ravel(oo)\n 
o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n \n \ncmaq_O3=list(np.concatenate(aa).flat) \nprint(len(cmaq_O3))\ndel aa\ncmaq_NO2=list(np.concatenate(bb).flat) \nprint(len(cmaq_NO2))\ndel bb\ncmaq_CO=list(np.concatenate(cc).flat) \nprint(len(cmaq_CO))\ndel cc\n\ncmaq_PM25_CO=list(np.concatenate(ee).flat)\n\ndel ee\nCO_emi=list(np.concatenate(ff).flat) \nprint(len(CO_emi))\ndel ff\n\n#NO_emi=list(np.concatenate(hh).flat) \n#del hh\nPRSFC=list(np.concatenate(ii).flat) \ndel ii\nPBL=list(np.concatenate(jj).flat) \ndel jj\nTEMP2=list(np.concatenate(kk).flat) \ndel kk\nWSPD10=list(np.concatenate(ll).flat) \ndel ll\nWDIR10=list(np.concatenate(mm).flat)\ndel mm\n\nRGRND=list(np.concatenate(oo1).flat) \ndel oo1\n#RN=list(np.concatenate(pp).flat)\n#del pp\n#RC=list(np.concatenate(qq).flat)\n#del qq\nCFRAC=list(np.concatenate(rr).flat)\nprint(len(CFRAC))\ndel rr\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 values if the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH,'CMAQ12KM_O3(ppb)':cmaq_O3,'CMAQ12KM_NO2(ppb)':cmaq_NO2,'CMAQ12KM_CO(ppm)':cmaq_CO,'CMAQ_OC(ug/m3)':cmaq_PM25_CO,'CO(moles/s)':CO_emi,'PRSFC(Pa)':PRSFC,'PBL(m)':PBL,'TEMP2(K)':TEMP2,'WSPD10(m/s)':WSPD10,'WDIR10(degree)':WDIR10,'RGRND(W/m2)':RGRND,'CFRAC':CFRAC})\nprint(dat.head())\ndat.to_csv(home+'/cmaq/test_data.csv',index=False)\n\n", + "history_output" : "20220606\nyes3\n20220605\nyes3\nTraceback (most recent call last):\n File \"processing_test_data.py\", line 156, in \n cmaq_O3=list(np.concatenate(aa).flat) \n File \"<__array_function__ internals>\", line 180, in concatenate\nValueError: need at least one array to concatenate\n", + "history_begin_time" : 1654532365070, + "history_end_time" : 1654534637704, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "ddLV6ktoF1I8", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\n# home directory\nhome = str(Path.home())\n\ntoday=datetime.datetime.today().strftime('%Y%m%d')\npday_= datetime.datetime.today() - datetime.timedelta(days=1)\npday=pday_.strftime('%Y%m%d')\nfday_= datetime.datetime.today() + datetime.timedelta(days=1)\nfday=fday_.strftime('%Y%m%d')\ndays=[today,pday]\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in diractory. 
must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n print(\"yes1\")\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# O3 variable\n \t# O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n \n # read emission results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n print(\"yes2\")\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220303:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313 and int(i)<=20220630:\n print(\"yes3\")\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n print(aa)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n# read mcip results \n# date must be later of 20210101\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\n for j in files:\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n \t# NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n\n \t# NO2\n oo=df.variables['CFRAC'][:].values[k,0]\n oo3=np.ravel(oo)\n 
o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n \n \ncmaq_O3=list(np.concatenate(aa).flat) \nprint(len(cmaq_O3))\ndel aa\ncmaq_NO2=list(np.concatenate(bb).flat) \nprint(len(cmaq_NO2))\ndel bb\ncmaq_CO=list(np.concatenate(cc).flat) \nprint(len(cmaq_CO))\ndel cc\n\ncmaq_PM25_CO=list(np.concatenate(ee).flat)\n\ndel ee\nCO_emi=list(np.concatenate(ff).flat) \nprint(len(CO_emi))\ndel ff\n\n#NO_emi=list(np.concatenate(hh).flat) \n#del hh\nPRSFC=list(np.concatenate(ii).flat) \ndel ii\nPBL=list(np.concatenate(jj).flat) \ndel jj\nTEMP2=list(np.concatenate(kk).flat) \ndel kk\nWSPD10=list(np.concatenate(ll).flat) \ndel ll\nWDIR10=list(np.concatenate(mm).flat)\ndel mm\n\nRGRND=list(np.concatenate(oo1).flat) \ndel oo1\n#RN=list(np.concatenate(pp).flat)\n#del pp\n#RC=list(np.concatenate(qq).flat)\n#del qq\nCFRAC=list(np.concatenate(rr).flat)\nprint(len(CFRAC))\ndel rr\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 values if the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH,'CMAQ12KM_O3(ppb)':cmaq_O3,'CMAQ12KM_NO2(ppb)':cmaq_NO2,'CMAQ12KM_CO(ppm)':cmaq_CO,'CMAQ_OC(ug/m3)':cmaq_PM25_CO,'CO(moles/s)':CO_emi,'PRSFC(Pa)':PRSFC,'PBL(m)':PBL,'TEMP2(K)':TEMP2,'WSPD10(m/s)':WSPD10,'WDIR10(degree)':WDIR10,'RGRND(W/m2)':RGRND,'CFRAC':CFRAC})\nprint(dat.head())\ndat.to_csv(home+'/cmaq/test_data.csv',index=False)\n\n", + "history_output" : "20220606\nyes3\n20220605\nyes3\nTraceback (most recent call last):\n File \"processing_test_data.py\", line 156, in \n cmaq_O3=list(np.concatenate(aa).flat) \n File \"<__array_function__ internals>\", line 180, in concatenate\nValueError: need at least one array to concatenate\n", + "history_begin_time" : 1654532331904, + "history_end_time" : 1654534637171, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "7sW7NLZP574T", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\n# home directory\nhome = str(Path.home())\n\ntoday=datetime.datetime.today().strftime('%Y%m%d')\npday_= datetime.datetime.today() - datetime.timedelta(days=1)\npday=pday_.strftime('%Y%m%d')\nfday_= datetime.datetime.today() + datetime.timedelta(days=1)\nfday=fday_.strftime('%Y%m%d')\ndays=[today,pday]\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in diractory. 
must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n print(\"yes1\")\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# O3 variable\n \t# O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n \n # read emission results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n print(\"yes2\")\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220303:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313 and int(i)<=20220630:\n print(\"yes3\")\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n# read mcip results \n# date must be later of 20210101\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\n for j in files:\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n \t# NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n\n \t# NO2\n oo=df.variables['CFRAC'][:].values[k,0]\n oo3=np.ravel(oo)\n 
o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n \n \ncmaq_O3=list(np.concatenate(aa).flat) \nprint(len(cmaq_O3))\ndel aa\ncmaq_NO2=list(np.concatenate(bb).flat) \nprint(len(cmaq_NO2))\ndel bb\ncmaq_CO=list(np.concatenate(cc).flat) \nprint(len(cmaq_CO))\ndel cc\n\ncmaq_PM25_CO=list(np.concatenate(ee).flat)\n\ndel ee\nCO_emi=list(np.concatenate(ff).flat) \nprint(len(CO_emi))\ndel ff\n\n#NO_emi=list(np.concatenate(hh).flat) \n#del hh\nPRSFC=list(np.concatenate(ii).flat) \ndel ii\nPBL=list(np.concatenate(jj).flat) \ndel jj\nTEMP2=list(np.concatenate(kk).flat) \ndel kk\nWSPD10=list(np.concatenate(ll).flat) \ndel ll\nWDIR10=list(np.concatenate(mm).flat)\ndel mm\n\nRGRND=list(np.concatenate(oo1).flat) \ndel oo1\n#RN=list(np.concatenate(pp).flat)\n#del pp\n#RC=list(np.concatenate(qq).flat)\n#del qq\nCFRAC=list(np.concatenate(rr).flat)\nprint(len(CFRAC))\ndel rr\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 values if the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH,'CMAQ12KM_O3(ppb)':cmaq_O3,'CMAQ12KM_NO2(ppb)':cmaq_NO2,'CMAQ12KM_CO(ppm)':cmaq_CO,'CMAQ_OC(ug/m3)':cmaq_PM25_CO,'CO(moles/s)':CO_emi,'PRSFC(Pa)':PRSFC,'PBL(m)':PBL,'TEMP2(K)':TEMP2,'WSPD10(m/s)':WSPD10,'WDIR10(degree)':WDIR10,'RGRND(W/m2)':RGRND,'CFRAC':CFRAC})\nprint(dat.head())\ndat.to_csv(home+'/cmaq/test_data.csv',index=False)\n\n", + "history_output" : "20220606\nyes3\n20220605\nyes3\nTraceback (most recent call last):\n File \"processing_test_data.py\", line 155, in \n cmaq_O3=list(np.concatenate(aa).flat) \n File \"<__array_function__ internals>\", line 180, in concatenate\nValueError: need at least one array to concatenate\n", + "history_begin_time" : 1654532218701, + "history_end_time" : 1654534636706, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "16aXb5W0HEfC", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\n# home directory\nhome = str(Path.home())\n\ntoday=datetime.datetime.today().strftime('%Y%m%d')\npday_= datetime.datetime.today() - datetime.timedelta(days=1)\npday=pday_.strftime('%Y%m%d')\nfday_= datetime.datetime.today() + datetime.timedelta(days=1)\nfday=fday_.strftime('%Y%m%d')\ndays=[today,pday]\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in diractory. 
must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# O3 variable\n \t# O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n \n # read emission results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220303:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313 and int(i)<=20220630:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n# read mcip results \n# date must be later of 20210101\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\n for j in files:\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n \t# NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n\n \t# NO2\n oo=df.variables['CFRAC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n \n 
\ncmaq_O3=list(np.concatenate(aa).flat) \nprint(len(cmaq_O3))\ndel aa\ncmaq_NO2=list(np.concatenate(bb).flat) \nprint(len(cmaq_NO2))\ndel bb\ncmaq_CO=list(np.concatenate(cc).flat) \nprint(len(cmaq_CO))\ndel cc\n\ncmaq_PM25_CO=list(np.concatenate(ee).flat)\n\ndel ee\nCO_emi=list(np.concatenate(ff).flat) \nprint(len(CO_emi))\ndel ff\n\n#NO_emi=list(np.concatenate(hh).flat) \n#del hh\nPRSFC=list(np.concatenate(ii).flat) \ndel ii\nPBL=list(np.concatenate(jj).flat) \ndel jj\nTEMP2=list(np.concatenate(kk).flat) \ndel kk\nWSPD10=list(np.concatenate(ll).flat) \ndel ll\nWDIR10=list(np.concatenate(mm).flat)\ndel mm\n\nRGRND=list(np.concatenate(oo1).flat) \ndel oo1\n#RN=list(np.concatenate(pp).flat)\n#del pp\n#RC=list(np.concatenate(qq).flat)\n#del qq\nCFRAC=list(np.concatenate(rr).flat)\nprint(len(CFRAC))\ndel rr\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 values if the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH,'CMAQ12KM_O3(ppb)':cmaq_O3,'CMAQ12KM_NO2(ppb)':cmaq_NO2,'CMAQ12KM_CO(ppm)':cmaq_CO,'CMAQ_OC(ug/m3)':cmaq_PM25_CO,'CO(moles/s)':CO_emi,'PRSFC(Pa)':PRSFC,'PBL(m)':PBL,'TEMP2(K)':TEMP2,'WSPD10(m/s)':WSPD10,'WDIR10(degree)':WDIR10,'RGRND(W/m2)':RGRND,'CFRAC':CFRAC})\nprint(dat.head())\ndat.to_csv(home+'/cmaq/test_data.csv',index=False)\n\n", + "history_output" : "20220606\n20220605\nTraceback (most recent call last):\n File \"processing_test_data.py\", line 152, in \n cmaq_O3=list(np.concatenate(aa).flat) \n File \"<__array_function__ internals>\", line 180, in concatenate\nValueError: need at least one array to concatenate\n", + "history_begin_time" : 1654532150426, + "history_end_time" : 1654534636172, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "y3bc17e5udv", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\n# home directory\nhome = str(Path.home())\n\ntoday=datetime.datetime.today().strftime('%Y%m%d')\npday_= datetime.datetime.today() - datetime.timedelta(days=1)\npday=pday_.strftime('%Y%m%d')\nfday_= datetime.datetime.today() + datetime.timedelta(days=1)\nfday=fday_.strftime('%Y%m%d')\ndays=[today,pday]\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in diractory. 
must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# O3 variable\n \t# O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n \n # read emission results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220303:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313 and int(i)<=20220331:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ff.append(o3tp)\n\n # NO\n# oo=df.variables['NO'][:].values[k,0]\n# oo3=np.ravel(oo)\n# o3tp=np.transpose(oo3)\n# o3tp=np.round(o3tp)\n# hh.append(o3tp) \n \n# read mcip results \n# date must be later of 20210101\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\n for j in files:\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n \t# NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n\n \t# NO2\n oo=df.variables['CFRAC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n \n \ncmaq_O3=list(np.concatenate(aa).flat) \nprint(len(cmaq_O3))\ndel aa\ncmaq_NO2=list(np.concatenate(bb).flat) \nprint(len(cmaq_NO2))\ndel bb\ncmaq_CO=list(np.concatenate(cc).flat) \nprint(len(cmaq_CO))\ndel 
cc\n\ncmaq_PM25_CO=list(np.concatenate(ee).flat)\n\ndel ee\nCO_emi=list(np.concatenate(ff).flat) \nprint(len(CO_emi))\ndel ff\n\n#NO_emi=list(np.concatenate(hh).flat) \n#del hh\nPRSFC=list(np.concatenate(ii).flat) \ndel ii\nPBL=list(np.concatenate(jj).flat) \ndel jj\nTEMP2=list(np.concatenate(kk).flat) \ndel kk\nWSPD10=list(np.concatenate(ll).flat) \ndel ll\nWDIR10=list(np.concatenate(mm).flat)\ndel mm\n\nRGRND=list(np.concatenate(oo1).flat) \ndel oo1\n#RN=list(np.concatenate(pp).flat)\n#del pp\n#RC=list(np.concatenate(qq).flat)\n#del qq\nCFRAC=list(np.concatenate(rr).flat)\nprint(len(CFRAC))\ndel rr\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 values if the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH,'CMAQ12KM_O3(ppb)':cmaq_O3,'CMAQ12KM_NO2(ppb)':cmaq_NO2,'CMAQ12KM_CO(ppm)':cmaq_CO,'CMAQ_OC(ug/m3)':cmaq_PM25_CO,'CO(moles/s)':CO_emi,'PRSFC(Pa)':PRSFC,'PBL(m)':PBL,'TEMP2(K)':TEMP2,'WSPD10(m/s)':WSPD10,'WDIR10(degree)':WDIR10,'RGRND(W/m2)':RGRND,'CFRAC':CFRAC})\nprint(dat.head())\ndat.to_csv(home+'/cmaq/test_data.csv',index=False)\n\n", + "history_output" : "20220605\n20220604\nTraceback (most recent call last):\n File \"processing_test_data.py\", line 141, in \n cmaq_O3=list(np.concatenate(aa).flat) \n File \"<__array_function__ internals>\", line 5, in concatenate\nValueError: need at least one array to concatenate\n", + "history_begin_time" : 1654477412232, + "history_end_time" : 1654477448684, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : "roeaa3", + "indicator" : "Failed" +},{ + "history_id" : "ophrppdyibn", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\n# home directory\nhome = str(Path.home())\n\ntoday=datetime.datetime.today().strftime('%Y%m%d')\npday_= datetime.datetime.today() - datetime.timedelta(days=1)\npday=pday_.strftime('%Y%m%d')\nfday_= datetime.datetime.today() + datetime.timedelta(days=1)\nfday=fday_.strftime('%Y%m%d')\ndays=[today,pday]\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in diractory. 
must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# O3 variable\n \t# O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n \n # read emission results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220303:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313 and int(i)<=20220331:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ff.append(o3tp)\n\n # NO\n# oo=df.variables['NO'][:].values[k,0]\n# oo3=np.ravel(oo)\n# o3tp=np.transpose(oo3)\n# o3tp=np.round(o3tp)\n# hh.append(o3tp) \n \n# read mcip results \n# date must be later of 20210101\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\n for j in files:\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n \t# NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n\n \t# NO2\n oo=df.variables['CFRAC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n \n \ncmaq_O3=list(np.concatenate(aa).flat) \nprint(len(cmaq_O3))\ndel aa\ncmaq_NO2=list(np.concatenate(bb).flat) \nprint(len(cmaq_NO2))\ndel bb\ncmaq_CO=list(np.concatenate(cc).flat) \nprint(len(cmaq_CO))\ndel 
cc\n\ncmaq_PM25_CO=list(np.concatenate(ee).flat)\n\ndel ee\nCO_emi=list(np.concatenate(ff).flat) \nprint(len(CO_emi))\ndel ff\n\n#NO_emi=list(np.concatenate(hh).flat) \n#del hh\nPRSFC=list(np.concatenate(ii).flat) \ndel ii\nPBL=list(np.concatenate(jj).flat) \ndel jj\nTEMP2=list(np.concatenate(kk).flat) \ndel kk\nWSPD10=list(np.concatenate(ll).flat) \ndel ll\nWDIR10=list(np.concatenate(mm).flat)\ndel mm\n\nRGRND=list(np.concatenate(oo1).flat) \ndel oo1\n#RN=list(np.concatenate(pp).flat)\n#del pp\n#RC=list(np.concatenate(qq).flat)\n#del qq\nCFRAC=list(np.concatenate(rr).flat)\nprint(len(CFRAC))\ndel rr\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 values if the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH,'CMAQ12KM_O3(ppb)':cmaq_O3,'CMAQ12KM_NO2(ppb)':cmaq_NO2,'CMAQ12KM_CO(ppm)':cmaq_CO,'CMAQ_OC(ug/m3)':cmaq_PM25_CO,'CO(moles/s)':CO_emi,'PRSFC(Pa)':PRSFC,'PBL(m)':PBL,'TEMP2(K)':TEMP2,'WSPD10(m/s)':WSPD10,'WDIR10(degree)':WDIR10,'RGRND(W/m2)':RGRND,'CFRAC':CFRAC})\nprint(dat.head())\ndat.to_csv(home+'/cmaq/test_data.csv',index=False)\n\n", + "history_output" : "20220605\n20220604\nTraceback (most recent call last):\n File \"processing_test_data.py\", line 141, in \n cmaq_O3=list(np.concatenate(aa).flat) \n File \"<__array_function__ internals>\", line 6, in concatenate\nValueError: need at least one array to concatenate\n", + "history_begin_time" : 1654465998552, + "history_end_time" : 1654465999407, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : "100001", + "indicator" : "Failed" +},{ + "history_id" : "09syopvs1cj", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\n# home directory\nhome = str(Path.home())\n\ntoday=datetime.datetime.today().strftime('%Y%m%d')\npday_= datetime.datetime.today() - datetime.timedelta(days=1)\npday=pday_.strftime('%Y%m%d')\nfday_= datetime.datetime.today() + datetime.timedelta(days=1)\nfday=fday_.strftime('%Y%m%d')\ndays=[today,pday]\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in diractory. 
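Every variable in the script above is extracted with the same steps (take hour k at the surface layer, np.ravel, np.transpose, np.round, append), and np.transpose of a 1-D vector is a no-op. The helper below is a sketch of that logic in one place, assuming each variable is dimensioned (TSTEP, LAY, ROW, COL); the name flatten_hour is illustrative, not part of the original script.

import numpy as np
import xarray as xr

def flatten_hour(ds: xr.Dataset, var: str, hour: int) -> np.ndarray:
    # Surface-layer slice of `var` at time index `hour`, flattened and rounded.
    # The original script also transposed the vector, which changes nothing for 1-D data.
    field = ds[var].values[hour, 0]
    return np.round(np.ravel(field))

# usage mirroring the loop above:
# for k in t:
#     aa.append(flatten_hour(df, 'O3', k))
#     bb.append(flatten_hour(df, 'NO2', k))

Because the copy-pasted "# NO" and "# CO variable" comments in the original actually sit above variables such as TEMP2, WSPD10 and PRSFC, passing the variable name explicitly also makes the intent of each append visible.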
must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# O3 variable\n \t# O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n \n # read emission results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220303:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313 and int(i)<=20220331:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ff.append(o3tp)\n\n # NO\n# oo=df.variables['NO'][:].values[k,0]\n# oo3=np.ravel(oo)\n# o3tp=np.transpose(oo3)\n# o3tp=np.round(o3tp)\n# hh.append(o3tp) \n \n# read mcip results \n# date must be later of 20210101\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\n for j in files:\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n \t# NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n\n \t# NO2\n oo=df.variables['CFRAC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n \n \ncmaq_O3=list(np.concatenate(aa).flat) \nprint(len(cmaq_O3))\ndel aa\ncmaq_NO2=list(np.concatenate(bb).flat) \nprint(len(cmaq_NO2))\ndel bb\ncmaq_CO=list(np.concatenate(cc).flat) \nprint(len(cmaq_CO))\ndel 
cc\n\ncmaq_PM25_CO=list(np.concatenate(ee).flat)\n\ndel ee\nCO_emi=list(np.concatenate(ff).flat) \nprint(len(CO_emi))\ndel ff\n\n#NO_emi=list(np.concatenate(hh).flat) \n#del hh\nPRSFC=list(np.concatenate(ii).flat) \ndel ii\nPBL=list(np.concatenate(jj).flat) \ndel jj\nTEMP2=list(np.concatenate(kk).flat) \ndel kk\nWSPD10=list(np.concatenate(ll).flat) \ndel ll\nWDIR10=list(np.concatenate(mm).flat)\ndel mm\n\nRGRND=list(np.concatenate(oo1).flat) \ndel oo1\n#RN=list(np.concatenate(pp).flat)\n#del pp\n#RC=list(np.concatenate(qq).flat)\n#del qq\nCFRAC=list(np.concatenate(rr).flat)\nprint(len(CFRAC))\ndel rr\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 values if the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH,'CMAQ12KM_O3(ppb)':cmaq_O3,'CMAQ12KM_NO2(ppb)':cmaq_NO2,'CMAQ12KM_CO(ppm)':cmaq_CO,'CMAQ_OC(ug/m3)':cmaq_PM25_CO,'CO(moles/s)':CO_emi,'PRSFC(Pa)':PRSFC,'PBL(m)':PBL,'TEMP2(K)':TEMP2,'WSPD10(m/s)':WSPD10,'WDIR10(degree)':WDIR10,'RGRND(W/m2)':RGRND,'CFRAC':CFRAC})\nprint(dat.head())\ndat.to_csv(home+'/cmaq/test_data.csv',index=False)\n\n", + "history_output" : "20220605\n20220604\nTraceback (most recent call last):\n File \"processing_test_data.py\", line 141, in \n cmaq_O3=list(np.concatenate(aa).flat) \n File \"<__array_function__ internals>\", line 5, in concatenate\nValueError: need at least one array to concatenate\n", + "history_begin_time" : 1654456035636, + "history_end_time" : 1654456069663, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : "roeaa3", + "indicator" : "Failed" +},{ + "history_id" : "CsQLFZxde8OA", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\n# home directory\nhome = str(Path.home())\n\ntoday=datetime.datetime.today().strftime('%Y%m%d')\npday_= datetime.datetime.today() - datetime.timedelta(days=1)\npday=pday_.strftime('%Y%m%d')\nfday_= datetime.datetime.today() + datetime.timedelta(days=1)\nfday=fday_.strftime('%Y%m%d')\ndays=[today,pday]\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in diractory. 
must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n print(\"YES\")\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# O3 variable\n \t# O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n \n # read emission results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220303:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313 and int(i)<=20220331:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ff.append(o3tp)\n\n # NO\n# oo=df.variables['NO'][:].values[k,0]\n# oo3=np.ravel(oo)\n# o3tp=np.transpose(oo3)\n# o3tp=np.round(o3tp)\n# hh.append(o3tp) \n \n# read mcip results \n# date must be later of 20210101\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\n for j in files:\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n \t# NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n\n \t# NO2\n oo=df.variables['CFRAC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n \n \ncmaq_O3=list(np.concatenate(aa).flat) \nprint(len(cmaq_O3))\ndel aa\ncmaq_NO2=list(np.concatenate(bb).flat) \nprint(len(cmaq_NO2))\ndel bb\ncmaq_CO=list(np.concatenate(cc).flat) \nprint(len(cmaq_CO))\ndel 
cc\n\ncmaq_PM25_CO=list(np.concatenate(ee).flat)\n\ndel ee\nCO_emi=list(np.concatenate(ff).flat) \nprint(len(CO_emi))\ndel ff\n\n#NO_emi=list(np.concatenate(hh).flat) \n#del hh\nPRSFC=list(np.concatenate(ii).flat) \ndel ii\nPBL=list(np.concatenate(jj).flat) \ndel jj\nTEMP2=list(np.concatenate(kk).flat) \ndel kk\nWSPD10=list(np.concatenate(ll).flat) \ndel ll\nWDIR10=list(np.concatenate(mm).flat)\ndel mm\n\nRGRND=list(np.concatenate(oo1).flat) \ndel oo1\n#RN=list(np.concatenate(pp).flat)\n#del pp\n#RC=list(np.concatenate(qq).flat)\n#del qq\nCFRAC=list(np.concatenate(rr).flat)\nprint(len(CFRAC))\ndel rr\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 values if the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH,'CMAQ12KM_O3(ppb)':cmaq_O3,'CMAQ12KM_NO2(ppb)':cmaq_NO2,'CMAQ12KM_CO(ppm)':cmaq_CO,'CMAQ_OC(ug/m3)':cmaq_PM25_CO,'CO(moles/s)':CO_emi,'PRSFC(Pa)':PRSFC,'PBL(m)':PBL,'TEMP2(K)':TEMP2,'WSPD10(m/s)':WSPD10,'WDIR10(degree)':WDIR10,'RGRND(W/m2)':RGRND,'CFRAC':CFRAC})\nprint(dat.head())\ndat.to_csv(home+'/cmaq/test_data.csv',index=False)\n\n", + "history_output" : "20220604\n20220603\nTraceback (most recent call last):\n File \"processing_test_data.py\", line 142, in \n cmaq_O3=list(np.concatenate(aa).flat) \n File \"<__array_function__ internals>\", line 180, in concatenate\nValueError: need at least one array to concatenate\n", + "history_begin_time" : 1654321911911, + "history_end_time" : 1654321928950, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : null, + "indicator" : "Done" +},{ + "history_id" : "7q2VqlXyJpIa", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\n# home directory\nhome = str(Path.home())\n\ntoday=datetime.datetime.today().strftime('%Y%m%d')\npday_= datetime.datetime.today() - datetime.timedelta(days=1)\npday=pday_.strftime('%Y%m%d')\nfday_= datetime.datetime.today() + datetime.timedelta(days=1)\nfday=fday_.strftime('%Y%m%d')\ndays=[today,pday]\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in diractory. 
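The emission-file branch in the script is a set of hard-coded date windows, each mapped to a different filename suffix (nobeis_2016fh_16j for 20191231 through 20210902, cmaq_cb6ae7_2017gb_17j for 20220303 and 20220313 through 20220331). For a run date outside those windows no branch fires, so files still holds the earlier CMAQ POST glob result and the emission loop either reads the wrong file or nothing at all. The selector below is a sketch of the same mapping as a single function; the name emission_pattern and the None return for unknown dates are assumptions, not behavior of the original script.

from typing import Optional

def emission_pattern(day: int) -> Optional[str]:
    # Map a YYYYMMDD integer to the emission file path used above;
    # None means the date falls outside every known naming window.
    base = ("/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/"
            "emis_mole_all_" + str(day) + "_AQF5X_")
    if 20191231 <= day <= 20210902:
        return base + "nobeis_2016fh_16j.ncf"
    if day == 20220303 or 20220313 <= day <= 20220331:
        return base + "cmaq_cb6ae7_2017gb_17j.ncf"
    return None

The commented-out condition in the original (int(i)<=int(today)) suggests the second window was intended to extend to the current date once the upstream naming convention stopped changing.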
must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# O3 variable\n \t# O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n \n # read emission results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220303:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313 and int(i)<=20220331:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ff.append(o3tp)\n\n # NO\n# oo=df.variables['NO'][:].values[k,0]\n# oo3=np.ravel(oo)\n# o3tp=np.transpose(oo3)\n# o3tp=np.round(o3tp)\n# hh.append(o3tp) \n \n# read mcip results \n# date must be later of 20210101\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\n for j in files:\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n \t# NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n\n \t# NO2\n oo=df.variables['CFRAC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n \n \ncmaq_O3=list(np.concatenate(aa).flat) \nprint(len(cmaq_O3))\ndel aa\ncmaq_NO2=list(np.concatenate(bb).flat) \nprint(len(cmaq_NO2))\ndel bb\ncmaq_CO=list(np.concatenate(cc).flat) \nprint(len(cmaq_CO))\ndel 
cc\n\ncmaq_PM25_CO=list(np.concatenate(ee).flat)\n\ndel ee\nCO_emi=list(np.concatenate(ff).flat) \nprint(len(CO_emi))\ndel ff\n\n#NO_emi=list(np.concatenate(hh).flat) \n#del hh\nPRSFC=list(np.concatenate(ii).flat) \ndel ii\nPBL=list(np.concatenate(jj).flat) \ndel jj\nTEMP2=list(np.concatenate(kk).flat) \ndel kk\nWSPD10=list(np.concatenate(ll).flat) \ndel ll\nWDIR10=list(np.concatenate(mm).flat)\ndel mm\n\nRGRND=list(np.concatenate(oo1).flat) \ndel oo1\n#RN=list(np.concatenate(pp).flat)\n#del pp\n#RC=list(np.concatenate(qq).flat)\n#del qq\nCFRAC=list(np.concatenate(rr).flat)\nprint(len(CFRAC))\ndel rr\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 values if the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH,'CMAQ12KM_O3(ppb)':cmaq_O3,'CMAQ12KM_NO2(ppb)':cmaq_NO2,'CMAQ12KM_CO(ppm)':cmaq_CO,'CMAQ_OC(ug/m3)':cmaq_PM25_CO,'CO(moles/s)':CO_emi,'PRSFC(Pa)':PRSFC,'PBL(m)':PBL,'TEMP2(K)':TEMP2,'WSPD10(m/s)':WSPD10,'WDIR10(degree)':WDIR10,'RGRND(W/m2)':RGRND,'CFRAC':CFRAC})\nprint(dat.head())\ndat.to_csv(home+'/cmaq/test_data.csv',index=False)\n\n", + "history_output" : "20220604\n20220603\nTraceback (most recent call last):\n File \"processing_test_data.py\", line 141, in \n cmaq_O3=list(np.concatenate(aa).flat) \n File \"<__array_function__ internals>\", line 180, in concatenate\nValueError: need at least one array to concatenate\n", + "history_begin_time" : 1654321847557, + "history_end_time" : 1654321865170, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : null, + "indicator" : "Done" +},{ + "history_id" : "T65lA2clssKe", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\n# home directory\nhome = str(Path.home())\n\ntoday=datetime.datetime.today().strftime('%Y%m%d')\npday_= datetime.datetime.today() - datetime.timedelta(days=1)\npday=pday_.strftime('%Y%m%d')\nfday_= datetime.datetime.today() + datetime.timedelta(days=1)\nfday=fday_.strftime('%Y%m%d')\ndays=[today,pday]\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in diractory. 
must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# O3 variable\n \t# O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n \n # read emission results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220303:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313 and int(i)<=20220331:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ff.append(o3tp)\n\n # NO\n# oo=df.variables['NO'][:].values[k,0]\n# oo3=np.ravel(oo)\n# o3tp=np.transpose(oo3)\n# o3tp=np.round(o3tp)\n# hh.append(o3tp) \n \n# read mcip results \n# date must be later of 20210101\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\n for j in files:\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n \t# NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n\n \t# NO2\n oo=df.variables['CFRAC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n \n \ncmaq_O3=list(np.concatenate(aa).flat) \nprint(len(cmaq_O3))\ndel aa\ncmaq_NO2=list(np.concatenate(bb).flat) \nprint(len(cmaq_NO2))\ndel bb\ncmaq_CO=list(np.concatenate(cc).flat) \nprint(len(cmaq_CO))\ndel 
cc\n\ncmaq_PM25_CO=list(np.concatenate(ee).flat)\n\ndel ee\nCO_emi=list(np.concatenate(ff).flat) \nprint(len(CO_emi))\ndel ff\n\n#NO_emi=list(np.concatenate(hh).flat) \n#del hh\nPRSFC=list(np.concatenate(ii).flat) \ndel ii\nPBL=list(np.concatenate(jj).flat) \ndel jj\nTEMP2=list(np.concatenate(kk).flat) \ndel kk\nWSPD10=list(np.concatenate(ll).flat) \ndel ll\nWDIR10=list(np.concatenate(mm).flat)\ndel mm\n\nRGRND=list(np.concatenate(oo1).flat) \ndel oo1\n#RN=list(np.concatenate(pp).flat)\n#del pp\n#RC=list(np.concatenate(qq).flat)\n#del qq\nCFRAC=list(np.concatenate(rr).flat)\nprint(len(CFRAC))\ndel rr\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 values if the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH,'CMAQ12KM_O3(ppb)':cmaq_O3,'CMAQ12KM_NO2(ppb)':cmaq_NO2,'CMAQ12KM_CO(ppm)':cmaq_CO,'CMAQ_OC(ug/m3)':cmaq_PM25_CO,'CO(moles/s)':CO_emi,'PRSFC(Pa)':PRSFC,'PBL(m)':PBL,'TEMP2(K)':TEMP2,'WSPD10(m/s)':WSPD10,'WDIR10(degree)':WDIR10,'RGRND(W/m2)':RGRND,'CFRAC':CFRAC})\nprint(dat.head())\ndat.to_csv(home+'/cmaq/test_data.csv',index=False)\n\n", + "history_output" : "bash: python: command not found\n", + "history_begin_time" : 1654320630130, + "history_end_time" : 1654320631851, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : null, + "indicator" : "Done" +},{ + "history_id" : "z3cnnln1v3n", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\n# home directory\nhome = str(Path.home())\n\ntoday=datetime.datetime.today().strftime('%Y%m%d')\npday_= datetime.datetime.today() - datetime.timedelta(days=1)\npday=pday_.strftime('%Y%m%d')\nfday_= datetime.datetime.today() + datetime.timedelta(days=1)\nfday=fday_.strftime('%Y%m%d')\ndays=[today,pday]\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in diractory. 
must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# O3 variable\n \t# O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n \n # read emission results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220303:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313 and int(i)<=20220331:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ff.append(o3tp)\n\n # NO\n# oo=df.variables['NO'][:].values[k,0]\n# oo3=np.ravel(oo)\n# o3tp=np.transpose(oo3)\n# o3tp=np.round(o3tp)\n# hh.append(o3tp) \n \n# read mcip results \n# date must be later of 20210101\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\n for j in files:\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n \t# NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n\n \t# NO2\n oo=df.variables['CFRAC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n \n \ncmaq_O3=list(np.concatenate(aa).flat) \nprint(len(cmaq_O3))\ndel aa\ncmaq_NO2=list(np.concatenate(bb).flat) \nprint(len(cmaq_NO2))\ndel bb\ncmaq_CO=list(np.concatenate(cc).flat) \nprint(len(cmaq_CO))\ndel 
cc\n\ncmaq_PM25_CO=list(np.concatenate(ee).flat)\n\ndel ee\nCO_emi=list(np.concatenate(ff).flat) \nprint(len(CO_emi))\ndel ff\n\n#NO_emi=list(np.concatenate(hh).flat) \n#del hh\nPRSFC=list(np.concatenate(ii).flat) \ndel ii\nPBL=list(np.concatenate(jj).flat) \ndel jj\nTEMP2=list(np.concatenate(kk).flat) \ndel kk\nWSPD10=list(np.concatenate(ll).flat) \ndel ll\nWDIR10=list(np.concatenate(mm).flat)\ndel mm\n\nRGRND=list(np.concatenate(oo1).flat) \ndel oo1\n#RN=list(np.concatenate(pp).flat)\n#del pp\n#RC=list(np.concatenate(qq).flat)\n#del qq\nCFRAC=list(np.concatenate(rr).flat)\nprint(len(CFRAC))\ndel rr\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 values if the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH,'CMAQ12KM_O3(ppb)':cmaq_O3,'CMAQ12KM_NO2(ppb)':cmaq_NO2,'CMAQ12KM_CO(ppm)':cmaq_CO,'CMAQ_OC(ug/m3)':cmaq_PM25_CO,'CO(moles/s)':CO_emi,'PRSFC(Pa)':PRSFC,'PBL(m)':PBL,'TEMP2(K)':TEMP2,'WSPD10(m/s)':WSPD10,'WDIR10(degree)':WDIR10,'RGRND(W/m2)':RGRND,'CFRAC':CFRAC})\nprint(dat.head())\ndat.to_csv(home+'/cmaq/test_data.csv',index=False)\n\n", + "history_output" : "20220604\n20220603\nTraceback (most recent call last):\n File \"processing_test_data.py\", line 141, in \n cmaq_O3=list(np.concatenate(aa).flat) \n File \"<__array_function__ internals>\", line 180, in concatenate\nValueError: need at least one array to concatenate\n", + "history_begin_time" : 1654319723927, + "history_end_time" : 1654319745021, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "2nbvwn43c52", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\n# home directory\nhome = str(Path.home())\n\ntoday=datetime.datetime.today().strftime('%Y%m%d')\npday_= datetime.datetime.today() - datetime.timedelta(days=1)\npday=pday_.strftime('%Y%m%d')\nfday_= datetime.datetime.today() + datetime.timedelta(days=1)\nfday=fday_.strftime('%Y%m%d')\ndays=[today,pday]\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in diractory. 
must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# O3 variable\n \t# O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n \n # read emission results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220303:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313 and int(i)<=20220331:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ff.append(o3tp)\n\n # NO\n# oo=df.variables['NO'][:].values[k,0]\n# oo3=np.ravel(oo)\n# o3tp=np.transpose(oo3)\n# o3tp=np.round(o3tp)\n# hh.append(o3tp) \n \n# read mcip results \n# date must be later of 20210101\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\n for j in files:\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n \t# NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n\n \t# NO2\n oo=df.variables['CFRAC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n \n \ncmaq_O3=list(np.concatenate(aa).flat) \nprint(len(cmaq_O3))\ndel aa\ncmaq_NO2=list(np.concatenate(bb).flat) \nprint(len(cmaq_NO2))\ndel bb\ncmaq_CO=list(np.concatenate(cc).flat) \nprint(len(cmaq_CO))\ndel 
cc\n\ncmaq_PM25_CO=list(np.concatenate(ee).flat)\n\ndel ee\nCO_emi=list(np.concatenate(ff).flat) \nprint(len(CO_emi))\ndel ff\n\n#NO_emi=list(np.concatenate(hh).flat) \n#del hh\nPRSFC=list(np.concatenate(ii).flat) \ndel ii\nPBL=list(np.concatenate(jj).flat) \ndel jj\nTEMP2=list(np.concatenate(kk).flat) \ndel kk\nWSPD10=list(np.concatenate(ll).flat) \ndel ll\nWDIR10=list(np.concatenate(mm).flat)\ndel mm\n\nRGRND=list(np.concatenate(oo1).flat) \ndel oo1\n#RN=list(np.concatenate(pp).flat)\n#del pp\n#RC=list(np.concatenate(qq).flat)\n#del qq\nCFRAC=list(np.concatenate(rr).flat)\nprint(len(CFRAC))\ndel rr\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 values if the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH,'CMAQ12KM_O3(ppb)':cmaq_O3,'CMAQ12KM_NO2(ppb)':cmaq_NO2,'CMAQ12KM_CO(ppm)':cmaq_CO,'CMAQ_OC(ug/m3)':cmaq_PM25_CO,'CO(moles/s)':CO_emi,'PRSFC(Pa)':PRSFC,'PBL(m)':PBL,'TEMP2(K)':TEMP2,'WSPD10(m/s)':WSPD10,'WDIR10(degree)':WDIR10,'RGRND(W/m2)':RGRND,'CFRAC':CFRAC})\nprint(dat.head())\ndat.to_csv(home+'/cmaq/test_data.csv',index=False)\n\n", + "history_output" : "20220604\n20220603\nTraceback (most recent call last):\n File \"processing_test_data.py\", line 141, in \n cmaq_O3=list(np.concatenate(aa).flat) \n File \"<__array_function__ internals>\", line 5, in concatenate\nValueError: need at least one array to concatenate\n", + "history_begin_time" : 1654317871400, + "history_end_time" : 1654317918048, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : "roeaa3", + "indicator" : "Done" +},{ + "history_id" : "c0ktfkmow7t", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\n# home directory\nhome = str(Path.home())\n\ntoday=datetime.datetime.today().strftime('%Y%m%d')\npday_= datetime.datetime.today() - datetime.timedelta(days=1)\npday=pday_.strftime('%Y%m%d')\nfday_= datetime.datetime.today() + datetime.timedelta(days=1)\nfday=fday_.strftime('%Y%m%d')\ndays=[today,pday]\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in diractory. 
must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# O3 variable\n \t# O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n \n # read emission results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220303:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313 and int(i)<=20220331:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ff.append(o3tp)\n\n # NO\n# oo=df.variables['NO'][:].values[k,0]\n# oo3=np.ravel(oo)\n# o3tp=np.transpose(oo3)\n# o3tp=np.round(o3tp)\n# hh.append(o3tp) \n \n# read mcip results \n# date must be later of 20210101\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\n for j in files:\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n \t# NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n\n \t# NO2\n oo=df.variables['CFRAC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n \n \ncmaq_O3=list(np.concatenate(aa).flat) \nprint(len(cmaq_O3))\ndel aa\ncmaq_NO2=list(np.concatenate(bb).flat) \nprint(len(cmaq_NO2))\ndel bb\ncmaq_CO=list(np.concatenate(cc).flat) \nprint(len(cmaq_CO))\ndel 
cc\n\ncmaq_PM25_CO=list(np.concatenate(ee).flat)\n\ndel ee\nCO_emi=list(np.concatenate(ff).flat) \nprint(len(CO_emi))\ndel ff\n\n#NO_emi=list(np.concatenate(hh).flat) \n#del hh\nPRSFC=list(np.concatenate(ii).flat) \ndel ii\nPBL=list(np.concatenate(jj).flat) \ndel jj\nTEMP2=list(np.concatenate(kk).flat) \ndel kk\nWSPD10=list(np.concatenate(ll).flat) \ndel ll\nWDIR10=list(np.concatenate(mm).flat)\ndel mm\n\nRGRND=list(np.concatenate(oo1).flat) \ndel oo1\n#RN=list(np.concatenate(pp).flat)\n#del pp\n#RC=list(np.concatenate(qq).flat)\n#del qq\nCFRAC=list(np.concatenate(rr).flat)\nprint(len(CFRAC))\ndel rr\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 values if the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH,'CMAQ12KM_O3(ppb)':cmaq_O3,'CMAQ12KM_NO2(ppb)':cmaq_NO2,'CMAQ12KM_CO(ppm)':cmaq_CO,'CMAQ_OC(ug/m3)':cmaq_PM25_CO,'CO(moles/s)':CO_emi,'PRSFC(Pa)':PRSFC,'PBL(m)':PBL,'TEMP2(K)':TEMP2,'WSPD10(m/s)':WSPD10,'WDIR10(degree)':WDIR10,'RGRND(W/m2)':RGRND,'CFRAC':CFRAC})\nprint(dat.head())\ndat.to_csv(home+'/cmaq/test_data.csv',index=False)\n\n", + "history_output" : "20220603\n20220602\nTraceback (most recent call last):\n File \"processing_test_data.py\", line 141, in \n cmaq_O3=list(np.concatenate(aa).flat) \n File \"<__array_function__ internals>\", line 5, in concatenate\nValueError: need at least one array to concatenate\n", + "history_begin_time" : 1654314655943, + "history_end_time" : 1654314692848, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : "roeaa3", + "indicator" : "Done" +},{ + "history_id" : "86jiqh87wnv", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\n# home directory\nhome = str(Path.home())\n\ntoday=datetime.datetime.today().strftime('%Y%m%d')\npday_= datetime.datetime.today() - datetime.timedelta(days=1)\npday=pday_.strftime('%Y%m%d')\nfday_= datetime.datetime.today() + datetime.timedelta(days=1)\nfday=fday_.strftime('%Y%m%d')\ndays=[today,pday]\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in diractory. 
must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# O3 variable\n \t# O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n \n # read emission results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220303:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313 and int(i)<=20220331:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ff.append(o3tp)\n\n # NO\n# oo=df.variables['NO'][:].values[k,0]\n# oo3=np.ravel(oo)\n# o3tp=np.transpose(oo3)\n# o3tp=np.round(o3tp)\n# hh.append(o3tp) \n \n# read mcip results \n# date must be later of 20210101\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\n for j in files:\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n \t# NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n\n \t# NO2\n oo=df.variables['CFRAC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n \n \ncmaq_O3=list(np.concatenate(aa).flat) \nprint(len(cmaq_O3))\ndel aa\ncmaq_NO2=list(np.concatenate(bb).flat) \nprint(len(cmaq_NO2))\ndel bb\ncmaq_CO=list(np.concatenate(cc).flat) \nprint(len(cmaq_CO))\ndel 
cc\n\ncmaq_PM25_CO=list(np.concatenate(ee).flat)\n\ndel ee\nCO_emi=list(np.concatenate(ff).flat) \nprint(len(CO_emi))\ndel ff\n\n#NO_emi=list(np.concatenate(hh).flat) \n#del hh\nPRSFC=list(np.concatenate(ii).flat) \ndel ii\nPBL=list(np.concatenate(jj).flat) \ndel jj\nTEMP2=list(np.concatenate(kk).flat) \ndel kk\nWSPD10=list(np.concatenate(ll).flat) \ndel ll\nWDIR10=list(np.concatenate(mm).flat)\ndel mm\n\nRGRND=list(np.concatenate(oo1).flat) \ndel oo1\n#RN=list(np.concatenate(pp).flat)\n#del pp\n#RC=list(np.concatenate(qq).flat)\n#del qq\nCFRAC=list(np.concatenate(rr).flat)\nprint(len(CFRAC))\ndel rr\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 values if the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH,'CMAQ12KM_O3(ppb)':cmaq_O3,'CMAQ12KM_NO2(ppb)':cmaq_NO2,'CMAQ12KM_CO(ppm)':cmaq_CO,'CMAQ_OC(ug/m3)':cmaq_PM25_CO,'CO(moles/s)':CO_emi,'PRSFC(Pa)':PRSFC,'PBL(m)':PBL,'TEMP2(K)':TEMP2,'WSPD10(m/s)':WSPD10,'WDIR10(degree)':WDIR10,'RGRND(W/m2)':RGRND,'CFRAC':CFRAC})\nprint(dat.head())\ndat.to_csv(home+'/cmaq/test_data.csv',index=False)\n\n", + "history_output" : "20220527\n20220526\n5622240\n5622240\n5622240\n5622240\n5622240\n5622240\n5622240\n5622240\n Latitude Longitude YYYYMMDDHH ... WDIR10(degree) RGRND(W/m2) CFRAC\n0 21.829086 -120.620789 2022052712 ... 17.0 578.0 0.0\n1 21.855751 -120.512497 2022052712 ... 16.0 576.0 0.0\n2 21.882309 -120.404144 2022052712 ... 16.0 575.0 0.0\n3 21.908745 -120.295715 2022052712 ... 16.0 573.0 0.0\n4 21.935051 -120.187225 2022052712 ... 16.0 571.0 0.0\n\n[5 rows x 15 columns]\n", + "history_begin_time" : 1653698223684, + "history_end_time" : 1653698489488, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "5aogvy3akz4", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\n# home directory\nhome = str(Path.home())\n\ntoday=datetime.datetime.today().strftime('%Y%m%d')\npday_= datetime.datetime.today() - datetime.timedelta(days=1)\npday=pday_.strftime('%Y%m%d')\nfday_= datetime.datetime.today() + datetime.timedelta(days=1)\nfday=fday_.strftime('%Y%m%d')\ndays=[today,pday]\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in diractory. 
must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# O3 variable\n \t# O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n \n # read emission results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220303:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313 and int(i)<=20220331:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ff.append(o3tp)\n\n # NO\n# oo=df.variables['NO'][:].values[k,0]\n# oo3=np.ravel(oo)\n# o3tp=np.transpose(oo3)\n# o3tp=np.round(o3tp)\n# hh.append(o3tp) \n \n# read mcip results \n# date must be later of 20210101\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\n for j in files:\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n \t# NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n\n \t# NO2\n oo=df.variables['CFRAC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n \n \ncmaq_O3=list(np.concatenate(aa).flat) \nprint(len(cmaq_O3))\ndel aa\ncmaq_NO2=list(np.concatenate(bb).flat) \nprint(len(cmaq_NO2))\ndel bb\ncmaq_CO=list(np.concatenate(cc).flat) \nprint(len(cmaq_CO))\ndel 
cc\n\ncmaq_PM25_CO=list(np.concatenate(ee).flat)\n\ndel ee\nCO_emi=list(np.concatenate(ff).flat) \nprint(len(CO_emi))\ndel ff\n\n#NO_emi=list(np.concatenate(hh).flat) \n#del hh\nPRSFC=list(np.concatenate(ii).flat) \ndel ii\nPBL=list(np.concatenate(jj).flat) \ndel jj\nTEMP2=list(np.concatenate(kk).flat) \ndel kk\nWSPD10=list(np.concatenate(ll).flat) \ndel ll\nWDIR10=list(np.concatenate(mm).flat)\ndel mm\n\nRGRND=list(np.concatenate(oo1).flat) \ndel oo1\n#RN=list(np.concatenate(pp).flat)\n#del pp\n#RC=list(np.concatenate(qq).flat)\n#del qq\nCFRAC=list(np.concatenate(rr).flat)\nprint(len(CFRAC))\ndel rr\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 values if the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH,'CMAQ12KM_O3(ppb)':cmaq_O3,'CMAQ12KM_NO2(ppb)':cmaq_NO2,'CMAQ12KM_CO(ppm)':cmaq_CO,'CMAQ_OC(ug/m3)':cmaq_PM25_CO,'CO(moles/s)':CO_emi,'PRSFC(Pa)':PRSFC,'PBL(m)':PBL,'TEMP2(K)':TEMP2,'WSPD10(m/s)':WSPD10,'WDIR10(degree)':WDIR10,'RGRND(W/m2)':RGRND,'CFRAC':CFRAC})\nprint(dat.head())\ndat.to_csv(home+'/cmaq/test_data.csv',index=False)\n\n", + "history_output" : "20220527\n20220526\n5622240\n5622240\n5622240\n5622240\n5622240\n5622240\n5622240\n5622240\n Latitude Longitude YYYYMMDDHH ... WDIR10(degree) RGRND(W/m2) CFRAC\n0 21.829086 -120.620789 2022052712 ... 17.0 578.0 0.0\n1 21.855751 -120.512497 2022052712 ... 16.0 576.0 0.0\n2 21.882309 -120.404144 2022052712 ... 16.0 575.0 0.0\n3 21.908745 -120.295715 2022052712 ... 16.0 573.0 0.0\n4 21.935051 -120.187225 2022052712 ... 16.0 571.0 0.0\n\n[5 rows x 15 columns]\n", + "history_begin_time" : 1653698126770, + "history_end_time" : 1653698392328, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "y78mtjrm28e", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\n# home directory\nhome = str(Path.home())\n\ntoday=datetime.datetime.today().strftime('%Y%m%d')\npday_= datetime.datetime.today() - datetime.timedelta(days=1)\npday=pday_.strftime('%Y%m%d')\nfday_= datetime.datetime.today() + datetime.timedelta(days=1)\nfday=fday_.strftime('%Y%m%d')\ndays=[today,pday]\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in diractory. 
must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# O3 variable\n \t# O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n \n # read emission results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220303:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313 and int(i)<=20220331:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ff.append(o3tp)\n\n # NO\n# oo=df.variables['NO'][:].values[k,0]\n# oo3=np.ravel(oo)\n# o3tp=np.transpose(oo3)\n# o3tp=np.round(o3tp)\n# hh.append(o3tp) \n \n# read mcip results \n# date must be later of 20210101\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\n for j in files:\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n \t# NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n\n \t# NO2\n oo=df.variables['CFRAC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n \n \ncmaq_O3=list(np.concatenate(aa).flat) \nprint(len(cmaq_O3))\ndel aa\ncmaq_NO2=list(np.concatenate(bb).flat) \nprint(len(cmaq_NO2))\ndel bb\ncmaq_CO=list(np.concatenate(cc).flat) \nprint(len(cmaq_CO))\ndel 
cc\n\ncmaq_PM25_CO=list(np.concatenate(ee).flat)\n\ndel ee\nCO_emi=list(np.concatenate(ff).flat) \nprint(len(CO_emi))\ndel ff\n\n#NO_emi=list(np.concatenate(hh).flat) \n#del hh\nPRSFC=list(np.concatenate(ii).flat) \ndel ii\nPBL=list(np.concatenate(jj).flat) \ndel jj\nTEMP2=list(np.concatenate(kk).flat) \ndel kk\nWSPD10=list(np.concatenate(ll).flat) \ndel ll\nWDIR10=list(np.concatenate(mm).flat)\ndel mm\n\nRGRND=list(np.concatenate(oo1).flat) \ndel oo1\n#RN=list(np.concatenate(pp).flat)\n#del pp\n#RC=list(np.concatenate(qq).flat)\n#del qq\nCFRAC=list(np.concatenate(rr).flat)\nprint(len(CFRAC))\ndel rr\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 values if the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH,'CMAQ12KM_O3(ppb)':cmaq_O3,'CMAQ12KM_NO2(ppb)':cmaq_NO2,'CMAQ12KM_CO(ppm)':cmaq_CO,'CMAQ_OC(ug/m3)':cmaq_PM25_CO,'CO(moles/s)':CO_emi,'PRSFC(Pa)':PRSFC,'PBL(m)':PBL,'TEMP2(K)':TEMP2,'WSPD10(m/s)':WSPD10,'WDIR10(degree)':WDIR10,'RGRND(W/m2)':RGRND,'CFRAC':CFRAC})\nprint(dat.head())\ndat.to_csv(home+'/cmaq/test_data.csv',index=False)\n\n", + "history_output" : "20220519\n20220518\nTraceback (most recent call last):\n File \"processing_test_data.py\", line 141, in \n cmaq_O3=list(np.concatenate(aa).flat) \n File \"<__array_function__ internals>\", line 6, in concatenate\nValueError: need at least one array to concatenate\n", + "history_begin_time" : 1652934715358, + "history_end_time" : 1652934716198, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : "100001", + "indicator" : "Done" +},{ + "history_id" : "rkb9aeni5ts", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\n# home directory\nhome = str(Path.home())\n\ntoday=datetime.datetime.today().strftime('%Y%m%d')\npday_= datetime.datetime.today() - datetime.timedelta(days=1)\npday=pday_.strftime('%Y%m%d')\nfday_= datetime.datetime.today() + datetime.timedelta(days=1)\nfday=fday_.strftime('%Y%m%d')\ndays=[today,pday]\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in diractory. 
must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# O3 variable\n \t# O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n \n # read emission results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220303:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313 and int(i)<=20220331:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ff.append(o3tp)\n\n # NO\n# oo=df.variables['NO'][:].values[k,0]\n# oo3=np.ravel(oo)\n# o3tp=np.transpose(oo3)\n# o3tp=np.round(o3tp)\n# hh.append(o3tp) \n \n# read mcip results \n# date must be later of 20210101\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\n for j in files:\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n \t# NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n\n \t# NO2\n oo=df.variables['CFRAC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n \n \ncmaq_O3=list(np.concatenate(aa).flat) \nprint(len(cmaq_O3))\ndel aa\ncmaq_NO2=list(np.concatenate(bb).flat) \nprint(len(cmaq_NO2))\ndel bb\ncmaq_CO=list(np.concatenate(cc).flat) \nprint(len(cmaq_CO))\ndel 
cc\n\ncmaq_PM25_CO=list(np.concatenate(ee).flat)\n\ndel ee\nCO_emi=list(np.concatenate(ff).flat) \nprint(len(CO_emi))\ndel ff\n\n#NO_emi=list(np.concatenate(hh).flat) \n#del hh\nPRSFC=list(np.concatenate(ii).flat) \ndel ii\nPBL=list(np.concatenate(jj).flat) \ndel jj\nTEMP2=list(np.concatenate(kk).flat) \ndel kk\nWSPD10=list(np.concatenate(ll).flat) \ndel ll\nWDIR10=list(np.concatenate(mm).flat)\ndel mm\n\nRGRND=list(np.concatenate(oo1).flat) \ndel oo1\n#RN=list(np.concatenate(pp).flat)\n#del pp\n#RC=list(np.concatenate(qq).flat)\n#del qq\nCFRAC=list(np.concatenate(rr).flat)\nprint(len(CFRAC))\ndel rr\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 values if the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH,'CMAQ12KM_O3(ppb)':cmaq_O3,'CMAQ12KM_NO2(ppb)':cmaq_NO2,'CMAQ12KM_CO(ppm)':cmaq_CO,'CMAQ_OC(ug/m3)':cmaq_PM25_CO,'CO(moles/s)':CO_emi,'PRSFC(Pa)':PRSFC,'PBL(m)':PBL,'TEMP2(K)':TEMP2,'WSPD10(m/s)':WSPD10,'WDIR10(degree)':WDIR10,'RGRND(W/m2)':RGRND,'CFRAC':CFRAC})\nprint(dat.head())\ndat.to_csv(home+'/cmaq/test_data.csv',index=False)\n\n", + "history_output" : "20220517\n20220516\n5622240\n5622240\n5622240\n5622240\n5622240\n5622240\n5622240\n5622240\n Latitude Longitude YYYYMMDDHH ... WDIR10(degree) RGRND(W/m2) CFRAC\n0 21.829086 -120.620789 2022051712 ... 13.0 587.0 0.0\n1 21.855751 -120.512497 2022051712 ... 13.0 586.0 0.0\n2 21.882309 -120.404144 2022051712 ... 13.0 584.0 0.0\n3 21.908745 -120.295715 2022051712 ... 13.0 583.0 0.0\n4 21.935051 -120.187225 2022051712 ... 13.0 581.0 0.0\n\n[5 rows x 15 columns]\n", + "history_begin_time" : 1652831960005, + "history_end_time" : 1652832228551, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "3mmbvxznj0w", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\n# home directory\nhome = str(Path.home())\n\ntoday=datetime.datetime.today().strftime('%Y%m%d')\npday_= datetime.datetime.today() - datetime.timedelta(days=1)\npday=pday_.strftime('%Y%m%d')\nfday_= datetime.datetime.today() + datetime.timedelta(days=1)\nfday=fday_.strftime('%Y%m%d')\ndays=[today,pday]\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in diractory. 
must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# O3 variable\n \t# O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n \n # read emission results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220303:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313 and int(i)<=20220331:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ff.append(o3tp)\n\n # NO\n# oo=df.variables['NO'][:].values[k,0]\n# oo3=np.ravel(oo)\n# o3tp=np.transpose(oo3)\n# o3tp=np.round(o3tp)\n# hh.append(o3tp) \n \n# read mcip results \n# date must be later of 20210101\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\n for j in files:\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n \t# NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n\n \t# NO2\n oo=df.variables['CFRAC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n \n \ncmaq_O3=list(np.concatenate(aa).flat) \nprint(len(cmaq_O3))\ndel aa\ncmaq_NO2=list(np.concatenate(bb).flat) \nprint(len(cmaq_NO2))\ndel bb\ncmaq_CO=list(np.concatenate(cc).flat) \nprint(len(cmaq_CO))\ndel 
cc\n\ncmaq_PM25_CO=list(np.concatenate(ee).flat)\n\ndel ee\nCO_emi=list(np.concatenate(ff).flat) \nprint(len(CO_emi))\ndel ff\n\n#NO_emi=list(np.concatenate(hh).flat) \n#del hh\nPRSFC=list(np.concatenate(ii).flat) \ndel ii\nPBL=list(np.concatenate(jj).flat) \ndel jj\nTEMP2=list(np.concatenate(kk).flat) \ndel kk\nWSPD10=list(np.concatenate(ll).flat) \ndel ll\nWDIR10=list(np.concatenate(mm).flat)\ndel mm\n\nRGRND=list(np.concatenate(oo1).flat) \ndel oo1\n#RN=list(np.concatenate(pp).flat)\n#del pp\n#RC=list(np.concatenate(qq).flat)\n#del qq\nCFRAC=list(np.concatenate(rr).flat)\nprint(len(CFRAC))\ndel rr\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 values if the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH,'CMAQ12KM_O3(ppb)':cmaq_O3,'CMAQ12KM_NO2(ppb)':cmaq_NO2,'CMAQ12KM_CO(ppm)':cmaq_CO,'CMAQ_OC(ug/m3)':cmaq_PM25_CO,'CO(moles/s)':CO_emi,'PRSFC(Pa)':PRSFC,'PBL(m)':PBL,'TEMP2(K)':TEMP2,'WSPD10(m/s)':WSPD10,'WDIR10(degree)':WDIR10,'RGRND(W/m2)':RGRND,'CFRAC':CFRAC})\nprint(dat.head())\ndat.to_csv(home+'/cmaq/test_data.csv',index=False)\n\n", + "history_output" : "20220517\n20220516\n5622240\n5622240\n5622240\n5622240\n5622240\n5622240\n5622240\n5622240\n Latitude Longitude YYYYMMDDHH ... WDIR10(degree) RGRND(W/m2) CFRAC\n0 21.829086 -120.620789 2022051712 ... 13.0 587.0 0.0\n1 21.855751 -120.512497 2022051712 ... 13.0 586.0 0.0\n2 21.882309 -120.404144 2022051712 ... 13.0 584.0 0.0\n3 21.908745 -120.295715 2022051712 ... 13.0 583.0 0.0\n4 21.935051 -120.187225 2022051712 ... 
13.0 581.0 0.0\n\n[5 rows x 15 columns]\n", + "history_begin_time" : 1652831094620, + "history_end_time" : 1652831359543, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "nzkd32bw9gd", + "history_input" : null, + "history_output" : "Remote SCP command had error: scp: nzkd32bw9gd.tar: Disk quota exceeded", + "history_begin_time" : 1652786138024, + "history_end_time" : 1652786139520, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : "p6wvf2", + "indicator" : "Failed" +},{ + "history_id" : "cgaalfh045q", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\n# home directory\nhome = str(Path.home())\n\ntoday=datetime.datetime.today().strftime('%Y%m%d')\npday_= datetime.datetime.today() - datetime.timedelta(days=1)\npday=pday_.strftime('%Y%m%d')\nfday_= datetime.datetime.today() + datetime.timedelta(days=1)\nfday=fday_.strftime('%Y%m%d')\ndays=[today,pday]\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# O3 variable\n \t# O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n \n # read emission results\n # old files before 20210315 are not in diractory. 
must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220303:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313 and int(i)<=20220331:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ff.append(o3tp)\n\n # NO\n# oo=df.variables['NO'][:].values[k,0]\n# oo3=np.ravel(oo)\n# o3tp=np.transpose(oo3)\n# o3tp=np.round(o3tp)\n# hh.append(o3tp) \n \n# read mcip results \n# date must be later of 20210101\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\n for j in files:\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n \t# NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n\n \t# NO2\n oo=df.variables['CFRAC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n \n \ncmaq_O3=list(np.concatenate(aa).flat) \nprint(len(cmaq_O3))\ndel aa\ncmaq_NO2=list(np.concatenate(bb).flat) \nprint(len(cmaq_NO2))\ndel bb\ncmaq_CO=list(np.concatenate(cc).flat) \nprint(len(cmaq_CO))\ndel cc\n\ncmaq_PM25_CO=list(np.concatenate(ee).flat)\n\ndel ee\nCO_emi=list(np.concatenate(ff).flat) \nprint(len(CO_emi))\ndel ff\n\n#NO_emi=list(np.concatenate(hh).flat) \n#del hh\nPRSFC=list(np.concatenate(ii).flat) \ndel ii\nPBL=list(np.concatenate(jj).flat) \ndel jj\nTEMP2=list(np.concatenate(kk).flat) \ndel kk\nWSPD10=list(np.concatenate(ll).flat) \ndel ll\nWDIR10=list(np.concatenate(mm).flat)\ndel mm\n\nRGRND=list(np.concatenate(oo1).flat) \ndel oo1\n#RN=list(np.concatenate(pp).flat)\n#del pp\n#RC=list(np.concatenate(qq).flat)\n#del qq\nCFRAC=list(np.concatenate(rr).flat)\nprint(len(CFRAC))\ndel rr\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 values if the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime 
date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH,'CMAQ12KM_O3(ppb)':cmaq_O3,'CMAQ12KM_NO2(ppb)':cmaq_NO2,'CMAQ12KM_CO(ppm)':cmaq_CO,'CMAQ_OC(ug/m3)':cmaq_PM25_CO,'CO(moles/s)':CO_emi,'PRSFC(Pa)':PRSFC,'PBL(m)':PBL,'TEMP2(K)':TEMP2,'WSPD10(m/s)':WSPD10,'WDIR10(degree)':WDIR10,'RGRND(W/m2)':RGRND,'CFRAC':CFRAC})\nprint(dat.head())\ndat.to_csv(home+'/cmaq/test_data.csv',index=False)\n\n", + "history_output" : "20220508\n20220507\n5622240\n5622240\n5622240\n5622240\n5622240\n5622240\n5622240\n5622240\n Latitude Longitude YYYYMMDDHH ... WDIR10(degree) RGRND(W/m2) CFRAC\n0 21.829086 -120.620789 2022050812 ... 9.0 282.0 0.0\n1 21.855751 -120.512497 2022050812 ... 9.0 281.0 0.0\n2 21.882309 -120.404144 2022050812 ... 8.0 278.0 1.0\n3 21.908745 -120.295715 2022050812 ... 8.0 277.0 1.0\n4 21.935051 -120.187225 2022050812 ... 7.0 279.0 1.0\n\n[5 rows x 15 columns]\n", + "history_begin_time" : 1652054986013, + "history_end_time" : 1652055212321, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "jat9fidsg1o", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\n# home directory\nhome = str(Path.home())\n\ntoday=datetime.datetime.today().strftime('%Y%m%d')\npday_= datetime.datetime.today() - datetime.timedelta(days=1)\npday=pday_.strftime('%Y%m%d')\nfday_= datetime.datetime.today() + datetime.timedelta(days=1)\nfday=fday_.strftime('%Y%m%d')\ndays=[today,pday]\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# O3 variable\n \t# O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n \n # read emission results\n # old files before 20210315 are not in diractory. 
must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220303:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313 and int(i)<=20220331:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ff.append(o3tp)\n\n # NO\n# oo=df.variables['NO'][:].values[k,0]\n# oo3=np.ravel(oo)\n# o3tp=np.transpose(oo3)\n# o3tp=np.round(o3tp)\n# hh.append(o3tp) \n \n# read mcip results \n# date must be later of 20210101\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\n for j in files:\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n \t# NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n\n \t# NO2\n oo=df.variables['CFRAC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n \n \ncmaq_O3=list(np.concatenate(aa).flat) \nprint(len(cmaq_O3))\ndel aa\ncmaq_NO2=list(np.concatenate(bb).flat) \nprint(len(cmaq_NO2))\ndel bb\ncmaq_CO=list(np.concatenate(cc).flat) \nprint(len(cmaq_CO))\ndel cc\n\ncmaq_PM25_CO=list(np.concatenate(ee).flat)\n\ndel ee\nCO_emi=list(np.concatenate(ff).flat) \nprint(len(CO_emi))\ndel ff\n\n#NO_emi=list(np.concatenate(hh).flat) \n#del hh\nPRSFC=list(np.concatenate(ii).flat) \ndel ii\nPBL=list(np.concatenate(jj).flat) \ndel jj\nTEMP2=list(np.concatenate(kk).flat) \ndel kk\nWSPD10=list(np.concatenate(ll).flat) \ndel ll\nWDIR10=list(np.concatenate(mm).flat)\ndel mm\n\nRGRND=list(np.concatenate(oo1).flat) \ndel oo1\n#RN=list(np.concatenate(pp).flat)\n#del pp\n#RC=list(np.concatenate(qq).flat)\n#del qq\nCFRAC=list(np.concatenate(rr).flat)\nprint(len(CFRAC))\ndel rr\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 values if the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime 
date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH,'CMAQ12KM_O3(ppb)':cmaq_O3,'CMAQ12KM_NO2(ppb)':cmaq_NO2,'CMAQ12KM_CO(ppm)':cmaq_CO,'CMAQ_OC(ug/m3)':cmaq_PM25_CO,'CO(moles/s)':CO_emi,'PRSFC(Pa)':PRSFC,'PBL(m)':PBL,'TEMP2(K)':TEMP2,'WSPD10(m/s)':WSPD10,'WDIR10(degree)':WDIR10,'RGRND(W/m2)':RGRND,'CFRAC':CFRAC})\nprint(dat.head())\ndat.to_csv(home+'/cmaq/test_data.csv',index=False)\n\n", + "history_output" : "20220508\n20220507\n5622240\n5622240\n5622240\n5622240\n5622240\n5622240\n5622240\n5622240\n Latitude Longitude YYYYMMDDHH ... WDIR10(degree) RGRND(W/m2) CFRAC\n0 21.829086 -120.620789 2022050812 ... 9.0 282.0 0.0\n1 21.855751 -120.512497 2022050812 ... 9.0 281.0 0.0\n2 21.882309 -120.404144 2022050812 ... 8.0 278.0 1.0\n3 21.908745 -120.295715 2022050812 ... 8.0 277.0 1.0\n4 21.935051 -120.187225 2022050812 ... 7.0 279.0 1.0\n\n[5 rows x 15 columns]\n", + "history_begin_time" : 1652047846389, + "history_end_time" : 1652048118277, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "7cobt8kdx7i", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\n# home directory\nhome = str(Path.home())\n\ntoday=datetime.datetime.today().strftime('%Y%m%d')\npday_= datetime.datetime.today() - datetime.timedelta(days=1)\npday=pday_.strftime('%Y%m%d')\nfday_= datetime.datetime.today() + datetime.timedelta(days=1)\nfday=fday_.strftime('%Y%m%d')\ndays=[today,pday]\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# O3 variable\n \t# O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n \n # read emission results\n # old files before 20210315 are not in diractory. 
must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220303:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313 and int(i)<=20220331:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ff.append(o3tp)\n\n # NO\n# oo=df.variables['NO'][:].values[k,0]\n# oo3=np.ravel(oo)\n# o3tp=np.transpose(oo3)\n# o3tp=np.round(o3tp)\n# hh.append(o3tp) \n \n# read mcip results \n# date must be later of 20210101\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\n for j in files:\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n \t# NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n\n \t# NO2\n oo=df.variables['CFRAC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n \n \ncmaq_O3=list(np.concatenate(aa).flat) \nprint(len(cmaq_O3))\ndel aa\ncmaq_NO2=list(np.concatenate(bb).flat) \nprint(len(cmaq_NO2))\ndel bb\ncmaq_CO=list(np.concatenate(cc).flat) \nprint(len(cmaq_CO))\ndel cc\n\ncmaq_PM25_CO=list(np.concatenate(ee).flat)\n\ndel ee\nCO_emi=list(np.concatenate(ff).flat) \nprint(len(CO_emi))\ndel ff\n\n#NO_emi=list(np.concatenate(hh).flat) \n#del hh\nPRSFC=list(np.concatenate(ii).flat) \ndel ii\nPBL=list(np.concatenate(jj).flat) \ndel jj\nTEMP2=list(np.concatenate(kk).flat) \ndel kk\nWSPD10=list(np.concatenate(ll).flat) \ndel ll\nWDIR10=list(np.concatenate(mm).flat)\ndel mm\n\nRGRND=list(np.concatenate(oo1).flat) \ndel oo1\n#RN=list(np.concatenate(pp).flat)\n#del pp\n#RC=list(np.concatenate(qq).flat)\n#del qq\nCFRAC=list(np.concatenate(rr).flat)\nprint(len(CFRAC))\ndel rr\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 values if the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime 
date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH,'CMAQ12KM_O3(ppb)':cmaq_O3,'CMAQ12KM_NO2(ppb)':cmaq_NO2,'CMAQ12KM_CO(ppm)':cmaq_CO,'CMAQ_OC(ug/m3)':cmaq_PM25_CO,'CO(moles/s)':CO_emi,'PRSFC(Pa)':PRSFC,'PBL(m)':PBL,'TEMP2(K)':TEMP2,'WSPD10(m/s)':WSPD10,'WDIR10(degree)':WDIR10,'RGRND(W/m2)':RGRND,'CFRAC':CFRAC})\nprint(dat.head())\ndat.to_csv(home+'/cmaq/test_data.csv',index=False)\n\n", + "history_output" : "20220420\n20220419\n2811120\n2811120\n2811120\n2811120\n2811120\n5622240\n5622240\n5622240\nTraceback (most recent call last):\n File \"/home/mislam25/gw-workspace/7cobt8kdx7i/processing_test_data.py\", line 209, in \n dat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH,'CMAQ12KM_O3(ppb)':cmaq_O3,'CMAQ12KM_NO2(ppb)':cmaq_NO2,'CMAQ12KM_CO(ppm)':cmaq_CO,'CMAQ_OC(ug/m3)':cmaq_PM25_CO,'CO(moles/s)':CO_emi,'PRSFC(Pa)':PRSFC,'PBL(m)':PBL,'TEMP2(K)':TEMP2,'WSPD10(m/s)':WSPD10,'WDIR10(degree)':WDIR10,'RGRND(W/m2)':RGRND,'CFRAC':CFRAC})\n File \"/home/mislam25/anaconda3/lib/python3.9/site-packages/pandas/core/frame.py\", line 614, in __init__\n mgr = dict_to_mgr(data, index, columns, dtype=dtype, copy=copy, typ=manager)\n File \"/home/mislam25/anaconda3/lib/python3.9/site-packages/pandas/core/internals/construction.py\", line 464, in dict_to_mgr\n return arrays_to_mgr(\n File \"/home/mislam25/anaconda3/lib/python3.9/site-packages/pandas/core/internals/construction.py\", line 119, in arrays_to_mgr\n index = _extract_index(arrays)\n File \"/home/mislam25/anaconda3/lib/python3.9/site-packages/pandas/core/internals/construction.py\", line 635, in _extract_index\n raise ValueError(\"All arrays must be of the same length\")\nValueError: All arrays must be of the same length\n", + "history_begin_time" : 1650480551788, + "history_end_time" : 1650480845629, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : "3wgogh", + "indicator" : "Done" +},{ + "history_id" : "t59moyyfae7", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\n# home directory\nhome = str(Path.home())\n\ntoday=datetime.datetime.today().strftime('%Y%m%d')\npday_= datetime.datetime.today() - datetime.timedelta(days=1)\npday=pday_.strftime('%Y%m%d')\nfday_= datetime.datetime.today() + datetime.timedelta(days=1)\nfday=fday_.strftime('%Y%m%d')\ndays=[today,pday]\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in diractory. 
must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# O3 variable\n \t# O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n \n # read emission results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220303:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313 and int(i)<=20220331:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ff.append(o3tp)\n\n # NO\n# oo=df.variables['NO'][:].values[k,0]\n# oo3=np.ravel(oo)\n# o3tp=np.transpose(oo3)\n# o3tp=np.round(o3tp)\n# hh.append(o3tp) \n \n# read mcip results \n# date must be later of 20210101\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\n for j in files:\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n \t# NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n\n \t# NO2\n oo=df.variables['CFRAC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n \n \ncmaq_O3=list(np.concatenate(aa).flat) \nprint(len(cmaq_O3))\ndel aa\ncmaq_NO2=list(np.concatenate(bb).flat) \nprint(len(cmaq_NO2))\ndel bb\ncmaq_CO=list(np.concatenate(cc).flat) \nprint(len(cmaq_CO))\ndel 
cc\n\ncmaq_PM25_CO=list(np.concatenate(ee).flat)\n\ndel ee\nCO_emi=list(np.concatenate(ff).flat) \nprint(len(CO_emi))\ndel ff\n\n#NO_emi=list(np.concatenate(hh).flat) \n#del hh\nPRSFC=list(np.concatenate(ii).flat) \ndel ii\nPBL=list(np.concatenate(jj).flat) \ndel jj\nTEMP2=list(np.concatenate(kk).flat) \ndel kk\nWSPD10=list(np.concatenate(ll).flat) \ndel ll\nWDIR10=list(np.concatenate(mm).flat)\ndel mm\n\nRGRND=list(np.concatenate(oo1).flat) \ndel oo1\n#RN=list(np.concatenate(pp).flat)\n#del pp\n#RC=list(np.concatenate(qq).flat)\n#del qq\nCFRAC=list(np.concatenate(rr).flat)\nprint(len(CFRAC))\ndel rr\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 values if the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH,'CMAQ12KM_O3(ppb)':cmaq_O3,'CMAQ12KM_NO2(ppb)':cmaq_NO2,'CMAQ12KM_CO(ppm)':cmaq_CO,'CMAQ_OC(ug/m3)':cmaq_PM25_CO,'CO(moles/s)':CO_emi,'PRSFC(Pa)':PRSFC,'PBL(m)':PBL,'TEMP2(K)':TEMP2,'WSPD10(m/s)':WSPD10,'WDIR10(degree)':WDIR10,'RGRND(W/m2)':RGRND,'CFRAC':CFRAC})\nprint(dat.head())\ndat.to_csv(home+'/cmaq/test_data.csv',index=False)\n\n", + "history_output" : "20220420\n20220419\n2811120\n2811120\n2811120\n2811120\n2811120\n5622240\n5622240\n5622240\nTraceback (most recent call last):\n File \"/home/mislam25/gw-workspace/t59moyyfae7/processing_test_data.py\", line 209, in \n dat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH,'CMAQ12KM_O3(ppb)':cmaq_O3,'CMAQ12KM_NO2(ppb)':cmaq_NO2,'CMAQ12KM_CO(ppm)':cmaq_CO,'CMAQ_OC(ug/m3)':cmaq_PM25_CO,'CO(moles/s)':CO_emi,'PRSFC(Pa)':PRSFC,'PBL(m)':PBL,'TEMP2(K)':TEMP2,'WSPD10(m/s)':WSPD10,'WDIR10(degree)':WDIR10,'RGRND(W/m2)':RGRND,'CFRAC':CFRAC})\n File \"/home/mislam25/anaconda3/lib/python3.9/site-packages/pandas/core/frame.py\", line 614, in __init__\n mgr = dict_to_mgr(data, index, columns, dtype=dtype, copy=copy, typ=manager)\n File \"/home/mislam25/anaconda3/lib/python3.9/site-packages/pandas/core/internals/construction.py\", line 464, in dict_to_mgr\n return arrays_to_mgr(\n File \"/home/mislam25/anaconda3/lib/python3.9/site-packages/pandas/core/internals/construction.py\", line 119, in arrays_to_mgr\n index = _extract_index(arrays)\n File \"/home/mislam25/anaconda3/lib/python3.9/site-packages/pandas/core/internals/construction.py\", line 635, in _extract_index\n raise ValueError(\"All arrays must be of the same length\")\nValueError: All arrays must be of the same length\n", + "history_begin_time" : 1650473497697, + "history_end_time" : 1650473789079, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : "3wgogh", + "indicator" : "Done" +},{ + "history_id" : "7lxad71eov1", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom 
pathlib import Path\nimport datetime\n# home directory\nhome = str(Path.home())\n\ntoday=datetime.datetime.today().strftime('%Y%m%d')\npday_= datetime.datetime.today() - datetime.timedelta(days=1)\npday=pday_.strftime('%Y%m%d')\nfday_= datetime.datetime.today() + datetime.timedelta(days=1)\nfday=fday_.strftime('%Y%m%d')\ndays=[today,pday]\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# O3 variable\n \t# O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n # PM25_EC\n oo=df.variables['PM25_EC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n dd.append(o3tp)\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n \n # read emission results\n # old files before 20210315 are not in diractory. 
must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220303:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313 and int(i)<=20220331:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ff.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n gg.append(o3tp)\n # NO\n# oo=df.variables['NO'][:].values[k,0]\n# oo3=np.ravel(oo)\n# o3tp=np.transpose(oo3)\n# o3tp=np.round(o3tp)\n# hh.append(o3tp) \n \n# read mcip results \n# date must be later of 20210101\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\n for j in files:\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n \t# NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n # NO\n oo=df.variables['WSTAR'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n nn.append(o3tp)\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n oo1.append(o3tp)\n # NO\n oo=df.variables['RN'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n pp.append(o3tp)\n \t# NO2\n oo=df.variables['RC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n qq.append(o3tp)\n \t# NO2\n oo=df.variables['CFRAC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n \n \ncmaq_O3=list(np.concatenate(aa).flat) \nprint(len(cmaq_O3))\ndel aa\ncmaq_NO2=list(np.concatenate(bb).flat) \nprint(len(cmaq_NO2))\ndel bb\ncmaq_CO=list(np.concatenate(cc).flat) \nprint(len(cmaq_CO))\ndel cc\ncmaq_PM25_EC=list(np.concatenate(dd).flat) \nprint(len(cmaq_PM25_EC))\ndel dd\ncmaq_PM25_CO=list(np.concatenate(ee).flat)\n\ndel ee\nCO_emi=list(np.concatenate(ff).flat) \nprint(len(CO_emi))\ndel ff\nNO2_emi=list(np.concatenate(gg).flat) \nprint(len(NO2_emi))\ndel gg\n#NO_emi=list(np.concatenate(hh).flat) \n#del hh\nPRSFC=list(np.concatenate(ii).flat) \ndel ii\nPBL=list(np.concatenate(jj).flat) \ndel jj\nTEMP2=list(np.concatenate(kk).flat) \ndel kk\nWSPD10=list(np.concatenate(ll).flat) \ndel ll\nWDIR10=list(np.concatenate(mm).flat)\ndel mm\nWSTAR=list(np.concatenate(nn).flat) \ndel 
nn\nRGRND=list(np.concatenate(oo1).flat) \ndel oo1\nRN=list(np.concatenate(pp).flat)\ndel pp\nRC=list(np.concatenate(qq).flat)\ndel qq\nCFRAC=list(np.concatenate(rr).flat)\nprint(len(CFRAC))\ndel rr\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 values if the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH,'CMAQ12KM_O3(ppb)':cmaq_O3,'CMAQ12KM_NO2(ppb)':cmaq_NO2,'CMAQ12KM_CO(ppm)':cmaq_CO,'CMAQ_EC(ug/m3)':cmaq_PM25_EC,'CMAQ_OC(ug/m3)':cmaq_PM25_CO,'CO(moles/s)':CO_emi,'NO2(moles/s)':NO2_emi,'PRSFC(Pa)':PRSFC,'PBL(m)':PBL,'TEMP2(K)':TEMP2,'WSPD10(m/s)':WSPD10,'WDIR10(degree)':WDIR10,'WSTAR(m/s)':WSTAR,'RGRND(W/m2)':RGRND,'RN(cm)':RN,'RC(cm)':RC,'CFRAC':CFRAC})\nprint(dat.head())\ndat.to_csv(home+'/cmaq/test_data.csv',index=False)\n\n", + "history_output" : "20220417\n20220416\n5622240\n5622240\n5622240\n5622240\n5622240\n5622240\n5622240\n5622240\n5622240\n5622240\n Latitude Longitude YYYYMMDDHH ... RN(cm) RC(cm) CFRAC\n0 21.829086 -120.620789 2022041712 ... 0.0 0.0 0.0\n1 21.855751 -120.512497 2022041712 ... 0.0 0.0 0.0\n2 21.882309 -120.404144 2022041712 ... 0.0 0.0 0.0\n3 21.908745 -120.295715 2022041712 ... 0.0 0.0 0.0\n4 21.935051 -120.187225 2022041712 ... 0.0 0.0 0.0\n\n[5 rows x 20 columns]\n", + "history_begin_time" : 1650252115378, + "history_end_time" : 1650252522395, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : "3wgogh", + "indicator" : "Done" +},{ + "history_id" : "r7nzz3da9sl", + "history_input" : "import xarray as xr\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfrom pathlib import Path\nimport datetime\n# home directory\nhome = str(Path.home())\n\ntoday=datetime.datetime.today().strftime('%Y%m%d')\npday_= datetime.datetime.today() - datetime.timedelta(days=1)\npday=pday_.strftime('%Y%m%d')\nfday_= datetime.datetime.today() + datetime.timedelta(days=1)\nfday=fday_.strftime('%Y%m%d')\ndays=[today,pday]\naa,bb,cc,dd,ee,ff,gg,hh,ii,jj,kk,ll,mm,nn,oo1,pp,qq,rr,ss=[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]\n#ff=[]\n# k = time dimension - start from 12 to match with data\nt = [12,13,14,15,16,17,18,19,20,21,22,23,0,1,2,3,4,5,6,7,8,9,10,11]\nfor i in days:\n print(i)\n # read cmaq results\n # old files before 20210315 are not in diractory. 
must choose later date.\n if int(i)>=20210315 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n else:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/CCTMout/12km/POST/\"+\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"+i+\"_extracted.nc\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# O3 variable\n \t# O3 variable\n oo=df.variables['O3'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp) \n aa.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n bb.append(o3tp)\n # CO\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n cc.append(o3tp)\n # PM25_EC\n oo=df.variables['PM25_EC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n dd.append(o3tp)\n # PM25_CO\n oo=df.variables['PM25_OC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ee.append(o3tp)\n \n \n # read emission results\n # old files before 20210315 are not in diractory. must choose later date.\n if int(i)>=20191231 and int(i)<=20210902:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_nobeis_2016fh_16j.ncf\")\n elif int(i)==20220303:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n\n# set todays date if they don't change dataformate \n# else if int(i)>=20220313 and int(i)<=int(today):\n elif int(i)>=20220313 and int(i)<=20220331:\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/emis2021/12km/all/\"+\"emis_mole_all_\"+i+\"_AQF5X_cmaq_cb6ae7_2017gb_17j.ncf\")\n for j in files:\n\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['CO'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ff.append(o3tp)\n \t# NO2\n oo=df.variables['NO2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n gg.append(o3tp)\n # NO\n# oo=df.variables['NO'][:].values[k,0]\n# oo3=np.ravel(oo)\n# o3tp=np.transpose(oo3)\n# o3tp=np.round(o3tp)\n# hh.append(o3tp) \n \n# read mcip results \n# date must be later of 20210101\n files = glob.glob(\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/\"+\"METCRO2D_\"+i+\".nc\")\n for j in files:\n df = xr.open_dataset(j)\n for k in t:\n \t# CO variable\n oo=df.variables['PRSFC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ii.append(o3tp)\n \t# NO2\n oo=df.variables['PBL'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n jj.append(o3tp)\n # NO\n oo=df.variables['TEMP2'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n kk.append(o3tp)\n # NO\n oo=df.variables['WSPD10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n ll.append(o3tp)\n # NO\n oo=df.variables['WDIR10'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n mm.append(o3tp)\n # NO\n oo=df.variables['WSTAR'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n nn.append(o3tp)\n # NO\n oo=df.variables['RGRND'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n 
oo1.append(o3tp)\n # NO\n oo=df.variables['RN'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n pp.append(o3tp)\n \t# NO2\n oo=df.variables['RC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n qq.append(o3tp)\n \t# NO2\n oo=df.variables['CFRAC'][:].values[k,0]\n oo3=np.ravel(oo)\n o3tp=np.transpose(oo3)\n o3tp=np.round(o3tp)\n rr.append(o3tp)\n \n \ncmaq_O3=list(np.concatenate(aa).flat) \nprint(len(cmaq_O3))\ndel aa\ncmaq_NO2=list(np.concatenate(bb).flat) \nprint(len(cmaq_NO2))\ndel bb\ncmaq_CO=list(np.concatenate(cc).flat) \nprint(len(cmaq_CO))\ndel cc\ncmaq_PM25_EC=list(np.concatenate(dd).flat) \nprint(len(cmaq_PM25_EC))\ndel dd\ncmaq_PM25_CO=list(np.concatenate(ee).flat)\n\ndel ee\nCO_emi=list(np.concatenate(ff).flat) \nprint(len(CO_emi))\ndel ff\nNO2_emi=list(np.concatenate(gg).flat) \nprint(len(NO2_emi))\ndel gg\n#NO_emi=list(np.concatenate(hh).flat) \n#del hh\nPRSFC=list(np.concatenate(ii).flat) \ndel ii\nPBL=list(np.concatenate(jj).flat) \ndel jj\nTEMP2=list(np.concatenate(kk).flat) \ndel kk\nWSPD10=list(np.concatenate(ll).flat) \ndel ll\nWDIR10=list(np.concatenate(mm).flat)\ndel mm\nWSTAR=list(np.concatenate(nn).flat) \ndel nn\nRGRND=list(np.concatenate(oo1).flat) \ndel oo1\nRN=list(np.concatenate(pp).flat)\ndel pp\nRC=list(np.concatenate(qq).flat)\ndel qq\nCFRAC=list(np.concatenate(rr).flat)\nprint(len(CFRAC))\ndel rr\n\n## selecting lat and long\ndf = xr.open_dataset('/home/yli74/scripts/plots/2020fire/GRIDCRO2D')\nlat_1 = df.variables['LAT'][:].values[0,0]\nlat_flt=np.ravel(lat_1)\n# need to manipulate 48 values if the next day data is available\nLAT=np.tile(lat_flt,len(days)*24)\nprint(len(LAT))\n# long\nlon_1 = df.variables['LON'][:].values[0,0]\nlon_flt=np.ravel(lon_1)\n# need to manipulate 48 values if the next day data is available\nLON=np.tile(lon_flt,len(days)*24)\nprint(len(LON))\n# creating dataframe\n\n## creatime date-time dimension\n# date-time dimension for today\ntime0=[]\nt = ['12','13','14','15','16','17','18','19','20','21','22','23','00','01','02','03','04','05','06','07','08','09','10','11']\nfor i in days:\n for j in t:\n time_0=np.full((265,442),i+j)\n time0.append(time_0)\nYYMMDDHH=list(np.concatenate(time0).flat) \nprint(len(YYMMDDHH))\n\n\n# saving variables\ndat=pd.DataFrame({'Latitude':LAT,'Longitude':LON,'YYYYMMDDHH':YYMMDDHH,'CMAQ12KM_O3(ppb)':cmaq_O3,'CMAQ12KM_NO2(ppb)':cmaq_NO2,'CMAQ12KM_CO(ppm)':cmaq_CO,'CMAQ_EC(ug/m3)':cmaq_PM25_EC,'CMAQ_OC(ug/m3)':cmaq_PM25_CO,'CO(moles/s)':CO_emi,'NO2(moles/s)':NO2_emi,'PRSFC(Pa)':PRSFC,'PBL(m)':PBL,'TEMP2(K)':TEMP2,'WSPD10(m/s)':WSPD10,'WDIR10(degree)':WDIR10,'WSTAR(m/s)':WSTAR,'RGRND(W/m2)':RGRND,'RN(cm)':RN,'RC(cm)':RC,'CFRAC':CFRAC})\nprint(dat.head())\ndat.to_csv(home+'/cmaq/test_data.csv',index=False)\n\n", + "history_output" : "20220417\n20220416\n5622240\n5622240\n5622240\n5622240\n5622240\n5622240\n5622240\n5622240\n5622240\n5622240\n Latitude Longitude YYYYMMDDHH ... RN(cm) RC(cm) CFRAC\n0 21.829086 -120.620789 2022041712 ... 0.0 0.0 0.0\n1 21.855751 -120.512497 2022041712 ... 0.0 0.0 0.0\n2 21.882309 -120.404144 2022041712 ... 0.0 0.0 0.0\n3 21.908745 -120.295715 2022041712 ... 0.0 0.0 0.0\n4 21.935051 -120.187225 2022041712 ... 
0.0 0.0 0.0\n\n[5 rows x 20 columns]\n", + "history_begin_time" : 1650214699608, + "history_end_time" : 1650215096720, + "history_notes" : null, + "history_process" : "ex3vh9", + "host_id" : "3wgogh", + "indicator" : "Done" +},] diff --git a/history/process_fsk7f2.json b/history/process_fsk7f2.json new file mode 100644 index 0000000..8130035 --- /dev/null +++ b/history/process_fsk7f2.json @@ -0,0 +1,161 @@ +[{ + "history_id" : "zo92h4o0v63", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=$(date -d '3 day ago' '+%Y%m%d')\nexport wfname=\"/groups/ESS/aalnaim/cmaq/results/geoweaver_evalution_\"$YYYYMMDD_POST\"_results.txt\"\n\nexport obs_dir_NCL=\"/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X\"\nexport ofname=\"/AQF5X_Hourly_\"\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files/\"\n\nexport mfname=\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"$YYYYMMDD_POST\"_ML_extracted.nc\"\n\nexport grid_fname=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/GRIDCRO2D_\"$YYYYMMDD_POST\".nc\" #This needs to be auto date\n\nexport dx=12000\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\n\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\nsdate=getenv(\"YYYYMMDD_POST\")\nwfname=getenv(\"wfname\")\nobs_dir=getenv(\"obs_dir_NCL\")\nofname=getenv(\"ofname\")\nmod_dir=getenv(\"postdata_dir\")\nmfname=getenv(\"mfname\")\ndkm=tofloat(getenv(\"dx\"))\ngrid_fname=(getenv(\"grid_fname\"))\n\nmaxdist=dkm/90000.0*1.414\nmaxarea=0.25\nthd=70\n\n;-----read model lat lon------\n;read lat lon\nf1 = addfile(grid_fname,\"r\")\nmlat = f1->LAT(0,0,:,:)\nmlon = f1->LON(0,0,:,:)\ndelete(f1)\nmlat1d = ndtooned(mlat)\nmlon1d = ndtooned(mlon)\ndelete([/mlat,mlon/])\n\n;-----read cmaq results-----\nf2 = addfile(mod_dir+mfname,\"r\")\nmO3 = f2->O3(:,:,:) ;ppb\n\n\nnt = dimsizes(mO3(:,0,0))\nny = dimsizes(mO3(0,:,0))\nnx = dimsizes(mO3(0,0,:))\n\nm8O3 = new((/17,ny,nx/),\"double\")\nm8maxO3 = new((/ny,nx/),\"double\")\n\ndo ih=0,16\n m8O3(ih,:,:)=dim_avg_n(mO3(ih:ih+7,:,:),0)\nend do\nm8maxO3 = dim_max_n(m8O3,0) ;type double\nmO31d_d=ndtooned(m8maxO3) ; type double\nmO31d=tofloat(mO31d_d)\n\ndelete([/f2,mO3,m8O3,m8maxO3/])\n\n;-----read obs-----\nsyyyy1=str_get_cols(sdate,0,3)\nsmm1=str_get_cols(sdate,4,5)\nsdd1=str_get_cols(sdate,6,7)\n\nymd=jul2greg(greg2jul(tointeger(syyyy1),tointeger(smm1),tointeger(sdd1),-1)+1)\nsyyyy2=tostring_with_format(ymd(0),\"%0.4i\")\nsmm2=tostring_with_format(ymd(1),\"%0.2i\")\nsdd2=tostring_with_format(ymd(2),\"%0.2i\")\n\ntolat=(/-999.0/) ;set the first data to 0\ntolon=tolat\ntoO3=tolat\n\ndo ih=12,35\n if (ih.lt.24) then\n shh=tostring_with_format(ih,\"%0.2i\")\n syyyy=syyyy1\n smm=smm1\n sdd=sdd1\n else\n shh=tostring_with_format(ih-24,\"%0.2i\")\n syyyy=syyyy2\n smm=smm2\n sdd=sdd2\n end if\n data=asciiread(obs_dir+ofname+syyyy+smm+sdd+shh+\".dat\",-1,\"string\")\n xx=array_append_record(tolat,stringtofloat(str_get_field(data(1::), 2,\",\")),0)\n yy=array_append_record(tolon,stringtofloat(str_get_field(data(1::), 3,\",\")),0)\n 
zz=array_append_record(toO3,stringtofloat(str_get_field(data(1::), 4,\",\")),0)\n delete([/tolat,tolon,toO3/])\n tolat=xx\n tolon=yy\n toO3=zz\n delete([/xx,yy,zz/])\n delete(data)\nend do\n\ntoO3@_FillValue = -999.0\n\n;-----calculate max ave 8 hour o3-----\noflag=tolat*0+1\naa=ind((oflag.gt.0).and.(toO3.ge.0))\nii=0\nprint(\"8h start\")\nif (any(ismissing(aa))) then\n iflag=0\nelse\n iflag=1\n olat=(/tolat(aa(0))/)\n olon=(/tolon(aa(0))/)\n oO3=(/-999.0/)\n o8O3 = new(17,\"float\")\n o8O3 = -999.0\nend if\ndelete(aa)\ndo while (iflag.gt.0)\n aa=ind((tolat.eq.olat(ii)).and.(tolon.eq.olon(ii)).and.(toO3.ge.0))\n oflag(aa)=0\n if (dimsizes(aa).eq.24) then ; calculate 24 h, so calculate 8hr ozone here\n do ih = 0, 16\n o8O3(ih) = avg(toO3(aa(ih:ih+7)))\n end do\n oO3(ii)=max(o8O3)\n end if\n o8O3 = -999.0\n delete(aa)\n aa=ind((oflag.gt.0).and.(toO3.ge.0))\n if (any(ismissing(aa))) then\n iflag=0\n else\n xx=array_append_record(olat,(/tolat(aa(0))/),0)\n yy=array_append_record(olon,(/tolon(aa(0))/),0)\n zz=array_append_record(oO3,(/-999.0/),0)\n delete([/olat,olon,oO3/])\n olat=xx\n olon=yy\n oO3=zz\n delete([/xx,yy,zz/])\n ii=ii+1\n end if\n delete(aa)\nend do\nprint(\"obs 8hour max end\")\naa=ind(oO3.ge.0)\nnobs=dimsizes(aa)\nolat24=olat(aa)\nolon24=olon(aa)\noO324=oO3(aa)\nprint(\"TYPE of oO324: \"+typeof(oO324))\ndelete([/aa,olat,olon,oO3/])\nmO324=oO324*0-999.0\nprint(\"TYPE of mO324: \"+typeof(mO324))\nprint(\"TYPE of mO31d: \"+typeof(mO31d))\nareaa=oO324*0-999.0\nareab=areaa\naread=areaa\n\n;-----find model point-----\ndo in=0,nobs-1\n dis=sqrt((mlat1d-olat24(in))^2+(mlon1d-olon24(in))^2)\n aa=minind(dis)\n ;print(in+\" \"+aa)\n if (dis(aa).lt.maxdist) then\n mO324(in)=mO31d(aa)\n cc=ind((mlat1d.ge.(olat24(in)-maxarea)).and.(mlat1d.le.(olat24(in)+maxarea)).and.\\\n (mlon1d.ge.(olon24(in)-maxarea)).and.(mlon1d.le.(olon24(in)+maxarea)))\n areaa(in)=0\n areab(in)=0\n if (oO324(in).ge.thd) then\n aread(in)=0\n if (max(mO31d(cc)).ge.thd) then\n areab(in)=1\n else\n aread(in)=1\n end if\n else\n bb=ind((olat24.ge.(olat24(in)-maxarea)).and.(olat24.le.(olat24(in)+maxarea)).and.\\\n (olon24.ge.(olon24(in)-maxarea)).and.(olon24.le.(olon24(in)+maxarea)))\n if (max(mO31d(aa)).ge.thd) then\n if (max(oO324(bb)).ge.thd) then\n areaa(in)=0\n else\n areaa(in)=1\n end if\n else\n areaa(in)=0\n end if\n delete(bb)\n end if\n delete(cc)\n end if\n delete(aa)\nend do\n\n;-----cal rmse corr nme nmb me mb-----\ntt=ind((mO324.ge.0).and.(oO324.ge.0))\n\nif (any(ismissing(tt))) then\n rmse=-999.0\n corr=-999.0\n nmb=-999.0\n nme=-999.0\n me=-999.0\n mb=-999.0\nelse\n rmse=dim_rmsd_n(oO324(tt),mO324(tt),0)\n corr=esccr(oO324(tt),mO324(tt),0)\n nmb=sum((mO324(tt)-oO324(tt)))/sum(oO324(tt))\n nme=sum(abs(oO324(tt)-mO324(tt)))/sum(oO324(tt))\n me=avg(abs(oO324(tt)-mO324(tt)))\n mb=avg((mO324(tt)-oO324(tt)))\nend if\n;-----cal ah afar-----\naa=ind((areaa+areab).gt.0)\nbb=ind((aread+areab).gt.0)\nif (any(ismissing(aa))) then\n afar=0.\nelse\n afar=tofloat(sum(areaa(aa)))/tofloat(sum(areab(aa))+sum(areaa(aa)))*100\nend if\ndelete(aa)\nif (any(ismissing(bb))) then\n ah=-999.0\nelse\n ah=tofloat(sum(areab(bb)))/tofloat(sum(areab(bb))+sum(aread(bb)))*100\nend if\ndelete(bb)\nwrite_table(wfname,\"a\",[/sdate,dimsizes(tt),avg(oO324(tt)),avg(mO324(tt)),rmse,corr,nmb,nme,mb,me,ah,afar/],\\\n \"%s,%i,%f,%f,%f,%f,%f,%f,%f,%f,%f,%f\")\ndelete(tt)\nend\n\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\n\nif [ $? 
-eq 0 ]; then\n echo \"Evaluation Completed Successfully\"\n\techo \"Removing ncl file: geoweaver_eva_daily_O3.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\nelse\n echo \"Evaluation Failed!\"\nfi\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\nfatal:asciiread: Unable to open input file (/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X/AQF5X_Hourly_2022062512.dat)\n\nfatal:[\"Execute.c\":8637]:Execute: Error occurred at or near line 81 in file /groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\n\n\nEvaluation Completed Successfully\nRemoving ncl file: geoweaver_eva_daily_O3.ncl...\n", + "history_begin_time" : 1656460094843, + "history_end_time" : 1656460099774, + "history_notes" : null, + "history_process" : "fsk7f2", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "1om9blf3b9v", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=$(date -d '3 day ago' '+%Y%m%d')\nexport wfname=\"/groups/ESS/aalnaim/cmaq/results/geoweaver_evalution_\"$YYYYMMDD_POST\"_results.txt\"\n\nexport obs_dir_NCL=\"/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X\"\nexport ofname=\"/AQF5X_Hourly_\"\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files/\"\n\nexport mfname=\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"$YYYYMMDD_POST\"_ML_extracted.nc\"\n\nexport grid_fname=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/GRIDCRO2D_\"$YYYYMMDD_POST\".nc\" #This needs to be auto date\n\nexport dx=12000\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\n\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\nsdate=getenv(\"YYYYMMDD_POST\")\nwfname=getenv(\"wfname\")\nobs_dir=getenv(\"obs_dir_NCL\")\nofname=getenv(\"ofname\")\nmod_dir=getenv(\"postdata_dir\")\nmfname=getenv(\"mfname\")\ndkm=tofloat(getenv(\"dx\"))\ngrid_fname=(getenv(\"grid_fname\"))\n\nmaxdist=dkm/90000.0*1.414\nmaxarea=0.25\nthd=70\n\n;-----read model lat lon------\n;read lat lon\nf1 = addfile(grid_fname,\"r\")\nmlat = f1->LAT(0,0,:,:)\nmlon = f1->LON(0,0,:,:)\ndelete(f1)\nmlat1d = ndtooned(mlat)\nmlon1d = ndtooned(mlon)\ndelete([/mlat,mlon/])\n\n;-----read cmaq results-----\nf2 = addfile(mod_dir+mfname,\"r\")\nmO3 = f2->O3(:,:,:) ;ppb\n\n\nnt = dimsizes(mO3(:,0,0))\nny = dimsizes(mO3(0,:,0))\nnx = dimsizes(mO3(0,0,:))\n\nm8O3 = new((/17,ny,nx/),\"double\")\nm8maxO3 = new((/ny,nx/),\"double\")\n\ndo ih=0,16\n m8O3(ih,:,:)=dim_avg_n(mO3(ih:ih+7,:,:),0)\nend do\nm8maxO3 = dim_max_n(m8O3,0) ;type double\nmO31d_d=ndtooned(m8maxO3) ; type double\nmO31d=tofloat(mO31d_d)\n\ndelete([/f2,mO3,m8O3,m8maxO3/])\n\n;-----read 
obs-----\nsyyyy1=str_get_cols(sdate,0,3)\nsmm1=str_get_cols(sdate,4,5)\nsdd1=str_get_cols(sdate,6,7)\n\nymd=jul2greg(greg2jul(tointeger(syyyy1),tointeger(smm1),tointeger(sdd1),-1)+1)\nsyyyy2=tostring_with_format(ymd(0),\"%0.4i\")\nsmm2=tostring_with_format(ymd(1),\"%0.2i\")\nsdd2=tostring_with_format(ymd(2),\"%0.2i\")\n\ntolat=(/-999.0/) ;set the first data to 0\ntolon=tolat\ntoO3=tolat\n\ndo ih=12,35\n if (ih.lt.24) then\n shh=tostring_with_format(ih,\"%0.2i\")\n syyyy=syyyy1\n smm=smm1\n sdd=sdd1\n else\n shh=tostring_with_format(ih-24,\"%0.2i\")\n syyyy=syyyy2\n smm=smm2\n sdd=sdd2\n end if\n data=asciiread(obs_dir+ofname+syyyy+smm+sdd+shh+\".dat\",-1,\"string\")\n xx=array_append_record(tolat,stringtofloat(str_get_field(data(1::), 2,\",\")),0)\n yy=array_append_record(tolon,stringtofloat(str_get_field(data(1::), 3,\",\")),0)\n zz=array_append_record(toO3,stringtofloat(str_get_field(data(1::), 4,\",\")),0)\n delete([/tolat,tolon,toO3/])\n tolat=xx\n tolon=yy\n toO3=zz\n delete([/xx,yy,zz/])\n delete(data)\nend do\n\ntoO3@_FillValue = -999.0\n\n;-----calculate max ave 8 hour o3-----\noflag=tolat*0+1\naa=ind((oflag.gt.0).and.(toO3.ge.0))\nii=0\nprint(\"8h start\")\nif (any(ismissing(aa))) then\n iflag=0\nelse\n iflag=1\n olat=(/tolat(aa(0))/)\n olon=(/tolon(aa(0))/)\n oO3=(/-999.0/)\n o8O3 = new(17,\"float\")\n o8O3 = -999.0\nend if\ndelete(aa)\ndo while (iflag.gt.0)\n aa=ind((tolat.eq.olat(ii)).and.(tolon.eq.olon(ii)).and.(toO3.ge.0))\n oflag(aa)=0\n if (dimsizes(aa).eq.24) then ; calculate 24 h, so calculate 8hr ozone here\n do ih = 0, 16\n o8O3(ih) = avg(toO3(aa(ih:ih+7)))\n end do\n oO3(ii)=max(o8O3)\n end if\n o8O3 = -999.0\n delete(aa)\n aa=ind((oflag.gt.0).and.(toO3.ge.0))\n if (any(ismissing(aa))) then\n iflag=0\n else\n xx=array_append_record(olat,(/tolat(aa(0))/),0)\n yy=array_append_record(olon,(/tolon(aa(0))/),0)\n zz=array_append_record(oO3,(/-999.0/),0)\n delete([/olat,olon,oO3/])\n olat=xx\n olon=yy\n oO3=zz\n delete([/xx,yy,zz/])\n ii=ii+1\n end if\n delete(aa)\nend do\nprint(\"obs 8hour max end\")\naa=ind(oO3.ge.0)\nnobs=dimsizes(aa)\nolat24=olat(aa)\nolon24=olon(aa)\noO324=oO3(aa)\nprint(\"TYPE of oO324: \"+typeof(oO324))\ndelete([/aa,olat,olon,oO3/])\nmO324=oO324*0-999.0\nprint(\"TYPE of mO324: \"+typeof(mO324))\nprint(\"TYPE of mO31d: \"+typeof(mO31d))\nareaa=oO324*0-999.0\nareab=areaa\naread=areaa\n\n;-----find model point-----\ndo in=0,nobs-1\n dis=sqrt((mlat1d-olat24(in))^2+(mlon1d-olon24(in))^2)\n aa=minind(dis)\n ;print(in+\" \"+aa)\n if (dis(aa).lt.maxdist) then\n mO324(in)=mO31d(aa)\n cc=ind((mlat1d.ge.(olat24(in)-maxarea)).and.(mlat1d.le.(olat24(in)+maxarea)).and.\\\n (mlon1d.ge.(olon24(in)-maxarea)).and.(mlon1d.le.(olon24(in)+maxarea)))\n areaa(in)=0\n areab(in)=0\n if (oO324(in).ge.thd) then\n aread(in)=0\n if (max(mO31d(cc)).ge.thd) then\n areab(in)=1\n else\n aread(in)=1\n end if\n else\n bb=ind((olat24.ge.(olat24(in)-maxarea)).and.(olat24.le.(olat24(in)+maxarea)).and.\\\n (olon24.ge.(olon24(in)-maxarea)).and.(olon24.le.(olon24(in)+maxarea)))\n if (max(mO31d(aa)).ge.thd) then\n if (max(oO324(bb)).ge.thd) then\n areaa(in)=0\n else\n areaa(in)=1\n end if\n else\n areaa(in)=0\n end if\n delete(bb)\n end if\n delete(cc)\n end if\n delete(aa)\nend do\n\n;-----cal rmse corr nme nmb me mb-----\ntt=ind((mO324.ge.0).and.(oO324.ge.0))\n\nif (any(ismissing(tt))) then\n rmse=-999.0\n corr=-999.0\n nmb=-999.0\n nme=-999.0\n me=-999.0\n mb=-999.0\nelse\n rmse=dim_rmsd_n(oO324(tt),mO324(tt),0)\n corr=esccr(oO324(tt),mO324(tt),0)\n 
nmb=sum((mO324(tt)-oO324(tt)))/sum(oO324(tt))\n nme=sum(abs(oO324(tt)-mO324(tt)))/sum(oO324(tt))\n me=avg(abs(oO324(tt)-mO324(tt)))\n mb=avg((mO324(tt)-oO324(tt)))\nend if\n;-----cal ah afar-----\naa=ind((areaa+areab).gt.0)\nbb=ind((aread+areab).gt.0)\nif (any(ismissing(aa))) then\n afar=0.\nelse\n afar=tofloat(sum(areaa(aa)))/tofloat(sum(areab(aa))+sum(areaa(aa)))*100\nend if\ndelete(aa)\nif (any(ismissing(bb))) then\n ah=-999.0\nelse\n ah=tofloat(sum(areab(bb)))/tofloat(sum(areab(bb))+sum(aread(bb)))*100\nend if\ndelete(bb)\nwrite_table(wfname,\"a\",[/sdate,dimsizes(tt),avg(oO324(tt)),avg(mO324(tt)),rmse,corr,nmb,nme,mb,me,ah,afar/],\\\n \"%s,%i,%f,%f,%f,%f,%f,%f,%f,%f,%f,%f\")\ndelete(tt)\nend\n\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\n\nif [ $? -eq 0 ]; then\n echo \"Evaluation Completed Successfully\"\n\techo \"Removing ncl file: geoweaver_eva_daily_O3.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\nelse\n echo \"Evaluation Failed!\"\nfi\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\nfatal:asciiread: Unable to open input file (/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X/AQF5X_Hourly_2022062512.dat)\n\nfatal:[\"Execute.c\":8637]:Execute: Error occurred at or near line 81 in file /groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\n\n\nEvaluation Completed Successfully\nRemoving ncl file: geoweaver_eva_daily_O3.ncl...\n", + "history_begin_time" : 1656455770012, + "history_end_time" : 1656455774247, + "history_notes" : null, + "history_process" : "fsk7f2", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "f3k0oTx9hPzr", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=$(date -d '3 day ago' '+%Y%m%d')\nexport wfname=\"/groups/ESS/aalnaim/cmaq/results/geoweaver_evalution_\"$YYYYMMDD_POST\"_results.txt\"\n\nexport obs_dir_NCL=\"/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X\"\nexport ofname=\"/AQF5X_Hourly_\"\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files/\"\n\nexport mfname=\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"$YYYYMMDD_POST\"_ML_extracted.nc\"\n\nexport grid_fname=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/GRIDCRO2D_\"$YYYYMMDD_POST\".nc\" #This needs to be auto date\n\nexport dx=12000\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\n\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\nsdate=getenv(\"YYYYMMDD_POST\")\nwfname=getenv(\"wfname\")\nobs_dir=getenv(\"obs_dir_NCL\")\nofname=getenv(\"ofname\")\nmod_dir=getenv(\"postdata_dir\")\nmfname=getenv(\"mfname\")\ndkm=tofloat(getenv(\"dx\"))\ngrid_fname=(getenv(\"grid_fname\"))\n\nmaxdist=dkm/90000.0*1.414\nmaxarea=0.25\nthd=70\n\n;-----read model lat lon------\n;read lat lon\nf1 = addfile(grid_fname,\"r\")\nmlat = f1->LAT(0,0,:,:)\nmlon = f1->LON(0,0,:,:)\ndelete(f1)\nmlat1d = ndtooned(mlat)\nmlon1d = 
ndtooned(mlon)\ndelete([/mlat,mlon/])\n\n;-----read cmaq results-----\nf2 = addfile(mod_dir+mfname,\"r\")\nmO3 = f2->O3(:,:,:) ;ppb\n\n\nnt = dimsizes(mO3(:,0,0))\nny = dimsizes(mO3(0,:,0))\nnx = dimsizes(mO3(0,0,:))\n\nm8O3 = new((/17,ny,nx/),\"double\")\nm8maxO3 = new((/ny,nx/),\"double\")\n\ndo ih=0,16\n m8O3(ih,:,:)=dim_avg_n(mO3(ih:ih+7,:,:),0)\nend do\nm8maxO3 = dim_max_n(m8O3,0) ;type double\nmO31d_d=ndtooned(m8maxO3) ; type double\nmO31d=tofloat(mO31d_d)\n\ndelete([/f2,mO3,m8O3,m8maxO3/])\n\n;-----read obs-----\nsyyyy1=str_get_cols(sdate,0,3)\nsmm1=str_get_cols(sdate,4,5)\nsdd1=str_get_cols(sdate,6,7)\n\nymd=jul2greg(greg2jul(tointeger(syyyy1),tointeger(smm1),tointeger(sdd1),-1)+1)\nsyyyy2=tostring_with_format(ymd(0),\"%0.4i\")\nsmm2=tostring_with_format(ymd(1),\"%0.2i\")\nsdd2=tostring_with_format(ymd(2),\"%0.2i\")\n\ntolat=(/-999.0/) ;set the first data to 0\ntolon=tolat\ntoO3=tolat\n\ndo ih=12,35\n if (ih.lt.24) then\n shh=tostring_with_format(ih,\"%0.2i\")\n syyyy=syyyy1\n smm=smm1\n sdd=sdd1\n else\n shh=tostring_with_format(ih-24,\"%0.2i\")\n syyyy=syyyy2\n smm=smm2\n sdd=sdd2\n end if\n data=asciiread(obs_dir+ofname+syyyy+smm+sdd+shh+\".dat\",-1,\"string\")\n xx=array_append_record(tolat,stringtofloat(str_get_field(data(1::), 2,\",\")),0)\n yy=array_append_record(tolon,stringtofloat(str_get_field(data(1::), 3,\",\")),0)\n zz=array_append_record(toO3,stringtofloat(str_get_field(data(1::), 4,\",\")),0)\n delete([/tolat,tolon,toO3/])\n tolat=xx\n tolon=yy\n toO3=zz\n delete([/xx,yy,zz/])\n delete(data)\nend do\n\ntoO3@_FillValue = -999.0\n\n;-----calculate max ave 8 hour o3-----\noflag=tolat*0+1\naa=ind((oflag.gt.0).and.(toO3.ge.0))\nii=0\nprint(\"8h start\")\nif (any(ismissing(aa))) then\n iflag=0\nelse\n iflag=1\n olat=(/tolat(aa(0))/)\n olon=(/tolon(aa(0))/)\n oO3=(/-999.0/)\n o8O3 = new(17,\"float\")\n o8O3 = -999.0\nend if\ndelete(aa)\ndo while (iflag.gt.0)\n aa=ind((tolat.eq.olat(ii)).and.(tolon.eq.olon(ii)).and.(toO3.ge.0))\n oflag(aa)=0\n if (dimsizes(aa).eq.24) then ; calculate 24 h, so calculate 8hr ozone here\n do ih = 0, 16\n o8O3(ih) = avg(toO3(aa(ih:ih+7)))\n end do\n oO3(ii)=max(o8O3)\n end if\n o8O3 = -999.0\n delete(aa)\n aa=ind((oflag.gt.0).and.(toO3.ge.0))\n if (any(ismissing(aa))) then\n iflag=0\n else\n xx=array_append_record(olat,(/tolat(aa(0))/),0)\n yy=array_append_record(olon,(/tolon(aa(0))/),0)\n zz=array_append_record(oO3,(/-999.0/),0)\n delete([/olat,olon,oO3/])\n olat=xx\n olon=yy\n oO3=zz\n delete([/xx,yy,zz/])\n ii=ii+1\n end if\n delete(aa)\nend do\nprint(\"obs 8hour max end\")\naa=ind(oO3.ge.0)\nnobs=dimsizes(aa)\nolat24=olat(aa)\nolon24=olon(aa)\noO324=oO3(aa)\nprint(\"TYPE of oO324: \"+typeof(oO324))\ndelete([/aa,olat,olon,oO3/])\nmO324=oO324*0-999.0\nprint(\"TYPE of mO324: \"+typeof(mO324))\nprint(\"TYPE of mO31d: \"+typeof(mO31d))\nareaa=oO324*0-999.0\nareab=areaa\naread=areaa\n\n;-----find model point-----\ndo in=0,nobs-1\n dis=sqrt((mlat1d-olat24(in))^2+(mlon1d-olon24(in))^2)\n aa=minind(dis)\n ;print(in+\" \"+aa)\n if (dis(aa).lt.maxdist) then\n mO324(in)=mO31d(aa)\n cc=ind((mlat1d.ge.(olat24(in)-maxarea)).and.(mlat1d.le.(olat24(in)+maxarea)).and.\\\n (mlon1d.ge.(olon24(in)-maxarea)).and.(mlon1d.le.(olon24(in)+maxarea)))\n areaa(in)=0\n areab(in)=0\n if (oO324(in).ge.thd) then\n aread(in)=0\n if (max(mO31d(cc)).ge.thd) then\n areab(in)=1\n else\n aread(in)=1\n end if\n else\n bb=ind((olat24.ge.(olat24(in)-maxarea)).and.(olat24.le.(olat24(in)+maxarea)).and.\\\n (olon24.ge.(olon24(in)-maxarea)).and.(olon24.le.(olon24(in)+maxarea)))\n if 
(max(mO31d(aa)).ge.thd) then\n if (max(oO324(bb)).ge.thd) then\n areaa(in)=0\n else\n areaa(in)=1\n end if\n else\n areaa(in)=0\n end if\n delete(bb)\n end if\n delete(cc)\n end if\n delete(aa)\nend do\n\n;-----cal rmse corr nme nmb me mb-----\ntt=ind((mO324.ge.0).and.(oO324.ge.0))\n\nif (any(ismissing(tt))) then\n rmse=-999.0\n corr=-999.0\n nmb=-999.0\n nme=-999.0\n me=-999.0\n mb=-999.0\nelse\n rmse=dim_rmsd_n(oO324(tt),mO324(tt),0)\n corr=esccr(oO324(tt),mO324(tt),0)\n nmb=sum((mO324(tt)-oO324(tt)))/sum(oO324(tt))\n nme=sum(abs(oO324(tt)-mO324(tt)))/sum(oO324(tt))\n me=avg(abs(oO324(tt)-mO324(tt)))\n mb=avg((mO324(tt)-oO324(tt)))\nend if\n;-----cal ah afar-----\naa=ind((areaa+areab).gt.0)\nbb=ind((aread+areab).gt.0)\nif (any(ismissing(aa))) then\n afar=0.\nelse\n afar=tofloat(sum(areaa(aa)))/tofloat(sum(areab(aa))+sum(areaa(aa)))*100\nend if\ndelete(aa)\nif (any(ismissing(bb))) then\n ah=-999.0\nelse\n ah=tofloat(sum(areab(bb)))/tofloat(sum(areab(bb))+sum(aread(bb)))*100\nend if\ndelete(bb)\nwrite_table(wfname,\"a\",[/sdate,dimsizes(tt),avg(oO324(tt)),avg(mO324(tt)),rmse,corr,nmb,nme,mb,me,ah,afar/],\\\n \"%s,%i,%f,%f,%f,%f,%f,%f,%f,%f,%f,%f\")\ndelete(tt)\nend\n\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\n\nif [ $? -eq 0 ]; then\n echo \"Evaluation Completed Successfully\"\n\techo \"Removing ncl file: geoweaver_eva_daily_O3.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\nelse\n echo \"Evaluation Failed!\"\nfi\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\nfatal:asciiread: Unable to open input file (/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X/AQF5X_Hourly_2022062512.dat)\n\nfatal:[\"Execute.c\":8637]:Execute: Error occurred at or near line 81 in file /groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\n\n\nEvaluation Completed Successfully\nRemoving ncl file: geoweaver_eva_daily_O3.ncl...\n", + "history_begin_time" : 1656455055750, + "history_end_time" : null, + "history_notes" : null, + "history_process" : "fsk7f2", + "host_id" : null, + "indicator" : "Running" +},{ + "history_id" : "9jdh6cCI2ywB", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=$(date -d '3 day ago' '+%Y%m%d')\nexport wfname=\"/groups/ESS/aalnaim/cmaq/results/geoweaver_evalution_\"$YYYYMMDD_POST\"_results.txt\"\n\nexport obs_dir_NCL=\"/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X\"\nexport ofname=\"/AQF5X_Hourly_\"\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files/\"\n\nexport mfname=\"COMBINE3D_ACONC_v531_gcc_AQF5X_\"$YYYYMMDD_POST\"_ML_extracted.nc\"\n\nexport grid_fname=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/GRIDCRO2D_20220612.nc\" #This needs to be auto date\n\nexport dx=12000\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\n\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend 
setvalues\n\nbegin\nsdate=getenv(\"YYYYMMDD_POST\")\nwfname=getenv(\"wfname\")\nobs_dir=getenv(\"obs_dir_NCL\")\nofname=getenv(\"ofname\")\nmod_dir=getenv(\"postdata_dir\")\nmfname=getenv(\"mfname\")\ndkm=tofloat(getenv(\"dx\"))\ngrid_fname=(getenv(\"grid_fname\"))\n\nmaxdist=dkm/90000.0*1.414\nmaxarea=0.25\nthd=70\n\n;-----read model lat lon------\n;read lat lon\nf1 = addfile(grid_fname,\"r\")\nmlat = f1->LAT(0,0,:,:)\nmlon = f1->LON(0,0,:,:)\ndelete(f1)\nmlat1d = ndtooned(mlat)\nmlon1d = ndtooned(mlon)\ndelete([/mlat,mlon/])\n\n;-----read cmaq results-----\nf2 = addfile(mod_dir+mfname,\"r\")\nmO3 = f2->O3(:,:,:) ;ppb\n\n\nnt = dimsizes(mO3(:,0,0))\nny = dimsizes(mO3(0,:,0))\nnx = dimsizes(mO3(0,0,:))\n\nm8O3 = new((/17,ny,nx/),\"double\")\nm8maxO3 = new((/ny,nx/),\"double\")\n\ndo ih=0,16\n m8O3(ih,:,:)=dim_avg_n(mO3(ih:ih+7,:,:),0)\nend do\nm8maxO3 = dim_max_n(m8O3,0) ;type double\nmO31d_d=ndtooned(m8maxO3) ; type double\nmO31d=tofloat(mO31d_d)\n\ndelete([/f2,mO3,m8O3,m8maxO3/])\n\n;-----read obs-----\nsyyyy1=str_get_cols(sdate,0,3)\nsmm1=str_get_cols(sdate,4,5)\nsdd1=str_get_cols(sdate,6,7)\n\nymd=jul2greg(greg2jul(tointeger(syyyy1),tointeger(smm1),tointeger(sdd1),-1)+1)\nsyyyy2=tostring_with_format(ymd(0),\"%0.4i\")\nsmm2=tostring_with_format(ymd(1),\"%0.2i\")\nsdd2=tostring_with_format(ymd(2),\"%0.2i\")\n\ntolat=(/-999.0/) ;set the first data to 0\ntolon=tolat\ntoO3=tolat\n\ndo ih=12,35\n if (ih.lt.24) then\n shh=tostring_with_format(ih,\"%0.2i\")\n syyyy=syyyy1\n smm=smm1\n sdd=sdd1\n else\n shh=tostring_with_format(ih-24,\"%0.2i\")\n syyyy=syyyy2\n smm=smm2\n sdd=sdd2\n end if\n data=asciiread(obs_dir+ofname+syyyy+smm+sdd+shh+\".dat\",-1,\"string\")\n xx=array_append_record(tolat,stringtofloat(str_get_field(data(1::), 2,\",\")),0)\n yy=array_append_record(tolon,stringtofloat(str_get_field(data(1::), 3,\",\")),0)\n zz=array_append_record(toO3,stringtofloat(str_get_field(data(1::), 4,\",\")),0)\n delete([/tolat,tolon,toO3/])\n tolat=xx\n tolon=yy\n toO3=zz\n delete([/xx,yy,zz/])\n delete(data)\nend do\n\ntoO3@_FillValue = -999.0\n\n;-----calculate max ave 8 hour o3-----\noflag=tolat*0+1\naa=ind((oflag.gt.0).and.(toO3.ge.0))\nii=0\nprint(\"8h start\")\nif (any(ismissing(aa))) then\n iflag=0\nelse\n iflag=1\n olat=(/tolat(aa(0))/)\n olon=(/tolon(aa(0))/)\n oO3=(/-999.0/)\n o8O3 = new(17,\"float\")\n o8O3 = -999.0\nend if\ndelete(aa)\ndo while (iflag.gt.0)\n aa=ind((tolat.eq.olat(ii)).and.(tolon.eq.olon(ii)).and.(toO3.ge.0))\n oflag(aa)=0\n if (dimsizes(aa).eq.24) then ; calculate 24 h, so calculate 8hr ozone here\n do ih = 0, 16\n o8O3(ih) = avg(toO3(aa(ih:ih+7)))\n end do\n oO3(ii)=max(o8O3)\n end if\n o8O3 = -999.0\n delete(aa)\n aa=ind((oflag.gt.0).and.(toO3.ge.0))\n if (any(ismissing(aa))) then\n iflag=0\n else\n xx=array_append_record(olat,(/tolat(aa(0))/),0)\n yy=array_append_record(olon,(/tolon(aa(0))/),0)\n zz=array_append_record(oO3,(/-999.0/),0)\n delete([/olat,olon,oO3/])\n olat=xx\n olon=yy\n oO3=zz\n delete([/xx,yy,zz/])\n ii=ii+1\n end if\n delete(aa)\nend do\nprint(\"obs 8hour max end\")\naa=ind(oO3.ge.0)\nnobs=dimsizes(aa)\nolat24=olat(aa)\nolon24=olon(aa)\noO324=oO3(aa)\nprint(\"TYPE of oO324: \"+typeof(oO324))\ndelete([/aa,olat,olon,oO3/])\nmO324=oO324*0-999.0\nprint(\"TYPE of mO324: \"+typeof(mO324))\nprint(\"TYPE of mO31d: \"+typeof(mO31d))\nareaa=oO324*0-999.0\nareab=areaa\naread=areaa\n\n;-----find model point-----\ndo in=0,nobs-1\n dis=sqrt((mlat1d-olat24(in))^2+(mlon1d-olon24(in))^2)\n aa=minind(dis)\n ;print(in+\" \"+aa)\n if (dis(aa).lt.maxdist) then\n 
mO324(in)=mO31d(aa)\n cc=ind((mlat1d.ge.(olat24(in)-maxarea)).and.(mlat1d.le.(olat24(in)+maxarea)).and.\\\n (mlon1d.ge.(olon24(in)-maxarea)).and.(mlon1d.le.(olon24(in)+maxarea)))\n areaa(in)=0\n areab(in)=0\n if (oO324(in).ge.thd) then\n aread(in)=0\n if (max(mO31d(cc)).ge.thd) then\n areab(in)=1\n else\n aread(in)=1\n end if\n else\n bb=ind((olat24.ge.(olat24(in)-maxarea)).and.(olat24.le.(olat24(in)+maxarea)).and.\\\n (olon24.ge.(olon24(in)-maxarea)).and.(olon24.le.(olon24(in)+maxarea)))\n if (max(mO31d(aa)).ge.thd) then\n if (max(oO324(bb)).ge.thd) then\n areaa(in)=0\n else\n areaa(in)=1\n end if\n else\n areaa(in)=0\n end if\n delete(bb)\n end if\n delete(cc)\n end if\n delete(aa)\nend do\n\n;-----cal rmse corr nme nmb me mb-----\ntt=ind((mO324.ge.0).and.(oO324.ge.0))\n\nif (any(ismissing(tt))) then\n rmse=-999.0\n corr=-999.0\n nmb=-999.0\n nme=-999.0\n me=-999.0\n mb=-999.0\nelse\n rmse=dim_rmsd_n(oO324(tt),mO324(tt),0)\n corr=esccr(oO324(tt),mO324(tt),0)\n nmb=sum((mO324(tt)-oO324(tt)))/sum(oO324(tt))\n nme=sum(abs(oO324(tt)-mO324(tt)))/sum(oO324(tt))\n me=avg(abs(oO324(tt)-mO324(tt)))\n mb=avg((mO324(tt)-oO324(tt)))\nend if\n;-----cal ah afar-----\naa=ind((areaa+areab).gt.0)\nbb=ind((aread+areab).gt.0)\nif (any(ismissing(aa))) then\n afar=0.\nelse\n afar=tofloat(sum(areaa(aa)))/tofloat(sum(areab(aa))+sum(areaa(aa)))*100\nend if\ndelete(aa)\nif (any(ismissing(bb))) then\n ah=-999.0\nelse\n ah=tofloat(sum(areab(bb)))/tofloat(sum(areab(bb))+sum(aread(bb)))*100\nend if\ndelete(bb)\nwrite_table(wfname,\"a\",[/sdate,dimsizes(tt),avg(oO324(tt)),avg(mO324(tt)),rmse,corr,nmb,nme,mb,me,ah,afar/],\\\n \"%s,%i,%f,%f,%f,%f,%f,%f,%f,%f,%f,%f\")\ndelete(tt)\nend\n\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\n\nif [ $? -eq 0 ]; then\n echo \"Evaluation Completed Successfully\"\n\techo \"Removing ncl file: geoweaver_eva_daily_O3.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\nelse\n echo \"Evaluation Failed!\"\nfi\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\nfatal:asciiread: Unable to open input file (/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X/AQF5X_Hourly_2022062512.dat)\n\nfatal:[\"Execute.c\":8637]:Execute: Error occurred at or near line 81 in file /groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\n\n\nEvaluation Completed Successfully\nRemoving ncl file: geoweaver_eva_daily_O3.ncl...\n", + "history_begin_time" : 1656454944033, + "history_end_time" : null, + "history_notes" : null, + "history_process" : "fsk7f2", + "host_id" : null, + "indicator" : "Running" +},{ + "history_id" : "K37cpUMYGKJp", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=20220612 #This needs to be auto date\nexport wfname=\"/groups/ESS/aalnaim/cmaq/results/geoweaver_evalution_\"$YYYYMMDD_POST\"_results.txt\"\n\nexport obs_dir_NCL=\"/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X\"\nexport ofname=\"/AQF5X_Hourly_\"\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files/\"\n\nexport mfname=\"COMBINE3D_ACONC_v531_gcc_AQF5X_20220612_ML_extracted.nc\"\n\nexport grid_fname=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/GRIDCRO2D_20220612.nc\" #This needs to be auto date\n\nexport dx=12000\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\n\nload 
\"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\nsdate=getenv(\"YYYYMMDD_POST\")\nwfname=getenv(\"wfname\")\nobs_dir=getenv(\"obs_dir_NCL\")\nofname=getenv(\"ofname\")\nmod_dir=getenv(\"postdata_dir\")\nmfname=getenv(\"mfname\")\ndkm=tofloat(getenv(\"dx\"))\ngrid_fname=(getenv(\"grid_fname\"))\n\nmaxdist=dkm/90000.0*1.414\nmaxarea=0.25\nthd=70\n\n;-----read model lat lon------\n;read lat lon\nf1 = addfile(grid_fname,\"r\")\nmlat = f1->LAT(0,0,:,:)\nmlon = f1->LON(0,0,:,:)\ndelete(f1)\nmlat1d = ndtooned(mlat)\nmlon1d = ndtooned(mlon)\ndelete([/mlat,mlon/])\n\n;-----read cmaq results-----\nf2 = addfile(mod_dir+mfname,\"r\")\nmO3 = f2->O3(:,:,:) ;ppb\n\n\nnt = dimsizes(mO3(:,0,0))\nny = dimsizes(mO3(0,:,0))\nnx = dimsizes(mO3(0,0,:))\n\nm8O3 = new((/17,ny,nx/),\"double\")\nm8maxO3 = new((/ny,nx/),\"double\")\n\ndo ih=0,16\n m8O3(ih,:,:)=dim_avg_n(mO3(ih:ih+7,:,:),0)\nend do\nm8maxO3 = dim_max_n(m8O3,0) ;type double\nmO31d_d=ndtooned(m8maxO3) ; type double\nmO31d=tofloat(mO31d_d)\n\ndelete([/f2,mO3,m8O3,m8maxO3/])\n\n;-----read obs-----\nsyyyy1=str_get_cols(sdate,0,3)\nsmm1=str_get_cols(sdate,4,5)\nsdd1=str_get_cols(sdate,6,7)\n\nymd=jul2greg(greg2jul(tointeger(syyyy1),tointeger(smm1),tointeger(sdd1),-1)+1)\nsyyyy2=tostring_with_format(ymd(0),\"%0.4i\")\nsmm2=tostring_with_format(ymd(1),\"%0.2i\")\nsdd2=tostring_with_format(ymd(2),\"%0.2i\")\n\ntolat=(/-999.0/) ;set the first data to 0\ntolon=tolat\ntoO3=tolat\n\ndo ih=12,35\n if (ih.lt.24) then\n shh=tostring_with_format(ih,\"%0.2i\")\n syyyy=syyyy1\n smm=smm1\n sdd=sdd1\n else\n shh=tostring_with_format(ih-24,\"%0.2i\")\n syyyy=syyyy2\n smm=smm2\n sdd=sdd2\n end if\n data=asciiread(obs_dir+ofname+syyyy+smm+sdd+shh+\".dat\",-1,\"string\")\n xx=array_append_record(tolat,stringtofloat(str_get_field(data(1::), 2,\",\")),0)\n yy=array_append_record(tolon,stringtofloat(str_get_field(data(1::), 3,\",\")),0)\n zz=array_append_record(toO3,stringtofloat(str_get_field(data(1::), 4,\",\")),0)\n delete([/tolat,tolon,toO3/])\n tolat=xx\n tolon=yy\n toO3=zz\n delete([/xx,yy,zz/])\n delete(data)\nend do\n\ntoO3@_FillValue = -999.0\n\n;-----calculate max ave 8 hour o3-----\noflag=tolat*0+1\naa=ind((oflag.gt.0).and.(toO3.ge.0))\nii=0\nprint(\"8h start\")\nif (any(ismissing(aa))) then\n iflag=0\nelse\n iflag=1\n olat=(/tolat(aa(0))/)\n olon=(/tolon(aa(0))/)\n oO3=(/-999.0/)\n o8O3 = new(17,\"float\")\n o8O3 = -999.0\nend if\ndelete(aa)\ndo while (iflag.gt.0)\n aa=ind((tolat.eq.olat(ii)).and.(tolon.eq.olon(ii)).and.(toO3.ge.0))\n oflag(aa)=0\n if (dimsizes(aa).eq.24) then ; calculate 24 h, so calculate 8hr ozone here\n do ih = 0, 16\n o8O3(ih) = avg(toO3(aa(ih:ih+7)))\n end do\n oO3(ii)=max(o8O3)\n end if\n o8O3 = -999.0\n delete(aa)\n aa=ind((oflag.gt.0).and.(toO3.ge.0))\n if (any(ismissing(aa))) then\n iflag=0\n else\n xx=array_append_record(olat,(/tolat(aa(0))/),0)\n yy=array_append_record(olon,(/tolon(aa(0))/),0)\n zz=array_append_record(oO3,(/-999.0/),0)\n delete([/olat,olon,oO3/])\n olat=xx\n olon=yy\n oO3=zz\n delete([/xx,yy,zz/])\n ii=ii+1\n end if\n delete(aa)\nend do\nprint(\"obs 8hour max 
end\")\naa=ind(oO3.ge.0)\nnobs=dimsizes(aa)\nolat24=olat(aa)\nolon24=olon(aa)\noO324=oO3(aa)\nprint(\"TYPE of oO324: \"+typeof(oO324))\ndelete([/aa,olat,olon,oO3/])\nmO324=oO324*0-999.0\nprint(\"TYPE of mO324: \"+typeof(mO324))\nprint(\"TYPE of mO31d: \"+typeof(mO31d))\nareaa=oO324*0-999.0\nareab=areaa\naread=areaa\n\n;-----find model point-----\ndo in=0,nobs-1\n dis=sqrt((mlat1d-olat24(in))^2+(mlon1d-olon24(in))^2)\n aa=minind(dis)\n ;print(in+\" \"+aa)\n if (dis(aa).lt.maxdist) then\n mO324(in)=mO31d(aa)\n cc=ind((mlat1d.ge.(olat24(in)-maxarea)).and.(mlat1d.le.(olat24(in)+maxarea)).and.\\\n (mlon1d.ge.(olon24(in)-maxarea)).and.(mlon1d.le.(olon24(in)+maxarea)))\n areaa(in)=0\n areab(in)=0\n if (oO324(in).ge.thd) then\n aread(in)=0\n if (max(mO31d(cc)).ge.thd) then\n areab(in)=1\n else\n aread(in)=1\n end if\n else\n bb=ind((olat24.ge.(olat24(in)-maxarea)).and.(olat24.le.(olat24(in)+maxarea)).and.\\\n (olon24.ge.(olon24(in)-maxarea)).and.(olon24.le.(olon24(in)+maxarea)))\n if (max(mO31d(aa)).ge.thd) then\n if (max(oO324(bb)).ge.thd) then\n areaa(in)=0\n else\n areaa(in)=1\n end if\n else\n areaa(in)=0\n end if\n delete(bb)\n end if\n delete(cc)\n end if\n delete(aa)\nend do\n\n;-----cal rmse corr nme nmb me mb-----\ntt=ind((mO324.ge.0).and.(oO324.ge.0))\n\nif (any(ismissing(tt))) then\n rmse=-999.0\n corr=-999.0\n nmb=-999.0\n nme=-999.0\n me=-999.0\n mb=-999.0\nelse\n rmse=dim_rmsd_n(oO324(tt),mO324(tt),0)\n corr=esccr(oO324(tt),mO324(tt),0)\n nmb=sum((mO324(tt)-oO324(tt)))/sum(oO324(tt))\n nme=sum(abs(oO324(tt)-mO324(tt)))/sum(oO324(tt))\n me=avg(abs(oO324(tt)-mO324(tt)))\n mb=avg((mO324(tt)-oO324(tt)))\nend if\n;-----cal ah afar-----\naa=ind((areaa+areab).gt.0)\nbb=ind((aread+areab).gt.0)\nif (any(ismissing(aa))) then\n afar=0.\nelse\n afar=tofloat(sum(areaa(aa)))/tofloat(sum(areab(aa))+sum(areaa(aa)))*100\nend if\ndelete(aa)\nif (any(ismissing(bb))) then\n ah=-999.0\nelse\n ah=tofloat(sum(areab(bb)))/tofloat(sum(areab(bb))+sum(aread(bb)))*100\nend if\ndelete(bb)\nwrite_table(wfname,\"a\",[/sdate,dimsizes(tt),avg(oO324(tt)),avg(mO324(tt)),rmse,corr,nmb,nme,mb,me,ah,afar/],\\\n \"%s,%i,%f,%f,%f,%f,%f,%f,%f,%f,%f,%f\")\ndelete(tt)\nend\n\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\n\nif [ $? 
-eq 0 ]; then\n echo \"Evaluation Completed Successfully\"\n\techo \"Removing ncl file: geoweaver_eva_daily_O3.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\nelse\n echo \"Evaluation Failed!\"\nfi\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\n(0)\t8h start\n(0)\tobs 8hour max end\n(0)\tTYPE of oO324: float\n(0)\tTYPE of mO324: float\n(0)\tTYPE of mO31d: float\nEvaluation Completed Successfully\nRemoving ncl file: geoweaver_eva_daily_O3.ncl...\n", + "history_begin_time" : 1656037378359, + "history_end_time" : 1656318410402, + "history_notes" : null, + "history_process" : "fsk7f2", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "URZ5dQtlYmVJ", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=20220612 #This needs to be auto date\nexport wfname=\"/groups/ESS/aalnaim/cmaq/results/geoweaver_evalution_\"$YYYYMMDD_POST\"_results.txt\"\n\nexport obs_dir_NCL=\"/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X\"\nexport ofname=\"/AQF5X_Hourly_\"\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files/\"\n\nexport mfname=\"COMBINE3D_ACONC_v531_gcc_AQF5X_20220612_ML_extracted.nc\"\n\nexport grid_fname=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/GRIDCRO2D_20220613.nc\" #This needs to be auto date\n\nexport dx=12000\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\n\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\nsdate=getenv(\"YYYYMMDD_POST\")\nwfname=getenv(\"wfname\")\nobs_dir=getenv(\"obs_dir_NCL\")\nofname=getenv(\"ofname\")\nmod_dir=getenv(\"postdata_dir\")\nmfname=getenv(\"mfname\")\ndkm=tofloat(getenv(\"dx\"))\ngrid_fname=(getenv(\"grid_fname\"))\n\nmaxdist=dkm/90000.0*1.414\nmaxarea=0.25\nthd=70\n\n;-----read model lat lon------\n;read lat lon\nf1 = addfile(grid_fname,\"r\")\nmlat = f1->LAT(0,0,:,:)\nmlon = f1->LON(0,0,:,:)\ndelete(f1)\nmlat1d = ndtooned(mlat)\nmlon1d = ndtooned(mlon)\ndelete([/mlat,mlon/])\n\n;-----read cmaq results-----\nf2 = addfile(mod_dir+mfname,\"r\")\nmO3 = f2->O3(:,:,:) ;ppb\n\n\nnt = dimsizes(mO3(:,0,0))\nny = dimsizes(mO3(0,:,0))\nnx = dimsizes(mO3(0,0,:))\n\nm8O3 = new((/17,ny,nx/),\"double\")\nm8maxO3 = new((/ny,nx/),\"double\")\n\ndo ih=0,16\n m8O3(ih,:,:)=dim_avg_n(mO3(ih:ih+7,:,:),0)\nend do\nm8maxO3 = dim_max_n(m8O3,0) ;type double\nmO31d_d=ndtooned(m8maxO3) ; type double\nmO31d=tofloat(mO31d_d)\n\ndelete([/f2,mO3,m8O3,m8maxO3/])\n\n;-----read obs-----\nsyyyy1=str_get_cols(sdate,0,3)\nsmm1=str_get_cols(sdate,4,5)\nsdd1=str_get_cols(sdate,6,7)\n\nymd=jul2greg(greg2jul(tointeger(syyyy1),tointeger(smm1),tointeger(sdd1),-1)+1)\nsyyyy2=tostring_with_format(ymd(0),\"%0.4i\")\nsmm2=tostring_with_format(ymd(1),\"%0.2i\")\nsdd2=tostring_with_format(ymd(2),\"%0.2i\")\n\ntolat=(/-999.0/) ;set the first data to 0\ntolon=tolat\ntoO3=tolat\n\ndo ih=12,35\n if (ih.lt.24) then\n 
shh=tostring_with_format(ih,\"%0.2i\")\n syyyy=syyyy1\n smm=smm1\n sdd=sdd1\n else\n shh=tostring_with_format(ih-24,\"%0.2i\")\n syyyy=syyyy2\n smm=smm2\n sdd=sdd2\n end if\n data=asciiread(obs_dir+ofname+syyyy+smm+sdd+shh+\".dat\",-1,\"string\")\n xx=array_append_record(tolat,stringtofloat(str_get_field(data(1::), 2,\",\")),0)\n yy=array_append_record(tolon,stringtofloat(str_get_field(data(1::), 3,\",\")),0)\n zz=array_append_record(toO3,stringtofloat(str_get_field(data(1::), 4,\",\")),0)\n delete([/tolat,tolon,toO3/])\n tolat=xx\n tolon=yy\n toO3=zz\n delete([/xx,yy,zz/])\n delete(data)\nend do\n\ntoO3@_FillValue = -999.0\n\n;-----calculate max ave 8 hour o3-----\noflag=tolat*0+1\naa=ind((oflag.gt.0).and.(toO3.ge.0))\nii=0\nprint(\"8h start\")\nif (any(ismissing(aa))) then\n iflag=0\nelse\n iflag=1\n olat=(/tolat(aa(0))/)\n olon=(/tolon(aa(0))/)\n oO3=(/-999.0/)\n o8O3 = new(17,\"float\")\n o8O3 = -999.0\nend if\ndelete(aa)\ndo while (iflag.gt.0)\n aa=ind((tolat.eq.olat(ii)).and.(tolon.eq.olon(ii)).and.(toO3.ge.0))\n oflag(aa)=0\n if (dimsizes(aa).eq.24) then ; calculate 24 h, so calculate 8hr ozone here\n do ih = 0, 16\n o8O3(ih) = avg(toO3(aa(ih:ih+7)))\n end do\n oO3(ii)=max(o8O3)\n end if\n o8O3 = -999.0\n delete(aa)\n aa=ind((oflag.gt.0).and.(toO3.ge.0))\n if (any(ismissing(aa))) then\n iflag=0\n else\n xx=array_append_record(olat,(/tolat(aa(0))/),0)\n yy=array_append_record(olon,(/tolon(aa(0))/),0)\n zz=array_append_record(oO3,(/-999.0/),0)\n delete([/olat,olon,oO3/])\n olat=xx\n olon=yy\n oO3=zz\n delete([/xx,yy,zz/])\n ii=ii+1\n end if\n delete(aa)\nend do\nprint(\"obs 8hour max end\")\naa=ind(oO3.ge.0)\nnobs=dimsizes(aa)\nolat24=olat(aa)\nolon24=olon(aa)\noO324=oO3(aa)\nprint(\"TYPE of oO324: \"+typeof(oO324))\ndelete([/aa,olat,olon,oO3/])\nmO324=oO324*0-999.0\nprint(\"TYPE of mO324: \"+typeof(mO324))\nprint(\"TYPE of mO31d: \"+typeof(mO31d))\nareaa=oO324*0-999.0\nareab=areaa\naread=areaa\n\n;-----find model point-----\ndo in=0,nobs-1\n dis=sqrt((mlat1d-olat24(in))^2+(mlon1d-olon24(in))^2)\n aa=minind(dis)\n ;print(in+\" \"+aa)\n if (dis(aa).lt.maxdist) then\n mO324(in)=mO31d(aa)\n cc=ind((mlat1d.ge.(olat24(in)-maxarea)).and.(mlat1d.le.(olat24(in)+maxarea)).and.\\\n (mlon1d.ge.(olon24(in)-maxarea)).and.(mlon1d.le.(olon24(in)+maxarea)))\n areaa(in)=0\n areab(in)=0\n if (oO324(in).ge.thd) then\n aread(in)=0\n if (max(mO31d(cc)).ge.thd) then\n areab(in)=1\n else\n aread(in)=1\n end if\n else\n bb=ind((olat24.ge.(olat24(in)-maxarea)).and.(olat24.le.(olat24(in)+maxarea)).and.\\\n (olon24.ge.(olon24(in)-maxarea)).and.(olon24.le.(olon24(in)+maxarea)))\n if (max(mO31d(aa)).ge.thd) then\n if (max(oO324(bb)).ge.thd) then\n areaa(in)=0\n else\n areaa(in)=1\n end if\n else\n areaa(in)=0\n end if\n delete(bb)\n end if\n delete(cc)\n end if\n delete(aa)\nend do\n\n;-----cal rmse corr nme nmb me mb-----\ntt=ind((mO324.ge.0).and.(oO324.ge.0))\n\nif (any(ismissing(tt))) then\n rmse=-999.0\n corr=-999.0\n nmb=-999.0\n nme=-999.0\n me=-999.0\n mb=-999.0\nelse\n rmse=dim_rmsd_n(oO324(tt),mO324(tt),0)\n corr=esccr(oO324(tt),mO324(tt),0)\n nmb=sum((mO324(tt)-oO324(tt)))/sum(oO324(tt))\n nme=sum(abs(oO324(tt)-mO324(tt)))/sum(oO324(tt))\n me=avg(abs(oO324(tt)-mO324(tt)))\n mb=avg((mO324(tt)-oO324(tt)))\nend if\n;-----cal ah afar-----\naa=ind((areaa+areab).gt.0)\nbb=ind((aread+areab).gt.0)\nif (any(ismissing(aa))) then\n afar=0.\nelse\n afar=tofloat(sum(areaa(aa)))/tofloat(sum(areab(aa))+sum(areaa(aa)))*100\nend if\ndelete(aa)\nif (any(ismissing(bb))) then\n ah=-999.0\nelse\n 
ah=tofloat(sum(areab(bb)))/tofloat(sum(areab(bb))+sum(aread(bb)))*100\nend if\ndelete(bb)\nwrite_table(wfname,\"a\",[/sdate,dimsizes(tt),avg(oO324(tt)),avg(mO324(tt)),rmse,corr,nmb,nme,mb,me,ah,afar/],\\\n \"%s,%i,%f,%f,%f,%f,%f,%f,%f,%f,%f,%f\")\ndelete(tt)\nend\n\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\n\nif [ $? -eq 0 ]; then\n echo \"Evaluation Completed Successfully\"\n\techo \"Removing ncl file: geoweaver_eva_daily_O3.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\nelse\n echo \"Evaluation Failed!\"\nfi\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\n(0)\t8h start\n(0)\tobs 8hour max end\n(0)\tTYPE of oO324: float\n(0)\tTYPE of mO324: float\n(0)\tTYPE of mO31d: float\nEvaluation Completed Successfully\nRemoving ncl file: geoweaver_eva_daily_O3.ncl...\n", + "history_begin_time" : 1656036815515, + "history_end_time" : 1656318409840, + "history_notes" : null, + "history_process" : "fsk7f2", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "7N5lSRfJX2o0", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=20220613 #This needs to be auto date\nexport wfname=\"/groups/ESS/aalnaim/cmaq/results/geoweaver_evalution_\"$YYYYMMDD_POST\"_results.txt\"\n\nexport obs_dir_NCL=\"/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X\"\nexport ofname=\"/AQF5X_Hourly_\"\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files/\"\n\nexport mfname=\"COMBINE3D_ACONC_v531_gcc_AQF5X_20220612_ML_extracted.nc\"\n\nexport grid_fname=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/GRIDCRO2D_20220613.nc\" #This needs to be auto date\n\nexport dx=12000\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\n\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\nsdate=getenv(\"YYYYMMDD_POST\")\nwfname=getenv(\"wfname\")\nobs_dir=getenv(\"obs_dir_NCL\")\nofname=getenv(\"ofname\")\nmod_dir=getenv(\"postdata_dir\")\nmfname=getenv(\"mfname\")\ndkm=tofloat(getenv(\"dx\"))\ngrid_fname=(getenv(\"grid_fname\"))\n\nmaxdist=dkm/90000.0*1.414\nmaxarea=0.25\nthd=70\n\n;-----read model lat lon------\n;read lat lon\nf1 = addfile(grid_fname,\"r\")\nmlat = f1->LAT(0,0,:,:)\nmlon = f1->LON(0,0,:,:)\ndelete(f1)\nmlat1d = ndtooned(mlat)\nmlon1d = ndtooned(mlon)\ndelete([/mlat,mlon/])\n\n;-----read cmaq results-----\nf2 = addfile(mod_dir+mfname,\"r\")\nmO3 = f2->O3(:,:,:) ;ppb\n\n\nnt = dimsizes(mO3(:,0,0))\nny = dimsizes(mO3(0,:,0))\nnx = dimsizes(mO3(0,0,:))\n\nm8O3 = new((/17,ny,nx/),\"double\")\nm8maxO3 = new((/ny,nx/),\"double\")\n\ndo ih=0,16\n m8O3(ih,:,:)=dim_avg_n(mO3(ih:ih+7,:,:),0)\nend do\nm8maxO3 = dim_max_n(m8O3,0) ;type double\nmO31d_d=ndtooned(m8maxO3) ; type double\nmO31d=tofloat(mO31d_d)\n\ndelete([/f2,mO3,m8O3,m8maxO3/])\n\n;-----read 
obs-----\nsyyyy1=str_get_cols(sdate,0,3)\nsmm1=str_get_cols(sdate,4,5)\nsdd1=str_get_cols(sdate,6,7)\n\nymd=jul2greg(greg2jul(tointeger(syyyy1),tointeger(smm1),tointeger(sdd1),-1)+1)\nsyyyy2=tostring_with_format(ymd(0),\"%0.4i\")\nsmm2=tostring_with_format(ymd(1),\"%0.2i\")\nsdd2=tostring_with_format(ymd(2),\"%0.2i\")\n\ntolat=(/-999.0/) ;set the first data to 0\ntolon=tolat\ntoO3=tolat\n\ndo ih=12,35\n if (ih.lt.24) then\n shh=tostring_with_format(ih,\"%0.2i\")\n syyyy=syyyy1\n smm=smm1\n sdd=sdd1\n else\n shh=tostring_with_format(ih-24,\"%0.2i\")\n syyyy=syyyy2\n smm=smm2\n sdd=sdd2\n end if\n data=asciiread(obs_dir+ofname+syyyy+smm+sdd+shh+\".dat\",-1,\"string\")\n xx=array_append_record(tolat,stringtofloat(str_get_field(data(1::), 2,\",\")),0)\n yy=array_append_record(tolon,stringtofloat(str_get_field(data(1::), 3,\",\")),0)\n zz=array_append_record(toO3,stringtofloat(str_get_field(data(1::), 4,\",\")),0)\n delete([/tolat,tolon,toO3/])\n tolat=xx\n tolon=yy\n toO3=zz\n delete([/xx,yy,zz/])\n delete(data)\nend do\n\ntoO3@_FillValue = -999.0\n\n;-----calculate max ave 8 hour o3-----\noflag=tolat*0+1\naa=ind((oflag.gt.0).and.(toO3.ge.0))\nii=0\nprint(\"8h start\")\nif (any(ismissing(aa))) then\n iflag=0\nelse\n iflag=1\n olat=(/tolat(aa(0))/)\n olon=(/tolon(aa(0))/)\n oO3=(/-999.0/)\n o8O3 = new(17,\"float\")\n o8O3 = -999.0\nend if\ndelete(aa)\ndo while (iflag.gt.0)\n aa=ind((tolat.eq.olat(ii)).and.(tolon.eq.olon(ii)).and.(toO3.ge.0))\n oflag(aa)=0\n if (dimsizes(aa).eq.24) then ; calculate 24 h, so calculate 8hr ozone here\n do ih = 0, 16\n o8O3(ih) = avg(toO3(aa(ih:ih+7)))\n end do\n oO3(ii)=max(o8O3)\n end if\n o8O3 = -999.0\n delete(aa)\n aa=ind((oflag.gt.0).and.(toO3.ge.0))\n if (any(ismissing(aa))) then\n iflag=0\n else\n xx=array_append_record(olat,(/tolat(aa(0))/),0)\n yy=array_append_record(olon,(/tolon(aa(0))/),0)\n zz=array_append_record(oO3,(/-999.0/),0)\n delete([/olat,olon,oO3/])\n olat=xx\n olon=yy\n oO3=zz\n delete([/xx,yy,zz/])\n ii=ii+1\n end if\n delete(aa)\nend do\nprint(\"obs 8hour max end\")\naa=ind(oO3.ge.0)\nnobs=dimsizes(aa)\nolat24=olat(aa)\nolon24=olon(aa)\noO324=oO3(aa)\nprint(\"TYPE of oO324: \"+typeof(oO324))\ndelete([/aa,olat,olon,oO3/])\nmO324=oO324*0-999.0\nprint(\"TYPE of mO324: \"+typeof(mO324))\nprint(\"TYPE of mO31d: \"+typeof(mO31d))\nareaa=oO324*0-999.0\nareab=areaa\naread=areaa\n\n;-----find model point-----\ndo in=0,nobs-1\n dis=sqrt((mlat1d-olat24(in))^2+(mlon1d-olon24(in))^2)\n aa=minind(dis)\n ;print(in+\" \"+aa)\n if (dis(aa).lt.maxdist) then\n mO324(in)=mO31d(aa)\n cc=ind((mlat1d.ge.(olat24(in)-maxarea)).and.(mlat1d.le.(olat24(in)+maxarea)).and.\\\n (mlon1d.ge.(olon24(in)-maxarea)).and.(mlon1d.le.(olon24(in)+maxarea)))\n areaa(in)=0\n areab(in)=0\n if (oO324(in).ge.thd) then\n aread(in)=0\n if (max(mO31d(cc)).ge.thd) then\n areab(in)=1\n else\n aread(in)=1\n end if\n else\n bb=ind((olat24.ge.(olat24(in)-maxarea)).and.(olat24.le.(olat24(in)+maxarea)).and.\\\n (olon24.ge.(olon24(in)-maxarea)).and.(olon24.le.(olon24(in)+maxarea)))\n if (max(mO31d(aa)).ge.thd) then\n if (max(oO324(bb)).ge.thd) then\n areaa(in)=0\n else\n areaa(in)=1\n end if\n else\n areaa(in)=0\n end if\n delete(bb)\n end if\n delete(cc)\n end if\n delete(aa)\nend do\n\n;-----cal rmse corr nme nmb me mb-----\ntt=ind((mO324.ge.0).and.(oO324.ge.0))\n\nif (any(ismissing(tt))) then\n rmse=-999.0\n corr=-999.0\n nmb=-999.0\n nme=-999.0\n me=-999.0\n mb=-999.0\nelse\n rmse=dim_rmsd_n(oO324(tt),mO324(tt),0)\n corr=esccr(oO324(tt),mO324(tt),0)\n 
nmb=sum((mO324(tt)-oO324(tt)))/sum(oO324(tt))\n nme=sum(abs(oO324(tt)-mO324(tt)))/sum(oO324(tt))\n me=avg(abs(oO324(tt)-mO324(tt)))\n mb=avg((mO324(tt)-oO324(tt)))\nend if\n;-----cal ah afar-----\naa=ind((areaa+areab).gt.0)\nbb=ind((aread+areab).gt.0)\nif (any(ismissing(aa))) then\n afar=0.\nelse\n afar=tofloat(sum(areaa(aa)))/tofloat(sum(areab(aa))+sum(areaa(aa)))*100\nend if\ndelete(aa)\nif (any(ismissing(bb))) then\n ah=-999.0\nelse\n ah=tofloat(sum(areab(bb)))/tofloat(sum(areab(bb))+sum(aread(bb)))*100\nend if\ndelete(bb)\nwrite_table(wfname,\"a\",[/sdate,dimsizes(tt),avg(oO324(tt)),avg(mO324(tt)),rmse,corr,nmb,nme,mb,me,ah,afar/],\\\n \"%s,%i,%f,%f,%f,%f,%f,%f,%f,%f,%f,%f\")\ndelete(tt)\nend\n\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\n\nif [ $? -eq 0 ]; then\n echo \"Evaluation Completed Successfully\"\n\techo \"Removing ncl file: geoweaver_eva_daily_O3.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\nelse\n echo \"Evaluation Failed!\"\nfi\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\n(0)\t8h start\n(0)\tobs 8hour max end\n(0)\tTYPE of oO324: float\n(0)\tTYPE of mO324: float\n(0)\tTYPE of mO31d: float\nEvaluation Completed Successfully\nRemoving ncl file: geoweaver_eva_daily_O3.ncl...\n", + "history_begin_time" : 1655842911258, + "history_end_time" : 1656318408966, + "history_notes" : null, + "history_process" : "fsk7f2", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "kOxvtCV5OeB8", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=20220613 #This needs to be auto date\nexport wfname=\"/groups/ESS/aalnaim/cmaq/results/geoweaver_evalution_\"+$YYYYMMDD_POST+\"_results.txt\"\n\nexport obs_dir_NCL=\"/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X\"\nexport ofname=\"/AQF5X_Hourly_\"\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files/\"\n\nexport mfname=\"COMBINE3D_ACONC_v531_gcc_AQF5X_20220612_ML_extracted.nc\"\n\nexport grid_fname=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/GRIDCRO2D_20220613.nc\" #This needs to be auto date\n\nexport dx=12000\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\n\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\nsdate=getenv(\"YYYYMMDD_POST\")\nwfname=getenv(\"wfname\")\nobs_dir=getenv(\"obs_dir_NCL\")\nofname=getenv(\"ofname\")\nmod_dir=getenv(\"postdata_dir\")\nmfname=getenv(\"mfname\")\ndkm=tofloat(getenv(\"dx\"))\ngrid_fname=(getenv(\"grid_fname\"))\n\nmaxdist=dkm/90000.0*1.414\nmaxarea=0.25\nthd=70\n\n;-----read model lat lon------\n;read lat lon\nf1 = addfile(grid_fname,\"r\")\nmlat = f1->LAT(0,0,:,:)\nmlon = f1->LON(0,0,:,:)\ndelete(f1)\nmlat1d = ndtooned(mlat)\nmlon1d = ndtooned(mlon)\ndelete([/mlat,mlon/])\n\n;-----read cmaq results-----\nf2 = addfile(mod_dir+mfname,\"r\")\nmO3 = f2->O3(:,:,:) ;ppb\n\n\nnt = 
dimsizes(mO3(:,0,0))\nny = dimsizes(mO3(0,:,0))\nnx = dimsizes(mO3(0,0,:))\n\nm8O3 = new((/17,ny,nx/),\"double\")\nm8maxO3 = new((/ny,nx/),\"double\")\n\ndo ih=0,16\n m8O3(ih,:,:)=dim_avg_n(mO3(ih:ih+7,:,:),0)\nend do\nm8maxO3 = dim_max_n(m8O3,0) ;type double\nmO31d_d=ndtooned(m8maxO3) ; type double\nmO31d=tofloat(mO31d_d)\n\ndelete([/f2,mO3,m8O3,m8maxO3/])\n\n;-----read obs-----\nsyyyy1=str_get_cols(sdate,0,3)\nsmm1=str_get_cols(sdate,4,5)\nsdd1=str_get_cols(sdate,6,7)\n\nymd=jul2greg(greg2jul(tointeger(syyyy1),tointeger(smm1),tointeger(sdd1),-1)+1)\nsyyyy2=tostring_with_format(ymd(0),\"%0.4i\")\nsmm2=tostring_with_format(ymd(1),\"%0.2i\")\nsdd2=tostring_with_format(ymd(2),\"%0.2i\")\n\ntolat=(/-999.0/) ;set the first data to 0\ntolon=tolat\ntoO3=tolat\n\ndo ih=12,35\n if (ih.lt.24) then\n shh=tostring_with_format(ih,\"%0.2i\")\n syyyy=syyyy1\n smm=smm1\n sdd=sdd1\n else\n shh=tostring_with_format(ih-24,\"%0.2i\")\n syyyy=syyyy2\n smm=smm2\n sdd=sdd2\n end if\n data=asciiread(obs_dir+ofname+syyyy+smm+sdd+shh+\".dat\",-1,\"string\")\n xx=array_append_record(tolat,stringtofloat(str_get_field(data(1::), 2,\",\")),0)\n yy=array_append_record(tolon,stringtofloat(str_get_field(data(1::), 3,\",\")),0)\n zz=array_append_record(toO3,stringtofloat(str_get_field(data(1::), 4,\",\")),0)\n delete([/tolat,tolon,toO3/])\n tolat=xx\n tolon=yy\n toO3=zz\n delete([/xx,yy,zz/])\n delete(data)\nend do\n\ntoO3@_FillValue = -999.0\n\n;-----calculate max ave 8 hour o3-----\noflag=tolat*0+1\naa=ind((oflag.gt.0).and.(toO3.ge.0))\nii=0\nprint(\"8h start\")\nif (any(ismissing(aa))) then\n iflag=0\nelse\n iflag=1\n olat=(/tolat(aa(0))/)\n olon=(/tolon(aa(0))/)\n oO3=(/-999.0/)\n o8O3 = new(17,\"float\")\n o8O3 = -999.0\nend if\ndelete(aa)\ndo while (iflag.gt.0)\n aa=ind((tolat.eq.olat(ii)).and.(tolon.eq.olon(ii)).and.(toO3.ge.0))\n oflag(aa)=0\n if (dimsizes(aa).eq.24) then ; calculate 24 h, so calculate 8hr ozone here\n do ih = 0, 16\n o8O3(ih) = avg(toO3(aa(ih:ih+7)))\n end do\n oO3(ii)=max(o8O3)\n end if\n o8O3 = -999.0\n delete(aa)\n aa=ind((oflag.gt.0).and.(toO3.ge.0))\n if (any(ismissing(aa))) then\n iflag=0\n else\n xx=array_append_record(olat,(/tolat(aa(0))/),0)\n yy=array_append_record(olon,(/tolon(aa(0))/),0)\n zz=array_append_record(oO3,(/-999.0/),0)\n delete([/olat,olon,oO3/])\n olat=xx\n olon=yy\n oO3=zz\n delete([/xx,yy,zz/])\n ii=ii+1\n end if\n delete(aa)\nend do\nprint(\"obs 8hour max end\")\naa=ind(oO3.ge.0)\nnobs=dimsizes(aa)\nolat24=olat(aa)\nolon24=olon(aa)\noO324=oO3(aa)\nprint(\"TYPE of oO324: \"+typeof(oO324))\ndelete([/aa,olat,olon,oO3/])\nmO324=oO324*0-999.0\nprint(\"TYPE of mO324: \"+typeof(mO324))\nprint(\"TYPE of mO31d: \"+typeof(mO31d))\nareaa=oO324*0-999.0\nareab=areaa\naread=areaa\n\n;-----find model point-----\ndo in=0,nobs-1\n dis=sqrt((mlat1d-olat24(in))^2+(mlon1d-olon24(in))^2)\n aa=minind(dis)\n ;print(in+\" \"+aa)\n if (dis(aa).lt.maxdist) then\n mO324(in)=mO31d(aa)\n cc=ind((mlat1d.ge.(olat24(in)-maxarea)).and.(mlat1d.le.(olat24(in)+maxarea)).and.\\\n (mlon1d.ge.(olon24(in)-maxarea)).and.(mlon1d.le.(olon24(in)+maxarea)))\n areaa(in)=0\n areab(in)=0\n if (oO324(in).ge.thd) then\n aread(in)=0\n if (max(mO31d(cc)).ge.thd) then\n areab(in)=1\n else\n aread(in)=1\n end if\n else\n bb=ind((olat24.ge.(olat24(in)-maxarea)).and.(olat24.le.(olat24(in)+maxarea)).and.\\\n (olon24.ge.(olon24(in)-maxarea)).and.(olon24.le.(olon24(in)+maxarea)))\n if (max(mO31d(aa)).ge.thd) then\n if (max(oO324(bb)).ge.thd) then\n areaa(in)=0\n else\n areaa(in)=1\n end if\n else\n areaa(in)=0\n end if\n 
delete(bb)\n end if\n delete(cc)\n end if\n delete(aa)\nend do\n\n;-----cal rmse corr nme nmb me mb-----\ntt=ind((mO324.ge.0).and.(oO324.ge.0))\n\nif (any(ismissing(tt))) then\n rmse=-999.0\n corr=-999.0\n nmb=-999.0\n nme=-999.0\n me=-999.0\n mb=-999.0\nelse\n rmse=dim_rmsd_n(oO324(tt),mO324(tt),0)\n corr=esccr(oO324(tt),mO324(tt),0)\n nmb=sum((mO324(tt)-oO324(tt)))/sum(oO324(tt))\n nme=sum(abs(oO324(tt)-mO324(tt)))/sum(oO324(tt))\n me=avg(abs(oO324(tt)-mO324(tt)))\n mb=avg((mO324(tt)-oO324(tt)))\nend if\n;-----cal ah afar-----\naa=ind((areaa+areab).gt.0)\nbb=ind((aread+areab).gt.0)\nif (any(ismissing(aa))) then\n afar=0.\nelse\n afar=tofloat(sum(areaa(aa)))/tofloat(sum(areab(aa))+sum(areaa(aa)))*100\nend if\ndelete(aa)\nif (any(ismissing(bb))) then\n ah=-999.0\nelse\n ah=tofloat(sum(areab(bb)))/tofloat(sum(areab(bb))+sum(aread(bb)))*100\nend if\ndelete(bb)\nwrite_table(wfname,\"a\",[/sdate,dimsizes(tt),avg(oO324(tt)),avg(mO324(tt)),rmse,corr,nmb,nme,mb,me,ah,afar/],\\\n \"%s,%i,%f,%f,%f,%f,%f,%f,%f,%f,%f,%f\")\ndelete(tt)\nend\n\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\n\nif [ $? -eq 0 ]; then\n echo \"Evaluation Completed Successfully\"\n\techo \"Removing ncl file: geoweaver_eva_daily_O3.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\nelse\n echo \"Evaluation Failed!\"\nfi\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\n(0)\t8h start\n(0)\tobs 8hour max end\n(0)\tTYPE of oO324: float\n(0)\tTYPE of mO324: float\n(0)\tTYPE of mO31d: float\nEvaluation Completed Successfully\nRemoving ncl file: geoweaver_eva_daily_O3.ncl...\n", + "history_begin_time" : 1655842857383, + "history_end_time" : 1656318408392, + "history_notes" : null, + "history_process" : "fsk7f2", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "ULJLDcSv6Tbc", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=20220613 #This needs to be auto date\nexport wfname=\"/groups/ESS/aalnaim/cmaq/results/geoweaver_evalution_results.txt\"\n\nexport obs_dir_NCL=\"/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X\"\nexport ofname=\"/AQF5X_Hourly_\"\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files/\"\n\nexport mfname=\"COMBINE3D_ACONC_v531_gcc_AQF5X_20220612_ML_extracted.nc\"\n\nexport grid_fname=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/GRIDCRO2D_20220613.nc\" #This needs to be auto date\n\nexport dx=12000\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\n\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\nsdate=getenv(\"YYYYMMDD_POST\")\nwfname=getenv(\"wfname\")\nobs_dir=getenv(\"obs_dir_NCL\")\nofname=getenv(\"ofname\")\nmod_dir=getenv(\"postdata_dir\")\nmfname=getenv(\"mfname\")\ndkm=tofloat(getenv(\"dx\"))\ngrid_fname=(getenv(\"grid_fname\"))\n\nmaxdist=dkm/90000.0*1.414\nmaxarea=0.25\nthd=70\n\n;-----read model 
lat lon------\n;read lat lon\nf1 = addfile(grid_fname,\"r\")\nmlat = f1->LAT(0,0,:,:)\nmlon = f1->LON(0,0,:,:)\ndelete(f1)\nmlat1d = ndtooned(mlat)\nmlon1d = ndtooned(mlon)\ndelete([/mlat,mlon/])\n\n;-----read cmaq results-----\nf2 = addfile(mod_dir+mfname,\"r\")\nmO3 = f2->O3(:,:,:) ;ppb\n\n\nnt = dimsizes(mO3(:,0,0))\nny = dimsizes(mO3(0,:,0))\nnx = dimsizes(mO3(0,0,:))\n\nm8O3 = new((/17,ny,nx/),\"double\")\nm8maxO3 = new((/ny,nx/),\"double\")\n\ndo ih=0,16\n m8O3(ih,:,:)=dim_avg_n(mO3(ih:ih+7,:,:),0)\nend do\nm8maxO3 = dim_max_n(m8O3,0) ;type double\nmO31d_d=ndtooned(m8maxO3) ; type double\nmO31d=tofloat(mO31d_d)\n\ndelete([/f2,mO3,m8O3,m8maxO3/])\n\n;-----read obs-----\nsyyyy1=str_get_cols(sdate,0,3)\nsmm1=str_get_cols(sdate,4,5)\nsdd1=str_get_cols(sdate,6,7)\n\nymd=jul2greg(greg2jul(tointeger(syyyy1),tointeger(smm1),tointeger(sdd1),-1)+1)\nsyyyy2=tostring_with_format(ymd(0),\"%0.4i\")\nsmm2=tostring_with_format(ymd(1),\"%0.2i\")\nsdd2=tostring_with_format(ymd(2),\"%0.2i\")\n\ntolat=(/-999.0/) ;set the first data to 0\ntolon=tolat\ntoO3=tolat\n\ndo ih=12,35\n if (ih.lt.24) then\n shh=tostring_with_format(ih,\"%0.2i\")\n syyyy=syyyy1\n smm=smm1\n sdd=sdd1\n else\n shh=tostring_with_format(ih-24,\"%0.2i\")\n syyyy=syyyy2\n smm=smm2\n sdd=sdd2\n end if\n data=asciiread(obs_dir+ofname+syyyy+smm+sdd+shh+\".dat\",-1,\"string\")\n xx=array_append_record(tolat,stringtofloat(str_get_field(data(1::), 2,\",\")),0)\n yy=array_append_record(tolon,stringtofloat(str_get_field(data(1::), 3,\",\")),0)\n zz=array_append_record(toO3,stringtofloat(str_get_field(data(1::), 4,\",\")),0)\n delete([/tolat,tolon,toO3/])\n tolat=xx\n tolon=yy\n toO3=zz\n delete([/xx,yy,zz/])\n delete(data)\nend do\n\ntoO3@_FillValue = -999.0\n\n;-----calculate max ave 8 hour o3-----\noflag=tolat*0+1\naa=ind((oflag.gt.0).and.(toO3.ge.0))\nii=0\nprint(\"8h start\")\nif (any(ismissing(aa))) then\n iflag=0\nelse\n iflag=1\n olat=(/tolat(aa(0))/)\n olon=(/tolon(aa(0))/)\n oO3=(/-999.0/)\n o8O3 = new(17,\"float\")\n o8O3 = -999.0\nend if\ndelete(aa)\ndo while (iflag.gt.0)\n aa=ind((tolat.eq.olat(ii)).and.(tolon.eq.olon(ii)).and.(toO3.ge.0))\n oflag(aa)=0\n if (dimsizes(aa).eq.24) then ; calculate 24 h, so calculate 8hr ozone here\n do ih = 0, 16\n o8O3(ih) = avg(toO3(aa(ih:ih+7)))\n end do\n oO3(ii)=max(o8O3)\n end if\n o8O3 = -999.0\n delete(aa)\n aa=ind((oflag.gt.0).and.(toO3.ge.0))\n if (any(ismissing(aa))) then\n iflag=0\n else\n xx=array_append_record(olat,(/tolat(aa(0))/),0)\n yy=array_append_record(olon,(/tolon(aa(0))/),0)\n zz=array_append_record(oO3,(/-999.0/),0)\n delete([/olat,olon,oO3/])\n olat=xx\n olon=yy\n oO3=zz\n delete([/xx,yy,zz/])\n ii=ii+1\n end if\n delete(aa)\nend do\nprint(\"obs 8hour max end\")\naa=ind(oO3.ge.0)\nnobs=dimsizes(aa)\nolat24=olat(aa)\nolon24=olon(aa)\noO324=oO3(aa)\nprint(\"TYPE of oO324: \"+typeof(oO324))\ndelete([/aa,olat,olon,oO3/])\nmO324=oO324*0-999.0\nprint(\"TYPE of mO324: \"+typeof(mO324))\nprint(\"TYPE of mO31d: \"+typeof(mO31d))\nareaa=oO324*0-999.0\nareab=areaa\naread=areaa\n\n;-----find model point-----\ndo in=0,nobs-1\n dis=sqrt((mlat1d-olat24(in))^2+(mlon1d-olon24(in))^2)\n aa=minind(dis)\n ;print(in+\" \"+aa)\n if (dis(aa).lt.maxdist) then\n mO324(in)=mO31d(aa)\n cc=ind((mlat1d.ge.(olat24(in)-maxarea)).and.(mlat1d.le.(olat24(in)+maxarea)).and.\\\n (mlon1d.ge.(olon24(in)-maxarea)).and.(mlon1d.le.(olon24(in)+maxarea)))\n areaa(in)=0\n areab(in)=0\n if (oO324(in).ge.thd) then\n aread(in)=0\n if (max(mO31d(cc)).ge.thd) then\n areab(in)=1\n else\n aread(in)=1\n end if\n else\n 
bb=ind((olat24.ge.(olat24(in)-maxarea)).and.(olat24.le.(olat24(in)+maxarea)).and.\\\n (olon24.ge.(olon24(in)-maxarea)).and.(olon24.le.(olon24(in)+maxarea)))\n if (max(mO31d(aa)).ge.thd) then\n if (max(oO324(bb)).ge.thd) then\n areaa(in)=0\n else\n areaa(in)=1\n end if\n else\n areaa(in)=0\n end if\n delete(bb)\n end if\n delete(cc)\n end if\n delete(aa)\nend do\n\n;-----cal rmse corr nme nmb me mb-----\ntt=ind((mO324.ge.0).and.(oO324.ge.0))\n\nif (any(ismissing(tt))) then\n rmse=-999.0\n corr=-999.0\n nmb=-999.0\n nme=-999.0\n me=-999.0\n mb=-999.0\nelse\n rmse=dim_rmsd_n(oO324(tt),mO324(tt),0)\n corr=esccr(oO324(tt),mO324(tt),0)\n nmb=sum((mO324(tt)-oO324(tt)))/sum(oO324(tt))\n nme=sum(abs(oO324(tt)-mO324(tt)))/sum(oO324(tt))\n me=avg(abs(oO324(tt)-mO324(tt)))\n mb=avg((mO324(tt)-oO324(tt)))\nend if\n;-----cal ah afar-----\naa=ind((areaa+areab).gt.0)\nbb=ind((aread+areab).gt.0)\nif (any(ismissing(aa))) then\n afar=0.\nelse\n afar=tofloat(sum(areaa(aa)))/tofloat(sum(areab(aa))+sum(areaa(aa)))*100\nend if\ndelete(aa)\nif (any(ismissing(bb))) then\n ah=-999.0\nelse\n ah=tofloat(sum(areab(bb)))/tofloat(sum(areab(bb))+sum(aread(bb)))*100\nend if\ndelete(bb)\nwrite_table(wfname,\"a\",[/sdate,dimsizes(tt),avg(oO324(tt)),avg(mO324(tt)),rmse,corr,nmb,nme,mb,me,ah,afar/],\\\n \"%s,%i,%f,%f,%f,%f,%f,%f,%f,%f,%f,%f\")\ndelete(tt)\nend\n\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\n\nif [ $? -eq 0 ]; then\n echo \"Evaluation Completed Successfully\"\n\techo \"Removing ncl file: geoweaver_eva_daily_O3.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\nelse\n echo \"Evaluation Failed!\"\nfi\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\n(0)\t8h start\n(0)\tobs 8hour max end\n(0)\tTYPE of oO324: float\n(0)\tTYPE of mO324: float\n(0)\tTYPE of mO31d: float\nEvaluation Completed Successfully\nRemoving ncl file: geoweaver_eva_daily_O3.ncl...\n", + "history_begin_time" : 1655766053329, + "history_end_time" : 1656318407869, + "history_notes" : null, + "history_process" : "fsk7f2", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "zJuTgpzAUImY", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=20220613 #This needs to be auto date\nexport wfname=\"/groups/ESS/aalnaim/cmaq/results/geoweaver_evalution_results.txt\"\n\nexport obs_dir_NCL=\"/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X\"\nexport ofname=\"/AQF5X_Hourly_\"\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files/\"\n\nexport mfname=\"COMBINE3D_ACONC_v531_gcc_AQF5X_20220612_ML_extracted.nc\"\n\nexport grid_fname=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/GRIDCRO2D_20220613.nc\" #This needs to be auto date\n\nexport dx=12000\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\n\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend 
setvalues\n\nbegin\nsdate=getenv(\"YYYYMMDD_POST\")\nwfname=getenv(\"wfname\")\nobs_dir=getenv(\"obs_dir_NCL\")\nofname=getenv(\"ofname\")\nmod_dir=getenv(\"postdata_dir\")\nmfname=getenv(\"mfname\")\ndkm=tofloat(getenv(\"dx\"))\ngrid_fname=(getenv(\"grid_fname\"))\n\nmaxdist=dkm/90000.0*1.414\nmaxarea=0.25\nthd=70\n\n;-----read model lat lon------\n;read lat lon\nf1 = addfile(grid_fname,\"r\")\nmlat = f1->LAT(0,0,:,:)\nmlon = f1->LON(0,0,:,:)\ndelete(f1)\nmlat1d = ndtooned(mlat)\nmlon1d = ndtooned(mlon)\ndelete([/mlat,mlon/])\n\n;-----read cmaq results-----\nf2 = addfile(mod_dir+mfname,\"r\")\nmO3 = f2->O3(:,:,:) ;ppb\n\n\nnt = dimsizes(mO3(:,0,0))\nny = dimsizes(mO3(0,:,0))\nnx = dimsizes(mO3(0,0,:))\n\nm8O3 = new((/17,ny,nx/),\"double\")\nm8maxO3 = new((/ny,nx/),\"double\")\n\ndo ih=0,16\n m8O3(ih,:,:)=dim_avg_n(mO3(ih:ih+7,:,:),0)\nend do\nm8maxO3 = dim_max_n(m8O3,0) ;type double\nmO31d_d=ndtooned(m8maxO3) ; type double\nmO31d=tofloat(mO31d_d)\n\ndelete([/f2,mO3,m8O3,m8maxO3/])\n\n;-----read obs-----\nsyyyy1=str_get_cols(sdate,0,3)\nsmm1=str_get_cols(sdate,4,5)\nsdd1=str_get_cols(sdate,6,7)\n\nymd=jul2greg(greg2jul(tointeger(syyyy1),tointeger(smm1),tointeger(sdd1),-1)+1)\nsyyyy2=tostring_with_format(ymd(0),\"%0.4i\")\nsmm2=tostring_with_format(ymd(1),\"%0.2i\")\nsdd2=tostring_with_format(ymd(2),\"%0.2i\")\n\ntolat=(/-999.0/) ;set the first data to 0\ntolon=tolat\ntoO3=tolat\n\ndo ih=12,35\n if (ih.lt.24) then\n shh=tostring_with_format(ih,\"%0.2i\")\n syyyy=syyyy1\n smm=smm1\n sdd=sdd1\n else\n shh=tostring_with_format(ih-24,\"%0.2i\")\n syyyy=syyyy2\n smm=smm2\n sdd=sdd2\n end if\n data=asciiread(obs_dir+ofname+syyyy+smm+sdd+shh+\".dat\",-1,\"string\")\n xx=array_append_record(tolat,stringtofloat(str_get_field(data(1::), 2,\",\")),0)\n yy=array_append_record(tolon,stringtofloat(str_get_field(data(1::), 3,\",\")),0)\n zz=array_append_record(toO3,stringtofloat(str_get_field(data(1::), 4,\",\")),0)\n delete([/tolat,tolon,toO3/])\n tolat=xx\n tolon=yy\n toO3=zz\n delete([/xx,yy,zz/])\n delete(data)\nend do\n\ntoO3@_FillValue = -999.0\n\n;-----calculate max ave 8 hour o3-----\noflag=tolat*0+1\naa=ind((oflag.gt.0).and.(toO3.ge.0))\nii=0\nprint(\"8h start\")\nif (any(ismissing(aa))) then\n iflag=0\nelse\n iflag=1\n olat=(/tolat(aa(0))/)\n olon=(/tolon(aa(0))/)\n oO3=(/-999.0/)\n o8O3 = new(17,\"float\")\n o8O3 = -999.0\nend if\ndelete(aa)\ndo while (iflag.gt.0)\n aa=ind((tolat.eq.olat(ii)).and.(tolon.eq.olon(ii)).and.(toO3.ge.0))\n oflag(aa)=0\n if (dimsizes(aa).eq.24) then ; calculate 24 h, so calculate 8hr ozone here\n do ih = 0, 16\n o8O3(ih) = avg(toO3(aa(ih:ih+7)))\n end do\n oO3(ii)=max(o8O3)\n end if\n o8O3 = -999.0\n delete(aa)\n aa=ind((oflag.gt.0).and.(toO3.ge.0))\n if (any(ismissing(aa))) then\n iflag=0\n else\n xx=array_append_record(olat,(/tolat(aa(0))/),0)\n yy=array_append_record(olon,(/tolon(aa(0))/),0)\n zz=array_append_record(oO3,(/-999.0/),0)\n delete([/olat,olon,oO3/])\n olat=xx\n olon=yy\n oO3=zz\n delete([/xx,yy,zz/])\n ii=ii+1\n end if\n delete(aa)\nend do\nprint(\"obs 8hour max end\")\naa=ind(oO3.ge.0)\nnobs=dimsizes(aa)\nolat24=olat(aa)\nolon24=olon(aa)\noO324=oO3(aa)\nprint(\"TYPE of oO324: \"+typeof(oO324))\ndelete([/aa,olat,olon,oO3/])\nmO324=oO324*0-999.0\nprint(\"TYPE of mO324: \"+typeof(mO324))\nprint(\"TYPE of mO31d: \"+typeof(mO31d))\nareaa=oO324*0-999.0\nareab=areaa\naread=areaa\n\n;-----find model point-----\ndo in=0,nobs-1\n dis=sqrt((mlat1d-olat24(in))^2+(mlon1d-olon24(in))^2)\n aa=minind(dis)\n ;print(in+\" \"+aa)\n if (dis(aa).lt.maxdist) then\n 
mO324(in)=mO31d(aa)\n cc=ind((mlat1d.ge.(olat24(in)-maxarea)).and.(mlat1d.le.(olat24(in)+maxarea)).and.\\\n (mlon1d.ge.(olon24(in)-maxarea)).and.(mlon1d.le.(olon24(in)+maxarea)))\n areaa(in)=0\n areab(in)=0\n if (oO324(in).ge.thd) then\n aread(in)=0\n if (max(mO31d(cc)).ge.thd) then\n areab(in)=1\n else\n aread(in)=1\n end if\n else\n bb=ind((olat24.ge.(olat24(in)-maxarea)).and.(olat24.le.(olat24(in)+maxarea)).and.\\\n (olon24.ge.(olon24(in)-maxarea)).and.(olon24.le.(olon24(in)+maxarea)))\n if (max(mO31d(aa)).ge.thd) then\n if (max(oO324(bb)).ge.thd) then\n areaa(in)=0\n else\n areaa(in)=1\n end if\n else\n areaa(in)=0\n end if\n delete(bb)\n end if\n delete(cc)\n end if\n delete(aa)\nend do\n\n;-----cal rmse corr nme nmb me mb-----\ntt=ind((mO324.ge.0).and.(oO324.ge.0))\n\nif (any(ismissing(tt))) then\n rmse=-999.0\n corr=-999.0\n nmb=-999.0\n nme=-999.0\n me=-999.0\n mb=-999.0\nelse\n rmse=dim_rmsd_n(oO324(tt),mO324(tt),0)\n corr=esccr(oO324(tt),mO324(tt),0)\n nmb=sum((mO324(tt)-oO324(tt)))/sum(oO324(tt))\n nme=sum(abs(oO324(tt)-mO324(tt)))/sum(oO324(tt))\n me=avg(abs(oO324(tt)-mO324(tt)))\n mb=avg((mO324(tt)-oO324(tt)))\nend if\n;-----cal ah afar-----\naa=ind((areaa+areab).gt.0)\nbb=ind((aread+areab).gt.0)\nif (any(ismissing(aa))) then\n afar=0.\nelse\n afar=tofloat(sum(areaa(aa)))/tofloat(sum(areab(aa))+sum(areaa(aa)))*100\nend if\ndelete(aa)\nif (any(ismissing(bb))) then\n ah=-999.0\nelse\n ah=tofloat(sum(areab(bb)))/tofloat(sum(areab(bb))+sum(aread(bb)))*100\nend if\ndelete(bb)\nwrite_table(wfname,\"a\",[/sdate,dimsizes(tt),avg(oO324(tt)),avg(mO324(tt)),rmse,corr,nmb,nme,mb,me,ah,afar/],\\\n \"%s,%i,%f,%f,%f,%f,%f,%f,%f,%f,%f,%f\")\ndelete(tt)\nend\n\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\n(0)\t8h start\n(0)\tobs 8hour max end\n(0)\tTYPE of oO324: float\n(0)\tTYPE of mO324: float\n(0)\tTYPE of mO31d: float\n", + "history_begin_time" : 1655765891446, + "history_end_time" : 1656318407283, + "history_notes" : null, + "history_process" : "fsk7f2", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "W4HJ6XyuIVRo", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=20220613 #This needs to be auto date\nexport wfname=\"/groups/ESS/aalnaim/cmaq/results/geoweaver_evalution_results.txt\"\n\nexport obs_dir_NCL=\"/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X\"\nexport ofname=\"/AQF5X_Hourly_\"\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files/\"\n\nexport mfname=\"COMBINE3D_ACONC_v531_gcc_AQF5X_20220612_ML_extracted.nc\"\n\nexport grid_fname=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/GRIDCRO2D_20220613.nc\" #This needs to be auto date\n\nexport dx=12000\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\n\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend 
setvalues\n\nbegin\nsdate=getenv(\"YYYYMMDD_POST\")\nwfname=getenv(\"wfname\")\nobs_dir=getenv(\"obs_dir_NCL\")\nofname=getenv(\"ofname\")\nmod_dir=getenv(\"postdata_dir\")\nmfname=getenv(\"mfname\")\ndkm=tofloat(getenv(\"dx\"))\ngrid_fname=(getenv(\"grid_fname\"))\n\nmaxdist=dkm/90000.0*1.414\nmaxarea=0.25\nthd=70\n\n;-----read model lat lon------\n;read lat lon\nf1 = addfile(grid_fname,\"r\")\nmlat = f1->LAT(0,0,:,:)\nmlon = f1->LON(0,0,:,:)\ndelete(f1)\nmlat1d = ndtooned(mlat)\nmlon1d = ndtooned(mlon)\ndelete([/mlat,mlon/])\n\n;-----read cmaq results-----\nf2 = addfile(mod_dir+mfname,\"r\")\nmO3 = f2->O3(:,:,:) ;ppb\n\n\nnt = dimsizes(mO3(:,0,0))\nny = dimsizes(mO3(0,:,0))\nnx = dimsizes(mO3(0,0,:))\n\nm8O3 = new((/17,ny,nx/),\"double\")\nm8maxO3 = new((/ny,nx/),\"double\")\n\ndo ih=0,16\n m8O3(ih,:,:)=dim_avg_n(mO3(ih:ih+7,:,:),0)\nend do\nm8maxO3 = dim_max_n(m8O3,0) ;type double\nmO31d_d=ndtooned(m8maxO3) ; type double\nmO31d=tofloat(mO31d_d)\n\ndelete([/f2,mO3,m8O3,m8maxO3/])\n\n;-----read obs-----\nsyyyy1=str_get_cols(sdate,0,3)\nsmm1=str_get_cols(sdate,4,5)\nsdd1=str_get_cols(sdate,6,7)\n\nymd=jul2greg(greg2jul(tointeger(syyyy1),tointeger(smm1),tointeger(sdd1),-1)+1)\nsyyyy2=tostring_with_format(ymd(0),\"%0.4i\")\nsmm2=tostring_with_format(ymd(1),\"%0.2i\")\nsdd2=tostring_with_format(ymd(2),\"%0.2i\")\n\ntolat=(/-999.0/) ;set the first data to 0\ntolon=tolat\ntoO3=tolat\n\ndo ih=12,35\n if (ih.lt.24) then\n shh=tostring_with_format(ih,\"%0.2i\")\n syyyy=syyyy1\n smm=smm1\n sdd=sdd1\n else\n shh=tostring_with_format(ih-24,\"%0.2i\")\n syyyy=syyyy2\n smm=smm2\n sdd=sdd2\n end if\n data=asciiread(obs_dir+ofname+syyyy+smm+sdd+shh+\".dat\",-1,\"string\")\n xx=array_append_record(tolat,stringtofloat(str_get_field(data(1::), 2,\",\")),0)\n yy=array_append_record(tolon,stringtofloat(str_get_field(data(1::), 3,\",\")),0)\n zz=array_append_record(toO3,stringtofloat(str_get_field(data(1::), 4,\",\")),0)\n delete([/tolat,tolon,toO3/])\n tolat=xx\n tolon=yy\n toO3=zz\n delete([/xx,yy,zz/])\n delete(data)\nend do\n\ntoO3@_FillValue = -999.0\n\n;-----calculate max ave 8 hour o3-----\noflag=tolat*0+1\naa=ind((oflag.gt.0).and.(toO3.ge.0))\nii=0\nprint(\"8h start\")\nif (any(ismissing(aa))) then\n iflag=0\nelse\n iflag=1\n olat=(/tolat(aa(0))/)\n olon=(/tolon(aa(0))/)\n oO3=(/-999.0/)\n o8O3 = new(17,\"float\")\n o8O3 = -999.0\nend if\ndelete(aa)\ndo while (iflag.gt.0)\n aa=ind((tolat.eq.olat(ii)).and.(tolon.eq.olon(ii)).and.(toO3.ge.0))\n oflag(aa)=0\n if (dimsizes(aa).eq.24) then ; calculate 24 h, so calculate 8hr ozone here\n do ih = 0, 16\n o8O3(ih) = avg(toO3(aa(ih:ih+7)))\n end do\n oO3(ii)=max(o8O3)\n end if\n o8O3 = -999.0\n delete(aa)\n aa=ind((oflag.gt.0).and.(toO3.ge.0))\n if (any(ismissing(aa))) then\n iflag=0\n else\n xx=array_append_record(olat,(/tolat(aa(0))/),0)\n yy=array_append_record(olon,(/tolon(aa(0))/),0)\n zz=array_append_record(oO3,(/-999.0/),0)\n delete([/olat,olon,oO3/])\n olat=xx\n olon=yy\n oO3=zz\n delete([/xx,yy,zz/])\n ii=ii+1\n end if\n delete(aa)\nend do\nprint(\"obs 8hour max end\")\naa=ind(oO3.ge.0)\nnobs=dimsizes(aa)\nolat24=olat(aa)\nolon24=olon(aa)\noO324=oO3(aa)\nprint(\"TYPE of oO324: \"+typeof(oO324))\ndelete([/aa,olat,olon,oO3/])\nmO324=oO324*0-999.0\nprint(\"TYPE of mO324: \"+typeof(mO324))\nprint(\"TYPE of mO31d: \"+typeof(mO31d))\nareaa=oO324*0-999.0\nareab=areaa\naread=areaa\n\n;-----find model point-----\ndo in=0,nobs-1\n dis=sqrt((mlat1d-olat24(in))^2+(mlon1d-olon24(in))^2)\n aa=minind(dis)\n ;print(in+\" \"+aa)\n if (dis(aa).lt.maxdist) then\n 
mO324(in)=mO31d(aa)\n cc=ind((mlat1d.ge.(olat24(in)-maxarea)).and.(mlat1d.le.(olat24(in)+maxarea)).and.\\\n (mlon1d.ge.(olon24(in)-maxarea)).and.(mlon1d.le.(olon24(in)+maxarea)))\n areaa(in)=0\n areab(in)=0\n if (oO324(in).ge.thd) then\n aread(in)=0\n if (max(mO31d(cc)).ge.thd) then\n areab(in)=1\n else\n aread(in)=1\n end if\n else\n bb=ind((olat24.ge.(olat24(in)-maxarea)).and.(olat24.le.(olat24(in)+maxarea)).and.\\\n (olon24.ge.(olon24(in)-maxarea)).and.(olon24.le.(olon24(in)+maxarea)))\n if (max(mO31d(aa)).ge.thd) then\n if (max(oO324(bb)).ge.thd) then\n areaa(in)=0\n else\n areaa(in)=1\n end if\n else\n areaa(in)=0\n end if\n delete(bb)\n end if\n delete(cc)\n end if\n delete(aa)\nend do\n\n;-----cal rmse corr nme nmb me mb-----\ntt=ind((mO324.ge.0).and.(oO324.ge.0))\n\nif (any(ismissing(tt))) then\n rmse=-999.0\n corr=-999.0\n nmb=-999.0\n nme=-999.0\n me=-999.0\n mb=-999.0\nelse\n rmse=dim_rmsd_n(oO324(tt),mO324(tt),0)\n corr=esccr(oO324(tt),mO324(tt),0)\n nmb=sum((mO324(tt)-oO324(tt)))/sum(oO324(tt))\n nme=sum(abs(oO324(tt)-mO324(tt)))/sum(oO324(tt))\n me=avg(abs(oO324(tt)-mO324(tt)))\n mb=avg((mO324(tt)-oO324(tt)))\nend if\n;-----cal ah afar-----\naa=ind((areaa+areab).gt.0)\nbb=ind((aread+areab).gt.0)\nif (any(ismissing(aa))) then\n afar=0.\nelse\n afar=tofloat(sum(areaa(aa)))/tofloat(sum(areab(aa))+sum(areaa(aa)))*100\nend if\ndelete(aa)\nif (any(ismissing(bb))) then\n ah=-999.0\nelse\n ah=tofloat(sum(areab(bb)))/tofloat(sum(areab(bb))+sum(aread(bb)))*100\nend if\ndelete(bb)\nwrite_table(wfname,\"a\",[/sdate,dimsizes(tt),avg(oO324(tt)),avg(mO324(tt)),rmse,corr,nmb,nme,mb,me,ah,afar/],\\\n \"%s,%i,%f,%f,%f,%f,%f,%f,%f,%f,%f,%f\")\ndelete(tt)\nend\n\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\n(0)\t8h start\n(0)\tobs 8hour max end\n(0)\tTYPE of oO324: float\n(0)\tTYPE of mO324: float\n(0)\tTYPE of mO31d: float\nfatal:Dimension sizes of left hand side and right hand side of assignment do not match\n\nfatal:[\"Execute.c\":8637]:Execute: Error occurred at or near line 320 in file /groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\n\n\n", + "history_begin_time" : 1655765797412, + "history_end_time" : 1655765888135, + "history_notes" : null, + "history_process" : "fsk7f2", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "RC953PV6SVpX", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=20220613 #This needs to be auto date\nexport wfname=\"geoweaver_evalution_results.txt\"\n\nexport obs_dir_NCL=\"/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X\"\nexport ofname=\"/AQF5X_Hourly_\"\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files/\"\n\nexport mfname=\"COMBINE3D_ACONC_v531_gcc_AQF5X_20220612_ML_extracted.nc\"\n\nexport grid_fname=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/GRIDCRO2D_20220613.nc\" #This needs to be auto date\n\nexport dx=12000\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\n\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload 
\"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\nsdate=getenv(\"YYYYMMDD_POST\")\nwfname=getenv(\"wfname\")\nobs_dir=getenv(\"obs_dir_NCL\")\nofname=getenv(\"ofname\")\nmod_dir=getenv(\"postdata_dir\")\nmfname=getenv(\"mfname\")\ndkm=tofloat(getenv(\"dx\"))\ngrid_fname=(getenv(\"grid_fname\"))\n\nmaxdist=dkm/90000.0*1.414\nmaxarea=0.25\nthd=70\n\n;-----read model lat lon------\n;read lat lon\nf1 = addfile(grid_fname,\"r\")\nmlat = f1->LAT(0,0,:,:)\nmlon = f1->LON(0,0,:,:)\ndelete(f1)\nmlat1d = ndtooned(mlat)\nmlon1d = ndtooned(mlon)\ndelete([/mlat,mlon/])\n\n;-----read cmaq results-----\nf2 = addfile(mod_dir+mfname,\"r\")\nmO3 = f2->O3(:,:,:) ;ppb\n\n\nnt = dimsizes(mO3(:,0,0))\nny = dimsizes(mO3(0,:,0))\nnx = dimsizes(mO3(0,0,:))\n\nm8O3 = new((/17,ny,nx/),\"double\")\nm8maxO3 = new((/ny,nx/),\"double\")\n\ndo ih=0,16\n m8O3(ih,:,:)=dim_avg_n(mO3(ih:ih+7,:,:),0)\nend do\nm8maxO3 = dim_max_n(m8O3,0) ;type double\nmO31d_d=ndtooned(m8maxO3) ; type double\nmO31d=tofloat(mO31d_d)\n\ndelete([/f2,mO3,m8O3,m8maxO3/])\n\n;-----read obs-----\nsyyyy1=str_get_cols(sdate,0,3)\nsmm1=str_get_cols(sdate,4,5)\nsdd1=str_get_cols(sdate,6,7)\n\nymd=jul2greg(greg2jul(tointeger(syyyy1),tointeger(smm1),tointeger(sdd1),-1)+1)\nsyyyy2=tostring_with_format(ymd(0),\"%0.4i\")\nsmm2=tostring_with_format(ymd(1),\"%0.2i\")\nsdd2=tostring_with_format(ymd(2),\"%0.2i\")\n\ntolat=(/-999.0/) ;set the first data to 0\ntolon=tolat\ntoO3=tolat\n\ndo ih=12,35\n if (ih.lt.24) then\n shh=tostring_with_format(ih,\"%0.2i\")\n syyyy=syyyy1\n smm=smm1\n sdd=sdd1\n else\n shh=tostring_with_format(ih-24,\"%0.2i\")\n syyyy=syyyy2\n smm=smm2\n sdd=sdd2\n end if\n data=asciiread(obs_dir+ofname+syyyy+smm+sdd+shh+\".dat\",-1,\"string\")\n xx=array_append_record(tolat,stringtofloat(str_get_field(data(1::), 2,\",\")),0)\n yy=array_append_record(tolon,stringtofloat(str_get_field(data(1::), 3,\",\")),0)\n zz=array_append_record(toO3,stringtofloat(str_get_field(data(1::), 4,\",\")),0)\n delete([/tolat,tolon,toO3/])\n tolat=xx\n tolon=yy\n toO3=zz\n delete([/xx,yy,zz/])\n delete(data)\nend do\n\ntoO3@_FillValue = -999.0\n\n;-----calculate max ave 8 hour o3-----\noflag=tolat*0+1\naa=ind((oflag.gt.0).and.(toO3.ge.0))\nii=0\nprint(\"8h start\")\nif (any(ismissing(aa))) then\n iflag=0\nelse\n iflag=1\n olat=(/tolat(aa(0))/)\n olon=(/tolon(aa(0))/)\n oO3=(/-999.0/)\n o8O3 = new(17,\"float\")\n o8O3 = -999.0\nend if\ndelete(aa)\ndo while (iflag.gt.0)\n aa=ind((tolat.eq.olat(ii)).and.(tolon.eq.olon(ii)).and.(toO3.ge.0))\n oflag(aa)=0\n if (dimsizes(aa).eq.24) then ; calculate 24 h, so calculate 8hr ozone here\n do ih = 0, 16\n o8O3(ih) = avg(toO3(aa(ih:ih+7)))\n end do\n oO3(ii)=max(o8O3)\n end if\n o8O3 = -999.0\n delete(aa)\n aa=ind((oflag.gt.0).and.(toO3.ge.0))\n if (any(ismissing(aa))) then\n iflag=0\n else\n xx=array_append_record(olat,(/tolat(aa(0))/),0)\n yy=array_append_record(olon,(/tolon(aa(0))/),0)\n zz=array_append_record(oO3,(/-999.0/),0)\n delete([/olat,olon,oO3/])\n olat=xx\n olon=yy\n oO3=zz\n delete([/xx,yy,zz/])\n ii=ii+1\n end if\n delete(aa)\nend do\nprint(\"obs 8hour max end\")\naa=ind(oO3.ge.0)\nnobs=dimsizes(aa)\nolat24=olat(aa)\nolon24=olon(aa)\noO324=oO3(aa)\nprint(\"TYPE of oO324: \"+typeof(oO324))\ndelete([/aa,olat,olon,oO3/])\nmO324=oO324*0-999.0\nprint(\"TYPE of mO324: \"+typeof(mO324))\nprint(\"TYPE of mO31d: 
\"+typeof(mO31d))\nareaa=oO324*0-999.0\nareab=areaa\naread=areaa\n\n;-----find model point-----\ndo in=0,nobs-1\n dis=sqrt((mlat1d-olat24(in))^2+(mlon1d-olon24(in))^2)\n aa=minind(dis)\n ;print(in+\" \"+aa)\n if (dis(aa).lt.maxdist) then\n mO324(in)=mO31d(aa)\n cc=ind((mlat1d.ge.(olat24(in)-maxarea)).and.(mlat1d.le.(olat24(in)+maxarea)).and.\\\n (mlon1d.ge.(olon24(in)-maxarea)).and.(mlon1d.le.(olon24(in)+maxarea)))\n areaa(in)=0\n areab(in)=0\n if (oO324(in).ge.thd) then\n aread(in)=0\n if (max(mO31d(cc)).ge.thd) then\n areab(in)=1\n else\n aread(in)=1\n end if\n else\n bb=ind((olat24.ge.(olat24(in)-maxarea)).and.(olat24.le.(olat24(in)+maxarea)).and.\\\n (olon24.ge.(olon24(in)-maxarea)).and.(olon24.le.(olon24(in)+maxarea)))\n if (max(mO31d(aa)).ge.thd) then\n if (max(oO324(bb)).ge.thd) then\n areaa(in)=0\n else\n areaa(in)=1\n end if\n else\n areaa(in)=0\n end if\n delete(bb)\n end if\n delete(cc)\n end if\n delete(aa)\nend do\n\n;-----cal rmse corr nme nmb me mb-----\ntt=ind((mO324.ge.0).and.(oO324.ge.0))\n\nif (any(ismissing(tt))) then\n rmse=-999.0\n corr=-999.0\n nmb=-999.0\n nme=-999.0\n me=-999.0\n mb=-999.0\nelse\n rmse=dim_rmsd_n(oO324(tt),mO324(tt),0)\n corr=esccr(oO324(tt),mO324(tt),0)\n nmb=sum((mO324(tt)-oO324(tt)))/sum(oO324(tt))\n nme=sum(abs(oO324(tt)-mO324(tt)))/sum(oO324(tt))\n me=avg(abs(oO324(tt)-mO324(tt)))\n mb=avg((mO324(tt)-oO324(tt)))\nend if\n;-----cal ah afar-----\naa=ind((areaa+areab).gt.0)\nbb=ind((aread+areab).gt.0)\nif (any(ismissing(aa))) then\n afar=0.\nelse\n afar=tofloat(sum(areaa(aa)))/tofloat(sum(areab(aa))+sum(areaa(aa)))*100\nend if\ndelete(aa)\nif (any(ismissing(bb))) then\n ah=-999.0\nelse\n ah=tofloat(sum(areab(bb)))/tofloat(sum(areab(bb))+sum(aread(bb)))*100\nend if\ndelete(bb)\nwrite_table(wfname,\"a\",[/sdate,dimsizes(tt),avg(oO324(tt)),avg(mO324(tt)),rmse,corr,nmb,nme,mb,me,ah,afar/],\\\n \"%s,%i,%f,%f,%f,%f,%f,%f,%f,%f,%f,%f\")\ndelete(tt)\nend\n\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\n(0)\t8h start\n(0)\tobs 8hour max end\n(0)\tTYPE of oO324: float\n(0)\tTYPE of mO324: float\n(0)\tTYPE of mO31d: float\n", + "history_begin_time" : 1655765709896, + "history_end_time" : 1655765887296, + "history_notes" : null, + "history_process" : "fsk7f2", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "DRgxG0JTXpDn", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=20220613 #This needs to be auto date\nexport wfname=\"geoweaver_evalution_results.txt\n\nexport obs_dir_NCL=\"/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X\"\nexport ofname=\"/AQF5X_Hourly_\"\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files/\"\n\nexport mfname=\"COMBINE3D_ACONC_v531_gcc_AQF5X_20220612_ML_extracted.nc\"\n\nexport grid_fname=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/GRIDCRO2D_20220613.nc\" #This needs to be auto date\n\nexport dx=12000\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\n\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload 
\"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\nsdate=getenv(\"YYYYMMDD_POST\")\nwfname=getenv(\"wfname\")\nobs_dir=getenv(\"obs_dir_NCL\")\nofname=getenv(\"ofname\")\nmod_dir=getenv(\"postdata_dir\")\nmfname=getenv(\"mfname\")\ndkm=tofloat(getenv(\"dx\"))\ngrid_fname=(getenv(\"grid_fname\"))\n\nmaxdist=dkm/90000.0*1.414\nmaxarea=0.25\nthd=70\n\n;-----read model lat lon------\n;read lat lon\nf1 = addfile(grid_fname,\"r\")\nmlat = f1->LAT(0,0,:,:)\nmlon = f1->LON(0,0,:,:)\ndelete(f1)\nmlat1d = ndtooned(mlat)\nmlon1d = ndtooned(mlon)\ndelete([/mlat,mlon/])\n\n;-----read cmaq results-----\nf2 = addfile(mod_dir+mfname,\"r\")\nmO3 = f2->O3(:,:,:) ;ppb\n\n\nnt = dimsizes(mO3(:,0,0))\nny = dimsizes(mO3(0,:,0))\nnx = dimsizes(mO3(0,0,:))\n\nm8O3 = new((/17,ny,nx/),\"double\")\nm8maxO3 = new((/ny,nx/),\"double\")\n\ndo ih=0,16\n m8O3(ih,:,:)=dim_avg_n(mO3(ih:ih+7,:,:),0)\nend do\nm8maxO3 = dim_max_n(m8O3,0) ;type double\nmO31d_d=ndtooned(m8maxO3) ; type double\nmO31d=tofloat(mO31d_d)\n\ndelete([/f2,mO3,m8O3,m8maxO3/])\n\n;-----read obs-----\nsyyyy1=str_get_cols(sdate,0,3)\nsmm1=str_get_cols(sdate,4,5)\nsdd1=str_get_cols(sdate,6,7)\n\nymd=jul2greg(greg2jul(tointeger(syyyy1),tointeger(smm1),tointeger(sdd1),-1)+1)\nsyyyy2=tostring_with_format(ymd(0),\"%0.4i\")\nsmm2=tostring_with_format(ymd(1),\"%0.2i\")\nsdd2=tostring_with_format(ymd(2),\"%0.2i\")\n\ntolat=(/-999.0/) ;set the first data to 0\ntolon=tolat\ntoO3=tolat\n\ndo ih=12,35\n if (ih.lt.24) then\n shh=tostring_with_format(ih,\"%0.2i\")\n syyyy=syyyy1\n smm=smm1\n sdd=sdd1\n else\n shh=tostring_with_format(ih-24,\"%0.2i\")\n syyyy=syyyy2\n smm=smm2\n sdd=sdd2\n end if\n data=asciiread(obs_dir+ofname+syyyy+smm+sdd+shh+\".dat\",-1,\"string\")\n xx=array_append_record(tolat,stringtofloat(str_get_field(data(1::), 2,\",\")),0)\n yy=array_append_record(tolon,stringtofloat(str_get_field(data(1::), 3,\",\")),0)\n zz=array_append_record(toO3,stringtofloat(str_get_field(data(1::), 4,\",\")),0)\n delete([/tolat,tolon,toO3/])\n tolat=xx\n tolon=yy\n toO3=zz\n delete([/xx,yy,zz/])\n delete(data)\nend do\n\ntoO3@_FillValue = -999.0\n\n;-----calculate max ave 8 hour o3-----\noflag=tolat*0+1\naa=ind((oflag.gt.0).and.(toO3.ge.0))\nii=0\nprint(\"8h start\")\nif (any(ismissing(aa))) then\n iflag=0\nelse\n iflag=1\n olat=(/tolat(aa(0))/)\n olon=(/tolon(aa(0))/)\n oO3=(/-999.0/)\n o8O3 = new(17,\"float\")\n o8O3 = -999.0\nend if\ndelete(aa)\ndo while (iflag.gt.0)\n aa=ind((tolat.eq.olat(ii)).and.(tolon.eq.olon(ii)).and.(toO3.ge.0))\n oflag(aa)=0\n if (dimsizes(aa).eq.24) then ; calculate 24 h, so calculate 8hr ozone here\n do ih = 0, 16\n o8O3(ih) = avg(toO3(aa(ih:ih+7)))\n end do\n oO3(ii)=max(o8O3)\n end if\n o8O3 = -999.0\n delete(aa)\n aa=ind((oflag.gt.0).and.(toO3.ge.0))\n if (any(ismissing(aa))) then\n iflag=0\n else\n xx=array_append_record(olat,(/tolat(aa(0))/),0)\n yy=array_append_record(olon,(/tolon(aa(0))/),0)\n zz=array_append_record(oO3,(/-999.0/),0)\n delete([/olat,olon,oO3/])\n olat=xx\n olon=yy\n oO3=zz\n delete([/xx,yy,zz/])\n ii=ii+1\n end if\n delete(aa)\nend do\nprint(\"obs 8hour max end\")\naa=ind(oO3.ge.0)\nnobs=dimsizes(aa)\nolat24=olat(aa)\nolon24=olon(aa)\noO324=oO3(aa)\nprint(\"TYPE of oO324: \"+typeof(oO324))\ndelete([/aa,olat,olon,oO3/])\nmO324=oO324*0-999.0\nprint(\"TYPE of mO324: \"+typeof(mO324))\nprint(\"TYPE of mO31d: 
\"+typeof(mO31d))\nareaa=oO324*0-999.0\nareab=areaa\naread=areaa\n\n;-----find model point-----\ndo in=0,nobs-1\n dis=sqrt((mlat1d-olat24(in))^2+(mlon1d-olon24(in))^2)\n aa=minind(dis)\n ;print(in+\" \"+aa)\n if (dis(aa).lt.maxdist) then\n mO324(in)=mO31d(aa)\n cc=ind((mlat1d.ge.(olat24(in)-maxarea)).and.(mlat1d.le.(olat24(in)+maxarea)).and.\\\n (mlon1d.ge.(olon24(in)-maxarea)).and.(mlon1d.le.(olon24(in)+maxarea)))\n areaa(in)=0\n areab(in)=0\n if (oO324(in).ge.thd) then\n aread(in)=0\n if (max(mO31d(cc)).ge.thd) then\n areab(in)=1\n else\n aread(in)=1\n end if\n else\n bb=ind((olat24.ge.(olat24(in)-maxarea)).and.(olat24.le.(olat24(in)+maxarea)).and.\\\n (olon24.ge.(olon24(in)-maxarea)).and.(olon24.le.(olon24(in)+maxarea)))\n if (max(mO31d(aa)).ge.thd) then\n if (max(oO324(bb)).ge.thd) then\n areaa(in)=0\n else\n areaa(in)=1\n end if\n else\n areaa(in)=0\n end if\n delete(bb)\n end if\n delete(cc)\n end if\n delete(aa)\nend do\n\n;-----cal rmse corr nme nmb me mb-----\ntt=ind((mO324.ge.0).and.(oO324.ge.0))\n\nif (any(ismissing(tt))) then\n rmse=-999.0\n corr=-999.0\n nmb=-999.0\n nme=-999.0\n me=-999.0\n mb=-999.0\nelse\n rmse=dim_rmsd_n(oO324(tt),mO324(tt),0)\n corr=esccr(oO324(tt),mO324(tt),0)\n nmb=sum((mO324(tt)-oO324(tt)))/sum(oO324(tt))\n nme=sum(abs(oO324(tt)-mO324(tt)))/sum(oO324(tt))\n me=avg(abs(oO324(tt)-mO324(tt)))\n mb=avg((mO324(tt)-oO324(tt)))\nend if\n;-----cal ah afar-----\naa=ind((areaa+areab).gt.0)\nbb=ind((aread+areab).gt.0)\nif (any(ismissing(aa))) then\n afar=0.\nelse\n afar=tofloat(sum(areaa(aa)))/tofloat(sum(areab(aa))+sum(areaa(aa)))*100\nend if\ndelete(aa)\nif (any(ismissing(bb))) then\n ah=-999.0\nelse\n ah=tofloat(sum(areab(bb)))/tofloat(sum(areab(bb))+sum(aread(bb)))*100\nend if\ndelete(bb)\nwrite_table(wfname,\"a\",[/sdate,dimsizes(tt),avg(oO324(tt)),avg(mO324(tt)),rmse,corr,nmb,nme,mb,me,ah,afar/],\\\n \"%s,%i,%f,%f,%f,%f,%f,%f,%f,%f,%f,%f\")\ndelete(tt)\nend\n\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl", + "history_output" : "./geoweaver-DRgxG0JTXpDn.sh: line 241: unexpected EOF while looking for matching `\"'\n./geoweaver-DRgxG0JTXpDn.sh: line 249: syntax error: unexpected end of file\n", + "history_begin_time" : 1655765608687, + "history_end_time" : 1655765886864, + "history_notes" : null, + "history_process" : "fsk7f2", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "rA60MEhbSDq4", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=20220613 #This needs to be auto date\nexport wfname=\"geoweaver_evalution_results.txt\n\nexport obs_dir_NCL=\"/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X\"\nexport ofname=\"/AQF5X_Hourly_\"\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files/\"\n\nexport mfname=\"COMBINE3D_ACONC_v531_gcc_AQF5X_20220612_ML_extracted.nc\"\n\nexport grid_fname=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/GRIDCRO2D_20220613.nc\" #This needs to be auto date\n\nexport dx=12000\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend 
setvalues\n\nbegin\nsdate=getenv(\"YYYYMMDD_POST\")\nwfname=getenv(\"wfname\")\nobs_dir=getenv(\"obs_dir_NCL\")\nofname=getenv(\"ofname\")\nmod_dir=getenv(\"postdata_dir\")\nmfname=getenv(\"mfname\")\ndkm=tofloat(getenv(\"dx\"))\ngrid_fname=(getenv(\"grid_fname\"))\n\nmaxdist=dkm/90000.0*1.414\nmaxarea=0.25\nthd=70\n\n;-----read model lat lon------\n;read lat lon\nf1 = addfile(grid_fname,\"r\")\nmlat = f1->LAT(0,0,:,:)\nmlon = f1->LON(0,0,:,:)\ndelete(f1)\nmlat1d = ndtooned(mlat)\nmlon1d = ndtooned(mlon)\ndelete([/mlat,mlon/])\n\n;-----read cmaq results-----\nf2 = addfile(mod_dir+mfname,\"r\")\nmO3 = f2->O3(:,:,:) ;ppb\n\n\nnt = dimsizes(mO3(:,0,0))\nny = dimsizes(mO3(0,:,0))\nnx = dimsizes(mO3(0,0,:))\n\nm8O3 = new((/17,ny,nx/),\"double\")\nm8maxO3 = new((/ny,nx/),\"double\")\n\ndo ih=0,16\n m8O3(ih,:,:)=dim_avg_n(mO3(ih:ih+7,:,:),0)\nend do\nm8maxO3 = dim_max_n(m8O3,0) ;type double\nmO31d_d=ndtooned(m8maxO3) ; type double\nmO31d=tofloat(mO31d_d)\n\ndelete([/f2,mO3,m8O3,m8maxO3/])\n\n;-----read obs-----\nsyyyy1=str_get_cols(sdate,0,3)\nsmm1=str_get_cols(sdate,4,5)\nsdd1=str_get_cols(sdate,6,7)\n\nymd=jul2greg(greg2jul(tointeger(syyyy1),tointeger(smm1),tointeger(sdd1),-1)+1)\nsyyyy2=tostring_with_format(ymd(0),\"%0.4i\")\nsmm2=tostring_with_format(ymd(1),\"%0.2i\")\nsdd2=tostring_with_format(ymd(2),\"%0.2i\")\n\ntolat=(/-999.0/) ;set the first data to 0\ntolon=tolat\ntoO3=tolat\n\ndo ih=12,35\n if (ih.lt.24) then\n shh=tostring_with_format(ih,\"%0.2i\")\n syyyy=syyyy1\n smm=smm1\n sdd=sdd1\n else\n shh=tostring_with_format(ih-24,\"%0.2i\")\n syyyy=syyyy2\n smm=smm2\n sdd=sdd2\n end if\n data=asciiread(obs_dir+ofname+syyyy+smm+sdd+shh+\".dat\",-1,\"string\")\n xx=array_append_record(tolat,stringtofloat(str_get_field(data(1::), 2,\",\")),0)\n yy=array_append_record(tolon,stringtofloat(str_get_field(data(1::), 3,\",\")),0)\n zz=array_append_record(toO3,stringtofloat(str_get_field(data(1::), 4,\",\")),0)\n delete([/tolat,tolon,toO3/])\n tolat=xx\n tolon=yy\n toO3=zz\n delete([/xx,yy,zz/])\n delete(data)\nend do\n\ntoO3@_FillValue = -999.0\n\n;-----calculate max ave 8 hour o3-----\noflag=tolat*0+1\naa=ind((oflag.gt.0).and.(toO3.ge.0))\nii=0\nprint(\"8h start\")\nif (any(ismissing(aa))) then\n iflag=0\nelse\n iflag=1\n olat=(/tolat(aa(0))/)\n olon=(/tolon(aa(0))/)\n oO3=(/-999.0/)\n o8O3 = new(17,\"float\")\n o8O3 = -999.0\nend if\ndelete(aa)\ndo while (iflag.gt.0)\n aa=ind((tolat.eq.olat(ii)).and.(tolon.eq.olon(ii)).and.(toO3.ge.0))\n oflag(aa)=0\n if (dimsizes(aa).eq.24) then ; calculate 24 h, so calculate 8hr ozone here\n do ih = 0, 16\n o8O3(ih) = avg(toO3(aa(ih:ih+7)))\n end do\n oO3(ii)=max(o8O3)\n end if\n o8O3 = -999.0\n delete(aa)\n aa=ind((oflag.gt.0).and.(toO3.ge.0))\n if (any(ismissing(aa))) then\n iflag=0\n else\n xx=array_append_record(olat,(/tolat(aa(0))/),0)\n yy=array_append_record(olon,(/tolon(aa(0))/),0)\n zz=array_append_record(oO3,(/-999.0/),0)\n delete([/olat,olon,oO3/])\n olat=xx\n olon=yy\n oO3=zz\n delete([/xx,yy,zz/])\n ii=ii+1\n end if\n delete(aa)\nend do\nprint(\"obs 8hour max end\")\naa=ind(oO3.ge.0)\nnobs=dimsizes(aa)\nolat24=olat(aa)\nolon24=olon(aa)\noO324=oO3(aa)\nprint(\"TYPE of oO324: \"+typeof(oO324))\ndelete([/aa,olat,olon,oO3/])\nmO324=oO324*0-999.0\nprint(\"TYPE of mO324: \"+typeof(mO324))\nprint(\"TYPE of mO31d: \"+typeof(mO31d))\nareaa=oO324*0-999.0\nareab=areaa\naread=areaa\n\n;-----find model point-----\ndo in=0,nobs-1\n dis=sqrt((mlat1d-olat24(in))^2+(mlon1d-olon24(in))^2)\n aa=minind(dis)\n ;print(in+\" \"+aa)\n if (dis(aa).lt.maxdist) then\n 
mO324(in)=mO31d(aa)\n cc=ind((mlat1d.ge.(olat24(in)-maxarea)).and.(mlat1d.le.(olat24(in)+maxarea)).and.\\\n (mlon1d.ge.(olon24(in)-maxarea)).and.(mlon1d.le.(olon24(in)+maxarea)))\n areaa(in)=0\n areab(in)=0\n if (oO324(in).ge.thd) then\n aread(in)=0\n if (max(mO31d(cc)).ge.thd) then\n areab(in)=1\n else\n aread(in)=1\n end if\n else\n bb=ind((olat24.ge.(olat24(in)-maxarea)).and.(olat24.le.(olat24(in)+maxarea)).and.\\\n (olon24.ge.(olon24(in)-maxarea)).and.(olon24.le.(olon24(in)+maxarea)))\n if (max(mO31d(aa)).ge.thd) then\n if (max(oO324(bb)).ge.thd) then\n areaa(in)=0\n else\n areaa(in)=1\n end if\n else\n areaa(in)=0\n end if\n delete(bb)\n end if\n delete(cc)\n end if\n delete(aa)\nend do\n\n;-----cal rmse corr nme nmb me mb-----\ntt=ind((mO324.ge.0).and.(oO324.ge.0))\n\nif (any(ismissing(tt))) then\n rmse=-999.0\n corr=-999.0\n nmb=-999.0\n nme=-999.0\n me=-999.0\n mb=-999.0\nelse\n rmse=dim_rmsd_n(oO324(tt),mO324(tt),0)\n corr=esccr(oO324(tt),mO324(tt),0)\n nmb=sum((mO324(tt)-oO324(tt)))/sum(oO324(tt))\n nme=sum(abs(oO324(tt)-mO324(tt)))/sum(oO324(tt))\n me=avg(abs(oO324(tt)-mO324(tt)))\n mb=avg((mO324(tt)-oO324(tt)))\nend if\n;-----cal ah afar-----\naa=ind((areaa+areab).gt.0)\nbb=ind((aread+areab).gt.0)\nif (any(ismissing(aa))) then\n afar=0.\nelse\n afar=tofloat(sum(areaa(aa)))/tofloat(sum(areab(aa))+sum(areaa(aa)))*100\nend if\ndelete(aa)\nif (any(ismissing(bb))) then\n ah=-999.0\nelse\n ah=tofloat(sum(areab(bb)))/tofloat(sum(areab(bb))+sum(aread(bb)))*100\nend if\ndelete(bb)\nwrite_table(wfname,\"a\",[/sdate,dimsizes(tt),avg(oO324(tt)),avg(mO324(tt)),rmse,corr,nmb,nme,mb,me,ah,afar/],\\\n \"%s,%i,%f,%f,%f,%f,%f,%f,%f,%f,%f,%f\")\ndelete(tt)\nend\n\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\n\n# convert -delay 100 *.png 20220613_20220614.gif", + "history_output" : "./geoweaver-rA60MEhbSDq4.sh: line 240: unexpected EOF while looking for matching `\"'\n./geoweaver-rA60MEhbSDq4.sh: line 250: syntax error: unexpected end of file\n", + "history_begin_time" : 1655765297253, + "history_end_time" : 1655765886446, + "history_notes" : null, + "history_process" : "fsk7f2", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "9YGy3AeZt47p", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=20220613 #This needs to be auto date\nexport wfname=\"geoweaver_evalution_results.txt\n\nexport obs_dir_NCL=\"/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X\"\nexport ofname=\"/AQF5X_Hourly_\"\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files/\"\n\nexport mfname=\"COMBINE3D_ACONC_v531_gcc_AQF5X_20220612_ML_extracted.nc\"\n\nexport grid_fname=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/GRIDCRO2D_20220613.nc\" #This needs to be auto date\n\nexport dx=12000\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend 
setvalues\n\nbegin\nsdate=getenv(\"YYYYMMDD_POST\")\nwfname=getenv(\"wfname\")\nobs_dir=getenv(\"obs_dir_NCL\")\nofname=getenv(\"ofname\")\nmod_dir=getenv(\"postdata_dir\")\nmfname=getenv(\"mfname\")\ndkm=tofloat(getenv(\"dx\"))\ngrid_fname=(getenv(\"grid_fname\"))\n\nmaxdist=dkm/90000.0*1.414\nmaxarea=0.25\nthd=70\n\n;-----read model lat lon------\n;read lat lon\nf1 = addfile(grid_fname,\"r\")\nmlat = f1->LAT(0,0,:,:)\nmlon = f1->LON(0,0,:,:)\ndelete(f1)\nmlat1d = ndtooned(mlat)\nmlon1d = ndtooned(mlon)\ndelete([/mlat,mlon/])\n\n;-----read cmaq results-----\nf2 = addfile(mod_dir+mfname,\"r\")\nmO3 = f2->O3(:,:,:) ;ppb\n\n\nnt = dimsizes(mO3(:,0,0))\nny = dimsizes(mO3(0,:,0))\nnx = dimsizes(mO3(0,0,:))\n\nm8O3 = new((/17,ny,nx/),\"double\")\nm8maxO3 = new((/ny,nx/),\"double\")\n\ndo ih=0,16\n m8O3(ih,:,:)=dim_avg_n(mO3(ih:ih+7,:,:),0)\nend do\nm8maxO3 = dim_max_n(m8O3,0) ;type double\nmO31d_d=ndtooned(m8maxO3) ; type double\nmO31d=tofloat(mO31d_d)\n\ndelete([/f2,mO3,m8O3,m8maxO3/])\n\n;-----read obs-----\nsyyyy1=str_get_cols(sdate,0,3)\nsmm1=str_get_cols(sdate,4,5)\nsdd1=str_get_cols(sdate,6,7)\n\nymd=jul2greg(greg2jul(tointeger(syyyy1),tointeger(smm1),tointeger(sdd1),-1)+1)\nsyyyy2=tostring_with_format(ymd(0),\"%0.4i\")\nsmm2=tostring_with_format(ymd(1),\"%0.2i\")\nsdd2=tostring_with_format(ymd(2),\"%0.2i\")\n\ntolat=(/-999.0/) ;set the first data to 0\ntolon=tolat\ntoO3=tolat\n\ndo ih=12,35\n if (ih.lt.24) then\n shh=tostring_with_format(ih,\"%0.2i\")\n syyyy=syyyy1\n smm=smm1\n sdd=sdd1\n else\n shh=tostring_with_format(ih-24,\"%0.2i\")\n syyyy=syyyy2\n smm=smm2\n sdd=sdd2\n end if\n data=asciiread(obs_dir+ofname+syyyy+smm+sdd+shh+\".dat\",-1,\"string\")\n xx=array_append_record(tolat,stringtofloat(str_get_field(data(1::), 2,\",\")),0)\n yy=array_append_record(tolon,stringtofloat(str_get_field(data(1::), 3,\",\")),0)\n zz=array_append_record(toO3,stringtofloat(str_get_field(data(1::), 4,\",\")),0)\n delete([/tolat,tolon,toO3/])\n tolat=xx\n tolon=yy\n toO3=zz\n delete([/xx,yy,zz/])\n delete(data)\nend do\n\ntoO3@_FillValue = -999.0\n\n;-----calculate max ave 8 hour o3-----\noflag=tolat*0+1\naa=ind((oflag.gt.0).and.(toO3.ge.0))\nii=0\nprint(\"8h start\")\nif (any(ismissing(aa))) then\n iflag=0\nelse\n iflag=1\n olat=(/tolat(aa(0))/)\n olon=(/tolon(aa(0))/)\n oO3=(/-999.0/)\n o8O3 = new(17,\"float\")\n o8O3 = -999.0\nend if\ndelete(aa)\ndo while (iflag.gt.0)\n aa=ind((tolat.eq.olat(ii)).and.(tolon.eq.olon(ii)).and.(toO3.ge.0))\n oflag(aa)=0\n if (dimsizes(aa).eq.24) then ; calculate 24 h, so calculate 8hr ozone here\n do ih = 0, 16\n o8O3(ih) = avg(toO3(aa(ih:ih+7)))\n end do\n oO3(ii)=max(o8O3)\n end if\n o8O3 = -999.0\n delete(aa)\n aa=ind((oflag.gt.0).and.(toO3.ge.0))\n if (any(ismissing(aa))) then\n iflag=0\n else\n xx=array_append_record(olat,(/tolat(aa(0))/),0)\n yy=array_append_record(olon,(/tolon(aa(0))/),0)\n zz=array_append_record(oO3,(/-999.0/),0)\n delete([/olat,olon,oO3/])\n olat=xx\n olon=yy\n oO3=zz\n delete([/xx,yy,zz/])\n ii=ii+1\n end if\n delete(aa)\nend do\nprint(\"obs 8hour max end\")\naa=ind(oO3.ge.0)\nnobs=dimsizes(aa)\nolat24=olat(aa)\nolon24=olon(aa)\noO324=oO3(aa)\nprint(\"TYPE of oO324: \"+typeof(oO324))\ndelete([/aa,olat,olon,oO3/])\nmO324=oO324*0-999.0\nprint(\"TYPE of mO324: \"+typeof(mO324))\nprint(\"TYPE of mO31d: \"+typeof(mO31d))\nareaa=oO324*0-999.0\nareab=areaa\naread=areaa\n\n;-----find model point-----\ndo in=0,nobs-1\n dis=sqrt((mlat1d-olat24(in))^2+(mlon1d-olon24(in))^2)\n aa=minind(dis)\n ;print(in+\" \"+aa)\n if (dis(aa).lt.maxdist) then\n 
mO324(in)=mO31d(aa)\n cc=ind((mlat1d.ge.(olat24(in)-maxarea)).and.(mlat1d.le.(olat24(in)+maxarea)).and.\\\n (mlon1d.ge.(olon24(in)-maxarea)).and.(mlon1d.le.(olon24(in)+maxarea)))\n areaa(in)=0\n areab(in)=0\n if (oO324(in).ge.thd) then\n aread(in)=0\n if (max(mO31d(cc)).ge.thd) then\n areab(in)=1\n else\n aread(in)=1\n end if\n else\n bb=ind((olat24.ge.(olat24(in)-maxarea)).and.(olat24.le.(olat24(in)+maxarea)).and.\\\n (olon24.ge.(olon24(in)-maxarea)).and.(olon24.le.(olon24(in)+maxarea)))\n if (max(mO31d(aa)).ge.thd) then\n if (max(oO324(bb)).ge.thd) then\n areaa(in)=0\n else\n areaa(in)=1\n end if\n else\n areaa(in)=0\n end if\n delete(bb)\n end if\n delete(cc)\n end if\n delete(aa)\nend do\n\n;-----cal rmse corr nme nmb me mb-----\ntt=ind((mO324.ge.0).and.(oO324.ge.0))\n\nif (any(ismissing(tt))) then\n rmse=-999.0\n corr=-999.0\n nmb=-999.0\n nme=-999.0\n me=-999.0\n mb=-999.0\nelse\n rmse=dim_rmsd_n(oO324(tt),mO324(tt),0)\n corr=esccr(oO324(tt),mO324(tt),0)\n nmb=sum((mO324(tt)-oO324(tt)))/sum(oO324(tt))\n nme=sum(abs(oO324(tt)-mO324(tt)))/sum(oO324(tt))\n me=avg(abs(oO324(tt)-mO324(tt)))\n mb=avg((mO324(tt)-oO324(tt)))\nend if\n;-----cal ah afar-----\naa=ind((areaa+areab).gt.0)\nbb=ind((aread+areab).gt.0)\nif (any(ismissing(aa))) then\n afar=0.\nelse\n afar=tofloat(sum(areaa(aa)))/tofloat(sum(areab(aa))+sum(areaa(aa)))*100\nend if\ndelete(aa)\nif (any(ismissing(bb))) then\n ah=-999.0\nelse\n ah=tofloat(sum(areab(bb)))/tofloat(sum(areab(bb))+sum(aread(bb)))*100\nend if\ndelete(bb)\nwrite_table(wfname,\"a\",[/sdate,dimsizes(tt),avg(oO324(tt)),avg(mO324(tt)),rmse,corr,nmb,nme,mb,me,ah,afar/],\\\n \"%s,%i,%f,%f,%f,%f,%f,%f,%f,%f,%f,%f\")\ndelete(tt)\nend\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\n\n# convert -delay 100 *.png 20220613_20220614.gif", + "history_output" : "./geoweaver-9YGy3AeZt47p.sh: line 240: unexpected EOF while looking for matching `\"'\n./geoweaver-9YGy3AeZt47p.sh: line 249: syntax error: unexpected end of file\n", + "history_begin_time" : 1655752610899, + "history_end_time" : 1655765886049, + "history_notes" : null, + "history_process" : "fsk7f2", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "GMbsQxeEqffU", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=20220613 #This needs to be auto date\nexport wfname=\"geoweaver_evalution_results.txt\n\nexport obs_dir_NCL=\"/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X\"\nexport ofname=\"/AQF5X_Hourly_\"\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files/\"\n\nexport mfname=\"COMBINE3D_ACONC_v531_gcc_AQF5X_20220612_ML_extracted.nc\"\n\nexport grid_fname=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km/GRIDCRO2D_20220613.nc\" #This needs to be auto date\n\nexport dx=12000\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend 
setvalues\n\nbegin\nsdate=getenv(\"YYYYMMDD_POST\")\nwfname=getenv(\"wfname\")\nobs_dir=getenv(\"obs_dir_NCL\")\nofname=getenv(\"ofname\")\nmod_dir=getenv(\"postdata_dir\")\nmfname=getenv(\"mfname\")\ndkm=tofloat(getenv(\"dx\"))\ngrid_fname=(getenv(\"grid_fname\"))\n\nmaxdist=dkm/90000.0*1.414\nmaxarea=0.25\nthd=70\n\n;-----read model lat lon------\n;read lat lon\nf1 = addfile(grid_fname,\"r\")\nmlat = f1->LAT(0,0,:,:)\nmlon = f1->LON(0,0,:,:)\ndelete(f1)\nmlat1d = ndtooned(mlat)\nmlon1d = ndtooned(mlon)\ndelete([/mlat,mlon/])\n\n;-----read cmaq results-----\nf2 = addfile(mod_dir+mfname,\"r\")\nmO3 = f2->O3(:,:,:) ;ppb\n\n\nnt = dimsizes(mO3(:,0,0))\nny = dimsizes(mO3(0,:,0))\nnx = dimsizes(mO3(0,0,:))\n\nm8O3 = new((/17,ny,nx/),\"double\")\nm8maxO3 = new((/ny,nx/),\"double\")\n\ndo ih=0,16\n m8O3(ih,:,:)=dim_avg_n(mO3(ih:ih+7,:,:),0)\nend do\nm8maxO3 = dim_max_n(m8O3,0) ;type double\nmO31d_d=ndtooned(m8maxO3) ; type double\nmO31d=tofloat(mO31d_d)\n\ndelete([/f2,mO3,m8O3,m8maxO3/])\n\n;-----read obs-----\nsyyyy1=str_get_cols(sdate,0,3)\nsmm1=str_get_cols(sdate,4,5)\nsdd1=str_get_cols(sdate,6,7)\n\nymd=jul2greg(greg2jul(tointeger(syyyy1),tointeger(smm1),tointeger(sdd1),-1)+1)\nsyyyy2=tostring_with_format(ymd(0),\"%0.4i\")\nsmm2=tostring_with_format(ymd(1),\"%0.2i\")\nsdd2=tostring_with_format(ymd(2),\"%0.2i\")\n\ntolat=(/-999.0/) ;set the first data to 0\ntolon=tolat\ntoO3=tolat\n\ndo ih=12,35\n if (ih.lt.24) then\n shh=tostring_with_format(ih,\"%0.2i\")\n syyyy=syyyy1\n smm=smm1\n sdd=sdd1\n else\n shh=tostring_with_format(ih-24,\"%0.2i\")\n syyyy=syyyy2\n smm=smm2\n sdd=sdd2\n end if\n data=asciiread(obs_dir+ofname+syyyy+smm+sdd+shh+\".dat\",-1,\"string\")\n xx=array_append_record(tolat,stringtofloat(str_get_field(data(1::), 2,\",\")),0)\n yy=array_append_record(tolon,stringtofloat(str_get_field(data(1::), 3,\",\")),0)\n zz=array_append_record(toO3,stringtofloat(str_get_field(data(1::), 4,\",\")),0)\n delete([/tolat,tolon,toO3/])\n tolat=xx\n tolon=yy\n toO3=zz\n delete([/xx,yy,zz/])\n delete(data)\nend do\n\ntoO3@_FillValue = -999.0\n\n;-----calculate max ave 8 hour o3-----\noflag=tolat*0+1\naa=ind((oflag.gt.0).and.(toO3.ge.0))\nii=0\nprint(\"8h start\")\nif (any(ismissing(aa))) then\n iflag=0\nelse\n iflag=1\n olat=(/tolat(aa(0))/)\n olon=(/tolon(aa(0))/)\n oO3=(/-999.0/)\n o8O3 = new(17,\"float\")\n o8O3 = -999.0\nend if\ndelete(aa)\ndo while (iflag.gt.0)\n aa=ind((tolat.eq.olat(ii)).and.(tolon.eq.olon(ii)).and.(toO3.ge.0))\n oflag(aa)=0\n if (dimsizes(aa).eq.24) then ; calculate 24 h, so calculate 8hr ozone here\n do ih = 0, 16\n o8O3(ih) = avg(toO3(aa(ih:ih+7)))\n end do\n oO3(ii)=max(o8O3)\n end if\n o8O3 = -999.0\n delete(aa)\n aa=ind((oflag.gt.0).and.(toO3.ge.0))\n if (any(ismissing(aa))) then\n iflag=0\n else\n xx=array_append_record(olat,(/tolat(aa(0))/),0)\n yy=array_append_record(olon,(/tolon(aa(0))/),0)\n zz=array_append_record(oO3,(/-999.0/),0)\n delete([/olat,olon,oO3/])\n olat=xx\n olon=yy\n oO3=zz\n delete([/xx,yy,zz/])\n ii=ii+1\n end if\n delete(aa)\nend do\nprint(\"obs 8hour max end\")\naa=ind(oO3.ge.0)\nnobs=dimsizes(aa)\nolat24=olat(aa)\nolon24=olon(aa)\noO324=oO3(aa)\nprint(\"TYPE of oO324: \"+typeof(oO324))\ndelete([/aa,olat,olon,oO3/])\nmO324=oO324*0-999.0\nprint(\"TYPE of mO324: \"+typeof(mO324))\nprint(\"TYPE of mO31d: \"+typeof(mO31d))\nareaa=oO324*0-999.0\nareab=areaa\naread=areaa\n\n;-----find model point-----\ndo in=0,nobs-1\n dis=sqrt((mlat1d-olat24(in))^2+(mlon1d-olon24(in))^2)\n aa=minind(dis)\n ;print(in+\" \"+aa)\n if (dis(aa).lt.maxdist) then\n 
mO324(in)=mO31d(aa)\n cc=ind((mlat1d.ge.(olat24(in)-maxarea)).and.(mlat1d.le.(olat24(in)+maxarea)).and.\\\n (mlon1d.ge.(olon24(in)-maxarea)).and.(mlon1d.le.(olon24(in)+maxarea)))\n areaa(in)=0\n areab(in)=0\n if (oO324(in).ge.thd) then\n aread(in)=0\n if (max(mO31d(cc)).ge.thd) then\n areab(in)=1\n else\n aread(in)=1\n end if\n else\n bb=ind((olat24.ge.(olat24(in)-maxarea)).and.(olat24.le.(olat24(in)+maxarea)).and.\\\n (olon24.ge.(olon24(in)-maxarea)).and.(olon24.le.(olon24(in)+maxarea)))\n if (max(mO31d(aa)).ge.thd) then\n if (max(oO324(bb)).ge.thd) then\n areaa(in)=0\n else\n areaa(in)=1\n end if\n else\n areaa(in)=0\n end if\n delete(bb)\n end if\n delete(cc)\n end if\n delete(aa)\nend do\n\n;-----cal rmse corr nme nmb me mb-----\ntt=ind((mO324.ge.0).and.(oO324.ge.0))\n\nif (any(ismissing(tt))) then\n rmse=-999.0\n corr=-999.0\n nmb=-999.0\n nme=-999.0\n me=-999.0\n mb=-999.0\nelse\n rmse=dim_rmsd_n(oO324(tt),mO324(tt),0)\n corr=esccr(oO324(tt),mO324(tt),0)\n nmb=sum((mO324(tt)-oO324(tt)))/sum(oO324(tt))\n nme=sum(abs(oO324(tt)-mO324(tt)))/sum(oO324(tt))\n me=avg(abs(oO324(tt)-mO324(tt)))\n mb=avg((mO324(tt)-oO324(tt)))\nend if\n;-----cal ah afar-----\naa=ind((areaa+areab).gt.0)\nbb=ind((aread+areab).gt.0)\nif (any(ismissing(aa))) then\n afar=0.\nelse\n afar=tofloat(sum(areaa(aa)))/tofloat(sum(areab(aa))+sum(areaa(aa)))*100\nend if\ndelete(aa)\nif (any(ismissing(bb))) then\n ah=-999.0\nelse\n ah=tofloat(sum(areab(bb)))/tofloat(sum(areab(bb))+sum(aread(bb)))*100\nend if\ndelete(bb)\nwrite_table(wfname,\"a\",[/sdate,dimsizes(tt),avg(oO324(tt)),avg(mO324(tt)),rmse,corr,nmb,nme,mb,me,ah,afar/],\\\n \"%s,%i,%f,%f,%f,%f,%f,%f,%f,%f,%f,%f\")\ndelete(tt)\nend\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_eva_daily_O3.ncl\n\n# convert -delay 100 *.png 20220613_20220614.gif", + "history_output" : "./geoweaver-GMbsQxeEqffU.sh: line 240: unexpected EOF while looking for matching `\"'\n./geoweaver-GMbsQxeEqffU.sh: line 249: syntax error: unexpected end of file\n", + "history_begin_time" : 1655752538800, + "history_end_time" : 1655765885598, + "history_notes" : null, + "history_process" : "fsk7f2", + "host_id" : null, + "indicator" : "Stopped" +},] diff --git a/history/process_iicy7w.json b/history/process_iicy7w.json new file mode 100644 index 0000000..20ccb11 --- /dev/null +++ b/history/process_iicy7w.json @@ -0,0 +1,211 @@ +[{ + "history_id" : "7wczndrdr2h", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=$(date -d '3 day ago' '+%Y%m%d') #This needs to be auto date `date -d \"-2 day ${1}\" +%Y%m%d`\nexport stdate_post=$(date -d '3 day ago' '+%Y-%m-%d') #This needs to be auto date\nexport eddate_post=$(date -d '2 day ago' '+%Y-%m-%d') #This needs to be auto date\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files\"\nexport mcip_dir=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km\"\nexport dir_graph=\"/groups/ESS/aalnaim/cmaq/plots\"\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\n\ndate = getenv(\"YYYYMMDD_POST\")\nd1 = 
getenv(\"stdate_post\")\nd2 = getenv(\"eddate_post\")\n\n;print(\"Passed Date: \"+date)\n\n;aconc_dir = getenv(\"postdata_dir\")\ngrid_dir = getenv(\"mcip_dir\")\nplot_dir = getenv(\"dir_graph\")\n\ncdf_file1 = addfile(\"/groups/ESS/aalnaim/cmaq/prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_\"+date+\"_ML_extracted.nc\",\"r\")\ncdf_file= addfile(grid_dir+\"/GRIDCRO2D_\"+date+\".nc\",\"r\")\n\nptime = (/\"12\",\"13\",\"14\",\"15\",\"16\",\"17\",\"18\",\"19\",\"20\",\"21\",\"22\",\"23\",\"00\",\"01\",\"02\",\"03\",\"04\",\"05\",\"06\",\"07\",\"08\",\"09\",\"10\",\"11\"/)\n\ntime = cdf_file1->TFLAG(:,0,:)\no3 = cdf_file1->O3(:,:,:) ;ppb\n;pm25 = cdf_file1->PM25_TOT(:,0,:,:)\n\n\nnt = dimsizes(o3(:,0,0))\nny = dimsizes(o3(0,:,0))\nnx = dimsizes(o3(0,0,:))\n\nprint(nt+\" \"+ny+\" \"+nx)\nprint(max(o3))\nprint(min(o3))\nprint(avg(o3))\n\n;print(max(pm25))\n;print(min(pm25))\n;print(avg(pm25))\n\n;print(time)\n\nlat = cdf_file->LAT(0,0,:,:)\nlon = cdf_file->LON(0,0,:,:)\n\no3@lat2d = lat\no3@lon2d = lon\n\nres = True\nres@gsnMaximize = True ; maximize pot in frame\nres@gsnFrame = False ; don't advance frame\nres@gsnDraw = False\n;res@gsnSpreadColors = True\nres@lbLabelAutoStride = True\n;res@lbBoxLinesOn = False\nres@pmLabelBarHeightF = 0.1\nres@pmLabelBarWidthF = 0.5\nres@cnFillOn=True\n;res@cnMonoFillPattern=True\n;res@cnMonoLineColor=True\nres@cnLinesOn=False\n;res@pmLabelBarDisplayMode=\"never\"\nres@gsnLeftString = \"\";\nres@gsnRightString = \"\"\n\nres@mpLimitMode = \"LatLon\"\nres@mpMinLonF = -120 ;min(lon)+0.2\nres@mpMaxLonF = -70 ;max(lon)-0.2\nres@mpMinLatF = 25 ;min(lat)+0.05\nres@mpMaxLatF = 50 ;max(lat)-0.05\nres@mpDataBaseVersion = \"MediumRes\"\n;res@tiMainString = times(it)\nres@mpDataBaseVersion = \"MediumRes\"\nres@mpDataSetName = \"Earth..4\"\nres@mpAreaMaskingOn = True\nres@mpOutlineBoundarySets = \"GeophysicalAndUSStates\"\nres@mpOutlineSpecifiers=\"United States : States\"\nres@mpLandFillColor = \"white\"\nres@mpInlandWaterFillColor = \"white\"\nres@mpOceanFillColor = \"white\"\nres@mpGeophysicalLineColor = \"Black\"\nres@mpGeophysicalLineThicknessF = 1.5\n\n;res@gsnSpreadColors = True\nres@lbLabelAutoStride = True\nres@lbLabelFont = 25\nres@tiXAxisFont = 25\nres@pmTickMarkDisplayMode = \"Always\"\nres@tmXBLabelFont = 25\nres@tmXBLabelFontHeightF = 0.013\nres@tmXBLabelDeltaF = -0.5\nres@tmYLLabelFont = 25\nres@tmYLLabelFontHeightF = 0.013\nres@tmXBLabelDeltaF = -0.5\nres@tmXTLabelsOn = False\nres@tmXTLabelFont = 25\nres@tmXTLabelFontHeightF = 0.013\nres@tmYRLabelsOn = False\nres@tmYRLabelFont = 25\nres@tmYRLabelFontHeightF = 0.013\n\n\nres@mpProjection = \"LambertConformal\" ;\"CylindricalEquidistant\"\nres@mpLambertParallel1F = 33.\nres@mpLambertParallel2F = 45.\nres@mpLambertMeridianF = -98.\n\nres@cnLevelSelectionMode = \"ManualLevels\"\nres@cnMinLevelValF = 0.\nres@cnMaxLevelValF = 80\nres@cnLevelSpacingF = 4\n\nres@txFont = \"times-roman\"\nres@tiMainFont = \"times-roman\"\n\ndo it = 0, nt-1\n if (it .lt. 
12) then\n pdate=d1\n else\n pdate=d2\n end if\n\n pname=plot_dir+\"/testPlot_\"+pdate+\"_\"+ptime(it)\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n res@tiMainString = pdate+\" \"+ptime(it)+\" UTC O~B~3~N~ Forecast (ppbV)\"\n plot = gsn_csm_contour_map(wks,o3(it,:,:),res)\n draw(plot)\n frame(wks)\n delete(wks)\n system(\"composite -geometry 100x70+900+900 /groups/ESS/aalnaim/cmaq/mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\nend do\ndelete(res)\n\nend\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\n\n# convert -delay 100 *.png 20220613_20220614.gif\nconvert -delay 100 /groups/ESS/aalnaim/cmaq/plots/testPlot*.png /groups/ESS/aalnaim/cmaq/plots/\"Map_\"$YYYYMMDD_POST.gif\n\nif [ $? -eq 0 ]; then\n echo \"Generating images/gif Completed Successfully\"\n\techo \"Removing ncl file: geoweaver_plot_daily_O3.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\nelse\n echo \"Generating images/gif Failed!\"\n echo \"Removing ncl file: geoweaver_plot_daily_O3.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\nfi\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\n(0)\t24 265 442\n(0)\t85.06\n(0)\t4.506333333333333\n(0)\t30.63602873848283\nGenerating images/gif Completed Successfully\nRemoving ncl file: geoweaver_plot_daily_O3.ncl...\n", + "history_begin_time" : 1656459810867, + "history_end_time" : 1656460094816, + "history_notes" : null, + "history_process" : "iicy7w", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "gbutef3h8ad", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=$(date -d '3 day ago' '+%Y%m%d') #This needs to be auto date `date -d \"-2 day ${1}\" +%Y%m%d`\nexport stdate_post=$(date -d '3 day ago' '+%Y-%m-%d') #This needs to be auto date\nexport eddate_post=$(date -d '2 day ago' '+%Y-%m-%d') #This needs to be auto date\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files\"\nexport mcip_dir=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km\"\nexport dir_graph=\"/groups/ESS/aalnaim/cmaq/plots\"\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\n\ndate = getenv(\"YYYYMMDD_POST\")\nd1 = getenv(\"stdate_post\")\nd2 = getenv(\"eddate_post\")\n\n;print(\"Passed Date: \"+date)\n\n;aconc_dir = getenv(\"postdata_dir\")\ngrid_dir = getenv(\"mcip_dir\")\nplot_dir = getenv(\"dir_graph\")\n\ncdf_file1 = addfile(\"/groups/ESS/aalnaim/cmaq/prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_\"+date+\"_ML_extracted.nc\",\"r\")\ncdf_file= addfile(grid_dir+\"/GRIDCRO2D_\"+date+\".nc\",\"r\")\n\nptime = (/\"12\",\"13\",\"14\",\"15\",\"16\",\"17\",\"18\",\"19\",\"20\",\"21\",\"22\",\"23\",\"00\",\"01\",\"02\",\"03\",\"04\",\"05\",\"06\",\"07\",\"08\",\"09\",\"10\",\"11\"/)\n\ntime = 
cdf_file1->TFLAG(:,0,:)\no3 = cdf_file1->O3(:,:,:) ;ppb\n;pm25 = cdf_file1->PM25_TOT(:,0,:,:)\n\n\nnt = dimsizes(o3(:,0,0))\nny = dimsizes(o3(0,:,0))\nnx = dimsizes(o3(0,0,:))\n\nprint(nt+\" \"+ny+\" \"+nx)\nprint(max(o3))\nprint(min(o3))\nprint(avg(o3))\n\n;print(max(pm25))\n;print(min(pm25))\n;print(avg(pm25))\n\n;print(time)\n\nlat = cdf_file->LAT(0,0,:,:)\nlon = cdf_file->LON(0,0,:,:)\n\no3@lat2d = lat\no3@lon2d = lon\n\nres = True\nres@gsnMaximize = True ; maximize pot in frame\nres@gsnFrame = False ; don't advance frame\nres@gsnDraw = False\n;res@gsnSpreadColors = True\nres@lbLabelAutoStride = True\n;res@lbBoxLinesOn = False\nres@pmLabelBarHeightF = 0.1\nres@pmLabelBarWidthF = 0.5\nres@cnFillOn=True\n;res@cnMonoFillPattern=True\n;res@cnMonoLineColor=True\nres@cnLinesOn=False\n;res@pmLabelBarDisplayMode=\"never\"\nres@gsnLeftString = \"\";\nres@gsnRightString = \"\"\n\nres@mpLimitMode = \"LatLon\"\nres@mpMinLonF = -120 ;min(lon)+0.2\nres@mpMaxLonF = -70 ;max(lon)-0.2\nres@mpMinLatF = 25 ;min(lat)+0.05\nres@mpMaxLatF = 50 ;max(lat)-0.05\nres@mpDataBaseVersion = \"MediumRes\"\n;res@tiMainString = times(it)\nres@mpDataBaseVersion = \"MediumRes\"\nres@mpDataSetName = \"Earth..4\"\nres@mpAreaMaskingOn = True\nres@mpOutlineBoundarySets = \"GeophysicalAndUSStates\"\nres@mpOutlineSpecifiers=\"United States : States\"\nres@mpLandFillColor = \"white\"\nres@mpInlandWaterFillColor = \"white\"\nres@mpOceanFillColor = \"white\"\nres@mpGeophysicalLineColor = \"Black\"\nres@mpGeophysicalLineThicknessF = 1.5\n\n;res@gsnSpreadColors = True\nres@lbLabelAutoStride = True\nres@lbLabelFont = 25\nres@tiXAxisFont = 25\nres@pmTickMarkDisplayMode = \"Always\"\nres@tmXBLabelFont = 25\nres@tmXBLabelFontHeightF = 0.013\nres@tmXBLabelDeltaF = -0.5\nres@tmYLLabelFont = 25\nres@tmYLLabelFontHeightF = 0.013\nres@tmXBLabelDeltaF = -0.5\nres@tmXTLabelsOn = False\nres@tmXTLabelFont = 25\nres@tmXTLabelFontHeightF = 0.013\nres@tmYRLabelsOn = False\nres@tmYRLabelFont = 25\nres@tmYRLabelFontHeightF = 0.013\n\n\nres@mpProjection = \"LambertConformal\" ;\"CylindricalEquidistant\"\nres@mpLambertParallel1F = 33.\nres@mpLambertParallel2F = 45.\nres@mpLambertMeridianF = -98.\n\nres@cnLevelSelectionMode = \"ManualLevels\"\nres@cnMinLevelValF = 0.\nres@cnMaxLevelValF = 80\nres@cnLevelSpacingF = 4\n\nres@txFont = \"times-roman\"\nres@tiMainFont = \"times-roman\"\n\ndo it = 0, nt-1\n if (it .lt. 12) then\n pdate=d1\n else\n pdate=d2\n end if\n\n pname=plot_dir+\"/testPlot_\"+pdate+\"_\"+ptime(it)\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n res@tiMainString = pdate+\" \"+ptime(it)+\" UTC O~B~3~N~ Forecast (ppbV)\"\n plot = gsn_csm_contour_map(wks,o3(it,:,:),res)\n draw(plot)\n frame(wks)\n delete(wks)\n system(\"composite -geometry 100x70+900+900 /groups/ESS/aalnaim/cmaq/mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\nend do\ndelete(res)\n\nend\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\n\n# convert -delay 100 *.png 20220613_20220614.gif\nconvert -delay 100 /groups/ESS/aalnaim/cmaq/plots/testPlot*.png /groups/ESS/aalnaim/cmaq/plots/\"Map_\"$YYYYMMDD_POST.gif\n\nif [ $? 
-eq 0 ]; then\n echo \"Generating images/gif Completed Successfully\"\n\techo \"Removing ncl file: geoweaver_plot_daily_O3.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\nelse\n echo \"Generating images/gif Failed!\"\n echo \"Removing ncl file: geoweaver_plot_daily_O3.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\nfi\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\n(0)\t24 265 442\n(0)\t85.06\n(0)\t4.506333333333333\n(0)\t30.63602873848283\nGenerating images/gif Completed Successfully\nRemoving ncl file: geoweaver_plot_daily_O3.ncl...\n", + "history_begin_time" : 1656455485471, + "history_end_time" : 1656455761971, + "history_notes" : null, + "history_process" : "iicy7w", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "hmz4r94bwgu", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=$(date -d '3 day ago' '+%Y%m%d') #This needs to be auto date `date -d \"-2 day ${1}\" +%Y%m%d`\nexport stdate_post=$(date -d '3 day ago' '+%Y-%m-%d') #This needs to be auto date\nexport eddate_post=$(date -d '2 day ago' '+%Y%m%d') #This needs to be auto date\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files\"\nexport mcip_dir=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km\"\nexport dir_graph=\"/groups/ESS/aalnaim/cmaq/plots\"\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\n\ndate = getenv(\"YYYYMMDD_POST\")\nd1 = getenv(\"stdate_post\")\nd2 = getenv(\"eddate_post\")\n\n;print(\"Passed Date: \"+date)\n\n;aconc_dir = getenv(\"postdata_dir\")\ngrid_dir = getenv(\"mcip_dir\")\nplot_dir = getenv(\"dir_graph\")\n\ncdf_file1 = addfile(\"/groups/ESS/aalnaim/cmaq/prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_\"+date+\"_ML_extracted.nc\",\"r\")\ncdf_file= addfile(grid_dir+\"/GRIDCRO2D_\"+date+\".nc\",\"r\")\n\nptime = (/\"12\",\"13\",\"14\",\"15\",\"16\",\"17\",\"18\",\"19\",\"20\",\"21\",\"22\",\"23\",\"00\",\"01\",\"02\",\"03\",\"04\",\"05\",\"06\",\"07\",\"08\",\"09\",\"10\",\"11\"/)\n\ntime = cdf_file1->TFLAG(:,0,:)\no3 = cdf_file1->O3(:,:,:) ;ppb\n;pm25 = cdf_file1->PM25_TOT(:,0,:,:)\n\n\nnt = dimsizes(o3(:,0,0))\nny = dimsizes(o3(0,:,0))\nnx = dimsizes(o3(0,0,:))\n\nprint(nt+\" \"+ny+\" \"+nx)\nprint(max(o3))\nprint(min(o3))\nprint(avg(o3))\n\n;print(max(pm25))\n;print(min(pm25))\n;print(avg(pm25))\n\n;print(time)\n\nlat = cdf_file->LAT(0,0,:,:)\nlon = cdf_file->LON(0,0,:,:)\n\no3@lat2d = lat\no3@lon2d = lon\n\nres = True\nres@gsnMaximize = True ; maximize pot in frame\nres@gsnFrame = False ; don't advance frame\nres@gsnDraw = False\n;res@gsnSpreadColors = True\nres@lbLabelAutoStride = True\n;res@lbBoxLinesOn = False\nres@pmLabelBarHeightF = 0.1\nres@pmLabelBarWidthF = 
0.5\nres@cnFillOn=True\n;res@cnMonoFillPattern=True\n;res@cnMonoLineColor=True\nres@cnLinesOn=False\n;res@pmLabelBarDisplayMode=\"never\"\nres@gsnLeftString = \"\";\nres@gsnRightString = \"\"\n\nres@mpLimitMode = \"LatLon\"\nres@mpMinLonF = -120 ;min(lon)+0.2\nres@mpMaxLonF = -70 ;max(lon)-0.2\nres@mpMinLatF = 25 ;min(lat)+0.05\nres@mpMaxLatF = 50 ;max(lat)-0.05\nres@mpDataBaseVersion = \"MediumRes\"\n;res@tiMainString = times(it)\nres@mpDataBaseVersion = \"MediumRes\"\nres@mpDataSetName = \"Earth..4\"\nres@mpAreaMaskingOn = True\nres@mpOutlineBoundarySets = \"GeophysicalAndUSStates\"\nres@mpOutlineSpecifiers=\"United States : States\"\nres@mpLandFillColor = \"white\"\nres@mpInlandWaterFillColor = \"white\"\nres@mpOceanFillColor = \"white\"\nres@mpGeophysicalLineColor = \"Black\"\nres@mpGeophysicalLineThicknessF = 1.5\n\n;res@gsnSpreadColors = True\nres@lbLabelAutoStride = True\nres@lbLabelFont = 25\nres@tiXAxisFont = 25\nres@pmTickMarkDisplayMode = \"Always\"\nres@tmXBLabelFont = 25\nres@tmXBLabelFontHeightF = 0.013\nres@tmXBLabelDeltaF = -0.5\nres@tmYLLabelFont = 25\nres@tmYLLabelFontHeightF = 0.013\nres@tmXBLabelDeltaF = -0.5\nres@tmXTLabelsOn = False\nres@tmXTLabelFont = 25\nres@tmXTLabelFontHeightF = 0.013\nres@tmYRLabelsOn = False\nres@tmYRLabelFont = 25\nres@tmYRLabelFontHeightF = 0.013\n\n\nres@mpProjection = \"LambertConformal\" ;\"CylindricalEquidistant\"\nres@mpLambertParallel1F = 33.\nres@mpLambertParallel2F = 45.\nres@mpLambertMeridianF = -98.\n\nres@cnLevelSelectionMode = \"ManualLevels\"\nres@cnMinLevelValF = 0.\nres@cnMaxLevelValF = 80\nres@cnLevelSpacingF = 4\n\nres@txFont = \"times-roman\"\nres@tiMainFont = \"times-roman\"\n\ndo it = 0, nt-1\n if (it .lt. 12) then\n pdate=d1\n else\n pdate=d2\n end if\n\n pname=plot_dir+\"/testPlot_\"+pdate+\"_\"+ptime(it)\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n res@tiMainString = pdate+\" \"+ptime(it)+\" UTC O~B~3~N~ Forecast (ppbV)\"\n plot = gsn_csm_contour_map(wks,o3(it,:,:),res)\n draw(plot)\n frame(wks)\n delete(wks)\n system(\"composite -geometry 100x70+900+900 /groups/ESS/aalnaim/cmaq/mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\nend do\ndelete(res)\n\nend\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\n\n# convert -delay 100 *.png 20220613_20220614.gif\nconvert -delay 100 /groups/ESS/aalnaim/cmaq/plots/testPlot*.png /groups/ESS/aalnaim/cmaq/plots/Map_$YYYYMMDD_POST_correct.gif\n\nif [ $? 
-eq 0 ]; then\n echo \"Generating images/gif Completed Successfully\"\n\techo \"Removing ncl file: geoweaver_plot_daily_O3.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\nelse\n echo \"Generating images/gif Failed!\"\n echo \"Removing ncl file: geoweaver_plot_daily_O3.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\nfi\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\n(0)\t24 265 442\n(0)\t85.06\n(0)\t4.506333333333333\n(0)\t30.63602873848283\nGenerating images/gif Completed Successfully\nRemoving ncl file: geoweaver_plot_daily_O3.ncl...\n", + "history_begin_time" : 1656454294420, + "history_end_time" : 1656454573066, + "history_notes" : null, + "history_process" : "iicy7w", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "71vxoc5s3s6", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=$(date -d '3 day ago' '+%Y%m%d') #This needs to be auto date `date -d \"-2 day ${1}\" +%Y%m%d`\nexport stdate_post=$(date -d '3 day ago' '+%Y-%m-%d') #This needs to be auto date\nexport eddate_post=$(date -d '2 day ago' '+%Y%m%d') #This needs to be auto date\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files\"\nexport mcip_dir=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km\"\nexport dir_graph=\"/groups/ESS/aalnaim/cmaq/plots\"\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\n\ndate = getenv(\"YYYYMMDD_POST\")\nd1 = getenv(\"stdate_post\")\nd2 = getenv(\"eddate_post\")\n\n;print(\"Passed Date: \"+date)\n\n;aconc_dir = getenv(\"postdata_dir\")\ngrid_dir = getenv(\"mcip_dir\")\nplot_dir = getenv(\"dir_graph\")\n\ncdf_file1 = addfile(\"/groups/ESS/aalnaim/cmaq/prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_\"+date+\"_ML_extracted.nc\",\"r\")\ncdf_file= addfile(grid_dir+\"/GRIDCRO2D_\"+date+\".nc\",\"r\")\n\nptime = (/\"12\",\"13\",\"14\",\"15\",\"16\",\"17\",\"18\",\"19\",\"20\",\"21\",\"22\",\"23\",\"00\",\"01\",\"02\",\"03\",\"04\",\"05\",\"06\",\"07\",\"08\",\"09\",\"10\",\"11\"/)\n\ntime = cdf_file1->TFLAG(:,0,:)\no3 = cdf_file1->O3(:,:,:) ;ppb\n;pm25 = cdf_file1->PM25_TOT(:,0,:,:)\n\n\nnt = dimsizes(o3(:,0,0))\nny = dimsizes(o3(0,:,0))\nnx = dimsizes(o3(0,0,:))\n\nprint(nt+\" \"+ny+\" \"+nx)\nprint(max(o3))\nprint(min(o3))\nprint(avg(o3))\n\n;print(max(pm25))\n;print(min(pm25))\n;print(avg(pm25))\n\n;print(time)\n\nlat = cdf_file->LAT(0,0,:,:)\nlon = cdf_file->LON(0,0,:,:)\n\no3@lat2d = lat\no3@lon2d = lon\n\nres = True\nres@gsnMaximize = True ; maximize pot in frame\nres@gsnFrame = False ; don't advance frame\nres@gsnDraw = False\n;res@gsnSpreadColors = True\nres@lbLabelAutoStride = True\n;res@lbBoxLinesOn = False\nres@pmLabelBarHeightF = 0.1\nres@pmLabelBarWidthF = 
0.5\nres@cnFillOn=True\n;res@cnMonoFillPattern=True\n;res@cnMonoLineColor=True\nres@cnLinesOn=False\n;res@pmLabelBarDisplayMode=\"never\"\nres@gsnLeftString = \"\";\nres@gsnRightString = \"\"\n\nres@mpLimitMode = \"LatLon\"\nres@mpMinLonF = -120 ;min(lon)+0.2\nres@mpMaxLonF = -70 ;max(lon)-0.2\nres@mpMinLatF = 25 ;min(lat)+0.05\nres@mpMaxLatF = 50 ;max(lat)-0.05\nres@mpDataBaseVersion = \"MediumRes\"\n;res@tiMainString = times(it)\nres@mpDataBaseVersion = \"MediumRes\"\nres@mpDataSetName = \"Earth..4\"\nres@mpAreaMaskingOn = True\nres@mpOutlineBoundarySets = \"GeophysicalAndUSStates\"\nres@mpOutlineSpecifiers=\"United States : States\"\nres@mpLandFillColor = \"white\"\nres@mpInlandWaterFillColor = \"white\"\nres@mpOceanFillColor = \"white\"\nres@mpGeophysicalLineColor = \"Black\"\nres@mpGeophysicalLineThicknessF = 1.5\n\n;res@gsnSpreadColors = True\nres@lbLabelAutoStride = True\nres@lbLabelFont = 25\nres@tiXAxisFont = 25\nres@pmTickMarkDisplayMode = \"Always\"\nres@tmXBLabelFont = 25\nres@tmXBLabelFontHeightF = 0.013\nres@tmXBLabelDeltaF = -0.5\nres@tmYLLabelFont = 25\nres@tmYLLabelFontHeightF = 0.013\nres@tmXBLabelDeltaF = -0.5\nres@tmXTLabelsOn = False\nres@tmXTLabelFont = 25\nres@tmXTLabelFontHeightF = 0.013\nres@tmYRLabelsOn = False\nres@tmYRLabelFont = 25\nres@tmYRLabelFontHeightF = 0.013\n\n\nres@mpProjection = \"LambertConformal\" ;\"CylindricalEquidistant\"\nres@mpLambertParallel1F = 33.\nres@mpLambertParallel2F = 45.\nres@mpLambertMeridianF = -98.\n\nres@cnLevelSelectionMode = \"ManualLevels\"\nres@cnMinLevelValF = 0.\nres@cnMaxLevelValF = 80\nres@cnLevelSpacingF = 4\n\nres@txFont = \"times-roman\"\nres@tiMainFont = \"times-roman\"\n\ndo it = 0, nt-1\n if (it .lt. 12) then\n pdate=d1\n else\n pdate=d2\n end if\n\n pname=plot_dir+\"/testPlot_\"+pdate+\"_\"+ptime(it)\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n res@tiMainString = pdate+\" \"+ptime(it)+\" UTC O~B~3~N~ Forecast (ppbV)\"\n plot = gsn_csm_contour_map(wks,o3(it,:,:),res)\n draw(plot)\n frame(wks)\n delete(wks)\n system(\"composite -geometry 100x70+900+900 /groups/ESS/aalnaim/cmaq/mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\nend do\ndelete(res)\n\nend\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\n\n# convert -delay 100 *.png 20220613_20220614.gif\nconvert -delay 100 /groups/ESS/aalnaim/cmaq/plots/testPlot*.png /groups/ESS/aalnaim/cmaq/plots/Map_$YYYYMMDD_POST_correct.gif\n\nif [ $? 
-eq 0 ]; then\n echo \"Generating images/gif Completed Successfully\"\n\techo \"Removing ncl file: geoweaver_plot_daily_O3.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\nelse\n echo \"Generating images/gif Failed!\"\n echo \"Removing ncl file: geoweaver_plot_daily_O3.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\nfi\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\n(0)\t24 265 442\n(0)\t85.06\n(0)\t4.506333333333333\n(0)\t30.63602873848283\n(0)\tgsn_open_wks: Error: directory '/groups/ESS/aalnaim/cmaq/plots/' does not exist or lacks write permissions.\nconvert: unable to open image `/groups/ESS/aalnaim/cmaq/plots/testPlot*.png': No such file or directory @ error/blob.c/OpenBlob/2881.\nconvert: no images defined `/groups/ESS/aalnaim/cmaq/plots/Map_.gif' @ error/convert.c/ConvertImageCommand/3226.\nGenerating images/gif Failed!\nRemoving ncl file: geoweaver_plot_daily_O3.ncl...\n", + "history_begin_time" : 1656454195167, + "history_end_time" : 1656454202119, + "history_notes" : null, + "history_process" : "iicy7w", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "gopwj98w0pu", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=$(date -d '3 day ago' '+%Y%m%d') #This needs to be auto date `date -d \"-2 day ${1}\" +%Y%m%d`\nexport stdate_post=$(date -d '3 day ago' '+%Y-%m-%d') #This needs to be auto date\nexport eddate_post=$(date -d '2 day ago' '+%Y%m%d') #This needs to be auto date\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files\"\nexport mcip_dir=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km\"\nexport dir_graph=\"/groups/ESS/aalnaim/cmaq/plots\"\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\n\ndate = getenv(\"YYYYMMDD_POST\")\nd1 = getenv(\"stdate_post\")\nd2 = getenv(\"eddate_post\")\n\n;print(\"Passed Date: \"+date)\n\n;aconc_dir = getenv(\"postdata_dir\")\ngrid_dir = getenv(\"mcip_dir\")\nplot_dir = getenv(\"dir_graph\")\n\ncdf_file1 = addfile(\"/groups/ESS/aalnaim/cmaq/prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_\"+date+\"_ML_extracted.nc\",\"r\")\ncdf_file= addfile(grid_dir+\"/GRIDCRO2D_\"+date+\".nc\",\"r\")\n\nptime = (/\"12\",\"13\",\"14\",\"15\",\"16\",\"17\",\"18\",\"19\",\"20\",\"21\",\"22\",\"23\",\"00\",\"01\",\"02\",\"03\",\"04\",\"05\",\"06\",\"07\",\"08\",\"09\",\"10\",\"11\"/)\n\ntime = cdf_file1->TFLAG(:,0,:)\no3 = cdf_file1->O3(:,:,:) ;ppb\n;pm25 = cdf_file1->PM25_TOT(:,0,:,:)\n\n\nnt = dimsizes(o3(:,0,0))\nny = dimsizes(o3(0,:,0))\nnx = dimsizes(o3(0,0,:))\n\nprint(nt+\" \"+ny+\" \"+nx)\nprint(max(o3))\nprint(min(o3))\nprint(avg(o3))\n\n;print(max(pm25))\n;print(min(pm25))\n;print(avg(pm25))\n\n;print(time)\n\nlat = cdf_file->LAT(0,0,:,:)\nlon = cdf_file->LON(0,0,:,:)\n\no3@lat2d = lat\no3@lon2d = 
lon\n\nres = True\nres@gsnMaximize = True ; maximize pot in frame\nres@gsnFrame = False ; don't advance frame\nres@gsnDraw = False\n;res@gsnSpreadColors = True\nres@lbLabelAutoStride = True\n;res@lbBoxLinesOn = False\nres@pmLabelBarHeightF = 0.1\nres@pmLabelBarWidthF = 0.5\nres@cnFillOn=True\n;res@cnMonoFillPattern=True\n;res@cnMonoLineColor=True\nres@cnLinesOn=False\n;res@pmLabelBarDisplayMode=\"never\"\nres@gsnLeftString = \"\";\nres@gsnRightString = \"\"\n\nres@mpLimitMode = \"LatLon\"\nres@mpMinLonF = -120 ;min(lon)+0.2\nres@mpMaxLonF = -70 ;max(lon)-0.2\nres@mpMinLatF = 25 ;min(lat)+0.05\nres@mpMaxLatF = 50 ;max(lat)-0.05\nres@mpDataBaseVersion = \"MediumRes\"\n;res@tiMainString = times(it)\nres@mpDataBaseVersion = \"MediumRes\"\nres@mpDataSetName = \"Earth..4\"\nres@mpAreaMaskingOn = True\nres@mpOutlineBoundarySets = \"GeophysicalAndUSStates\"\nres@mpOutlineSpecifiers=\"United States : States\"\nres@mpLandFillColor = \"white\"\nres@mpInlandWaterFillColor = \"white\"\nres@mpOceanFillColor = \"white\"\nres@mpGeophysicalLineColor = \"Black\"\nres@mpGeophysicalLineThicknessF = 1.5\n\n;res@gsnSpreadColors = True\nres@lbLabelAutoStride = True\nres@lbLabelFont = 25\nres@tiXAxisFont = 25\nres@pmTickMarkDisplayMode = \"Always\"\nres@tmXBLabelFont = 25\nres@tmXBLabelFontHeightF = 0.013\nres@tmXBLabelDeltaF = -0.5\nres@tmYLLabelFont = 25\nres@tmYLLabelFontHeightF = 0.013\nres@tmXBLabelDeltaF = -0.5\nres@tmXTLabelsOn = False\nres@tmXTLabelFont = 25\nres@tmXTLabelFontHeightF = 0.013\nres@tmYRLabelsOn = False\nres@tmYRLabelFont = 25\nres@tmYRLabelFontHeightF = 0.013\n\n\nres@mpProjection = \"LambertConformal\" ;\"CylindricalEquidistant\"\nres@mpLambertParallel1F = 33.\nres@mpLambertParallel2F = 45.\nres@mpLambertMeridianF = -98.\n\nres@cnLevelSelectionMode = \"ManualLevels\"\nres@cnMinLevelValF = 0.\nres@cnMaxLevelValF = 80\nres@cnLevelSpacingF = 4\n\nres@txFont = \"times-roman\"\nres@tiMainFont = \"times-roman\"\n\ndo it = 0, nt-1\n if (it .lt. 12) then\n pdate=d1\n else\n pdate=d2\n end if\n\n pname=plot_dir+\"/testPlot_\"+pdate+\"_\"+ptime(it)\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n res@tiMainString = pdate+\" \"+ptime(it)+\" UTC O~B~3~N~ Forecast (ppbV)\"\n plot = gsn_csm_contour_map(wks,o3(it,:,:),res)\n draw(plot)\n frame(wks)\n delete(wks)\n system(\"composite -geometry 100x70+900+900 /groups/ESS/aalnaim/cmaq/mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\nend do\ndelete(res)\n\nend\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\n\n# convert -delay 100 *.png 20220613_20220614.gif\nconvert -delay 100 /groups/ESS/aalnaim/cmaq/plots/testPlot*.png /groups/ESS/aalnaim/cmaq/plots/Map_$YYYYMMDD_POST_correct.gif\n\nif [ $? 
-eq 0 ]; then\n echo \"Generating images/gif Completed Successfully\"\n\techo \"Removing ncl file: geoweaver_plot_daily_O3.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\nelse\n echo \"Generating images/gif Failed!\"\n echo \"Removing ncl file: geoweaver_plot_daily_O3.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\nfi\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\nwarning:_NclOpenFile: cannot open file ; No such file or directory\n\n\nfatal:file (cdf_file1) isn't defined\n\nfatal:[\"Execute.c\":8637]:Execute: Error occurred at or near line 26 in file /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\n\n\nconvert: unable to open image `/groups/ESS/aalnaim/cmaq/plots/testPlot*.png': No such file or directory @ error/blob.c/OpenBlob/2881.\nconvert: no images defined `/groups/ESS/aalnaim/cmaq/plots/Map_.gif' @ error/convert.c/ConvertImageCommand/3226.\nGenerating images/gif Failed!\nRemoving ncl file: geoweaver_plot_daily_O3.ncl...\n", + "history_begin_time" : 1656428429051, + "history_end_time" : 1656428434766, + "history_notes" : null, + "history_process" : "iicy7w", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "325HEkx1Krvs", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=$(date -d '3 day ago' '+%Y%m%d') #This needs to be auto date `date -d \"-2 day ${1}\" +%Y%m%d`\nexport stdate_post=$(date -d '3 day ago' '+%Y-%m-%d') #This needs to be auto date\nexport eddate_post=$(date -d '2 day ago' '+%Y%m%d') #This needs to be auto date\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files\"\nexport mcip_dir=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km\"\nexport dir_graph=\"/groups/ESS/aalnaim/cmaq/plots\"\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\n\ndate = getenv(\"YYYYMMDD_POST\")\nd1 = getenv(\"stdate_post\")\nd2 = getenv(\"eddate_post\")\n\n;print(\"Passed Date: \"+date)\n\n;aconc_dir = getenv(\"postdata_dir\")\ngrid_dir = getenv(\"mcip_dir\")\nplot_dir = getenv(\"dir_graph\")\n\ncdf_file1 = addfile(\"/groups/ESS/aalnaim/cmaq/prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_20220612_ML_extracted.nc\",\"r\")\ncdf_file= addfile(grid_dir+\"/GRIDCRO2D_\"+date+\".nc\",\"r\")\n\nptime = (/\"12\",\"13\",\"14\",\"15\",\"16\",\"17\",\"18\",\"19\",\"20\",\"21\",\"22\",\"23\",\"00\",\"01\",\"02\",\"03\",\"04\",\"05\",\"06\",\"07\",\"08\",\"09\",\"10\",\"11\"/)\n\ntime = cdf_file1->TFLAG(:,0,:)\no3 = cdf_file1->O3(:,:,:) ;ppb\n;pm25 = cdf_file1->PM25_TOT(:,0,:,:)\n\n\nnt = dimsizes(o3(:,0,0))\nny = dimsizes(o3(0,:,0))\nnx = dimsizes(o3(0,0,:))\n\nprint(nt+\" \"+ny+\" \"+nx)\nprint(max(o3))\nprint(min(o3))\nprint(avg(o3))\n\n;print(max(pm25))\n;print(min(pm25))\n;print(avg(pm25))\n\n;print(time)\n\nlat = cdf_file->LAT(0,0,:,:)\nlon = 
cdf_file->LON(0,0,:,:)\n\no3@lat2d = lat\no3@lon2d = lon\n\nres = True\nres@gsnMaximize = True ; maximize pot in frame\nres@gsnFrame = False ; don't advance frame\nres@gsnDraw = False\n;res@gsnSpreadColors = True\nres@lbLabelAutoStride = True\n;res@lbBoxLinesOn = False\nres@pmLabelBarHeightF = 0.1\nres@pmLabelBarWidthF = 0.5\nres@cnFillOn=True\n;res@cnMonoFillPattern=True\n;res@cnMonoLineColor=True\nres@cnLinesOn=False\n;res@pmLabelBarDisplayMode=\"never\"\nres@gsnLeftString = \"\";\nres@gsnRightString = \"\"\n\nres@mpLimitMode = \"LatLon\"\nres@mpMinLonF = -120 ;min(lon)+0.2\nres@mpMaxLonF = -70 ;max(lon)-0.2\nres@mpMinLatF = 25 ;min(lat)+0.05\nres@mpMaxLatF = 50 ;max(lat)-0.05\nres@mpDataBaseVersion = \"MediumRes\"\n;res@tiMainString = times(it)\nres@mpDataBaseVersion = \"MediumRes\"\nres@mpDataSetName = \"Earth..4\"\nres@mpAreaMaskingOn = True\nres@mpOutlineBoundarySets = \"GeophysicalAndUSStates\"\nres@mpOutlineSpecifiers=\"United States : States\"\nres@mpLandFillColor = \"white\"\nres@mpInlandWaterFillColor = \"white\"\nres@mpOceanFillColor = \"white\"\nres@mpGeophysicalLineColor = \"Black\"\nres@mpGeophysicalLineThicknessF = 1.5\n\n;res@gsnSpreadColors = True\nres@lbLabelAutoStride = True\nres@lbLabelFont = 25\nres@tiXAxisFont = 25\nres@pmTickMarkDisplayMode = \"Always\"\nres@tmXBLabelFont = 25\nres@tmXBLabelFontHeightF = 0.013\nres@tmXBLabelDeltaF = -0.5\nres@tmYLLabelFont = 25\nres@tmYLLabelFontHeightF = 0.013\nres@tmXBLabelDeltaF = -0.5\nres@tmXTLabelsOn = False\nres@tmXTLabelFont = 25\nres@tmXTLabelFontHeightF = 0.013\nres@tmYRLabelsOn = False\nres@tmYRLabelFont = 25\nres@tmYRLabelFontHeightF = 0.013\n\n\nres@mpProjection = \"LambertConformal\" ;\"CylindricalEquidistant\"\nres@mpLambertParallel1F = 33.\nres@mpLambertParallel2F = 45.\nres@mpLambertMeridianF = -98.\n\nres@cnLevelSelectionMode = \"ManualLevels\"\nres@cnMinLevelValF = 0.\nres@cnMaxLevelValF = 80\nres@cnLevelSpacingF = 4\n\nres@txFont = \"times-roman\"\nres@tiMainFont = \"times-roman\"\n\ndo it = 0, nt-1\n if (it .lt. 12) then\n pdate=d1\n else\n pdate=d2\n end if\n\n pname=plot_dir+\"/testPlot_\"+pdate+\"_\"+ptime(it)\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n res@tiMainString = pdate+\" \"+ptime(it)+\" UTC O~B~3~N~ Forecast (ppbV)\"\n plot = gsn_csm_contour_map(wks,o3(it,:,:),res)\n draw(plot)\n frame(wks)\n delete(wks)\n system(\"composite -geometry 100x70+900+900 /groups/ESS/aalnaim/cmaq/mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\nend do\ndelete(res)\n\nend\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\n\n# convert -delay 100 *.png 20220613_20220614.gif\nconvert -delay 100 /groups/ESS/aalnaim/cmaq/plots/testPlot*.png /groups/ESS/aalnaim/cmaq/plots/Map_$YYYYMMDD_POST_correct.gif\n\nif [ $? 
-eq 0 ]; then\n echo \"Generating images/gif Completed Successfully\"\n\techo \"Removing ncl file: geoweaver_plot_daily_O3.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\nelse\n echo \"Generating images/gif Failed!\"\n echo \"Removing ncl file: geoweaver_plot_daily_O3.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\nfi\n", + "history_output" : "./geoweaver-325HEkx1Krvs.sh: line 14: /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl: No space left on device\n Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\nfatal: can't find file \"/groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\"\n\n\nconvert: unable to open image `/groups/ESS/aalnaim/cmaq/plots/testPlot*.png': No such file or directory @ error/blob.c/OpenBlob/2881.\nconvert: no images defined `/groups/ESS/aalnaim/cmaq/plots/Map_.gif' @ error/convert.c/ConvertImageCommand/3226.\nGenerating images/gif Failed!\nRemoving ncl file: geoweaver_plot_daily_O3.ncl...\nrm: cannot remove '/groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl': No such file or directory\n", + "history_begin_time" : 1656358129479, + "history_end_time" : null, + "history_notes" : null, + "history_process" : "iicy7w", + "host_id" : null, + "indicator" : "Running" +},{ + "history_id" : "D5UJPEQKTMyg", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=20220612 #This needs to be auto date `date -d \"-2 day ${1}\" +%Y%m%d`\nexport stdate_post=2022-06-12 #This needs to be auto date\nexport eddate_post=2022-06-13 #This needs to be auto date\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files\"\nexport mcip_dir=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km\"\nexport dir_graph=\"/groups/ESS/aalnaim/cmaq/plots\"\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\n\ndate = getenv(\"YYYYMMDD_POST\")\nd1 = getenv(\"stdate_post\")\nd2 = getenv(\"eddate_post\")\n\n;print(\"Passed Date: \"+date)\n\n;aconc_dir = getenv(\"postdata_dir\")\ngrid_dir = getenv(\"mcip_dir\")\nplot_dir = getenv(\"dir_graph\")\n\ncdf_file1 = addfile(\"/groups/ESS/aalnaim/cmaq/prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_20220612_ML_extracted.nc\",\"r\")\ncdf_file= addfile(grid_dir+\"/GRIDCRO2D_\"+date+\".nc\",\"r\")\n\nptime = (/\"12\",\"13\",\"14\",\"15\",\"16\",\"17\",\"18\",\"19\",\"20\",\"21\",\"22\",\"23\",\"00\",\"01\",\"02\",\"03\",\"04\",\"05\",\"06\",\"07\",\"08\",\"09\",\"10\",\"11\"/)\n\ntime = cdf_file1->TFLAG(:,0,:)\no3 = cdf_file1->O3(:,:,:) ;ppb\n;pm25 = cdf_file1->PM25_TOT(:,0,:,:)\n\n\nnt = dimsizes(o3(:,0,0))\nny = dimsizes(o3(0,:,0))\nnx = dimsizes(o3(0,0,:))\n\nprint(nt+\" \"+ny+\" \"+nx)\nprint(max(o3))\nprint(min(o3))\nprint(avg(o3))\n\n;print(max(pm25))\n;print(min(pm25))\n;print(avg(pm25))\n\n;print(time)\n\nlat = cdf_file->LAT(0,0,:,:)\nlon = 
cdf_file->LON(0,0,:,:)\n\no3@lat2d = lat\no3@lon2d = lon\n\nres = True\nres@gsnMaximize = True ; maximize pot in frame\nres@gsnFrame = False ; don't advance frame\nres@gsnDraw = False\n;res@gsnSpreadColors = True\nres@lbLabelAutoStride = True\n;res@lbBoxLinesOn = False\nres@pmLabelBarHeightF = 0.1\nres@pmLabelBarWidthF = 0.5\nres@cnFillOn=True\n;res@cnMonoFillPattern=True\n;res@cnMonoLineColor=True\nres@cnLinesOn=False\n;res@pmLabelBarDisplayMode=\"never\"\nres@gsnLeftString = \"\";\nres@gsnRightString = \"\"\n\nres@mpLimitMode = \"LatLon\"\nres@mpMinLonF = -120 ;min(lon)+0.2\nres@mpMaxLonF = -70 ;max(lon)-0.2\nres@mpMinLatF = 25 ;min(lat)+0.05\nres@mpMaxLatF = 50 ;max(lat)-0.05\nres@mpDataBaseVersion = \"MediumRes\"\n;res@tiMainString = times(it)\nres@mpDataBaseVersion = \"MediumRes\"\nres@mpDataSetName = \"Earth..4\"\nres@mpAreaMaskingOn = True\nres@mpOutlineBoundarySets = \"GeophysicalAndUSStates\"\nres@mpOutlineSpecifiers=\"United States : States\"\nres@mpLandFillColor = \"white\"\nres@mpInlandWaterFillColor = \"white\"\nres@mpOceanFillColor = \"white\"\nres@mpGeophysicalLineColor = \"Black\"\nres@mpGeophysicalLineThicknessF = 1.5\n\n;res@gsnSpreadColors = True\nres@lbLabelAutoStride = True\nres@lbLabelFont = 25\nres@tiXAxisFont = 25\nres@pmTickMarkDisplayMode = \"Always\"\nres@tmXBLabelFont = 25\nres@tmXBLabelFontHeightF = 0.013\nres@tmXBLabelDeltaF = -0.5\nres@tmYLLabelFont = 25\nres@tmYLLabelFontHeightF = 0.013\nres@tmXBLabelDeltaF = -0.5\nres@tmXTLabelsOn = False\nres@tmXTLabelFont = 25\nres@tmXTLabelFontHeightF = 0.013\nres@tmYRLabelsOn = False\nres@tmYRLabelFont = 25\nres@tmYRLabelFontHeightF = 0.013\n\n\nres@mpProjection = \"LambertConformal\" ;\"CylindricalEquidistant\"\nres@mpLambertParallel1F = 33.\nres@mpLambertParallel2F = 45.\nres@mpLambertMeridianF = -98.\n\nres@cnLevelSelectionMode = \"ManualLevels\"\nres@cnMinLevelValF = 0.\nres@cnMaxLevelValF = 80\nres@cnLevelSpacingF = 4\n\nres@txFont = \"times-roman\"\nres@tiMainFont = \"times-roman\"\n\ndo it = 0, nt-1\n if (it .lt. 12) then\n pdate=d1\n else\n pdate=d2\n end if\n\n pname=plot_dir+\"/testPlot_\"+pdate+\"_\"+ptime(it)\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n res@tiMainString = pdate+\" \"+ptime(it)+\" UTC O~B~3~N~ Forecast (ppbV)\"\n plot = gsn_csm_contour_map(wks,o3(it,:,:),res)\n draw(plot)\n frame(wks)\n delete(wks)\n system(\"composite -geometry 100x70+900+900 /groups/ESS/aalnaim/cmaq/mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\nend do\ndelete(res)\n\nend\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\n\n# convert -delay 100 *.png 20220613_20220614.gif\nconvert -delay 100 /groups/ESS/aalnaim/cmaq/plots/testPlot*.png /groups/ESS/aalnaim/cmaq/plots/Map_$YYYYMMDD_POST_correct.gif\n\nif [ $? 
-eq 0 ]; then\n echo \"Generating images/gif Completed Successfully\"\n\techo \"Removing ncl file: geoweaver_plot_daily_O3.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\nelse\n echo \"Generating images/gif Failed!\"\n echo \"Removing ncl file: geoweaver_plot_daily_O3.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\nfi\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\n(0)\t24 265 442\n(0)\t86.04666666666668\n(0)\t6.15\n(0)\t31.61014393784576\nGenerating images/gif Completed Successfully\nRemoving ncl file: geoweaver_plot_daily_O3.ncl...\n", + "history_begin_time" : 1656035297411, + "history_end_time" : 1656318394652, + "history_notes" : null, + "history_process" : "iicy7w", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "XOIlvRsdYnI8", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=20220612 #This needs to be auto date `date -d \"-2 day ${1}\" +%Y%m%d`\nexport stdate_post=2022-06-12 #This needs to be auto date\nexport eddate_post=2022-06-13 #This needs to be auto date\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files\"\nexport mcip_dir=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km\"\nexport dir_graph=\"/groups/ESS/aalnaim/cmaq/plots\"\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\n\ndate = getenv(\"YYYYMMDD_POST\")\nd1 = getenv(\"stdate_post\")\nd2 = getenv(\"eddate_post\")\n\n;print(\"Passed Date: \"+date)\n\n;aconc_dir = getenv(\"postdata_dir\")\ngrid_dir = getenv(\"mcip_dir\")\nplot_dir = getenv(\"dir_graph\")\n\ncdf_file1 = addfile(\"/groups/ESS/aalnaim/cmaq/prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_20220612_ML_extracted.nc\",\"r\")\ncdf_file= addfile(grid_dir+\"/GRIDCRO2D_\"+date+\".nc\",\"r\")\n\nptime = (/\"12\",\"13\",\"14\",\"15\",\"16\",\"17\",\"18\",\"19\",\"20\",\"21\",\"22\",\"23\",\"00\",\"01\",\"02\",\"03\",\"04\",\"05\",\"06\",\"07\",\"08\",\"09\",\"10\",\"11\"/)\n\ntime = cdf_file1->TFLAG(:,0,:)\no3 = cdf_file1->O3(:,:,:) ;ppb\n;pm25 = cdf_file1->PM25_TOT(:,0,:,:)\n\n\nnt = dimsizes(o3(:,0,0))\nny = dimsizes(o3(0,:,0))\nnx = dimsizes(o3(0,0,:))\n\nprint(nt+\" \"+ny+\" \"+nx)\nprint(max(o3))\nprint(min(o3))\nprint(avg(o3))\n\n;print(max(pm25))\n;print(min(pm25))\n;print(avg(pm25))\n\n;print(time)\n\nlat = cdf_file->LAT(0,0,:,:)\nlon = cdf_file->LON(0,0,:,:)\n\no3@lat2d = lat\no3@lon2d = lon\n\nres = True\nres@gsnMaximize = True ; maximize pot in frame\nres@gsnFrame = False ; don't advance frame\nres@gsnDraw = False\n;res@gsnSpreadColors = True\nres@lbLabelAutoStride = True\n;res@lbBoxLinesOn = False\nres@pmLabelBarHeightF = 0.1\nres@pmLabelBarWidthF = 0.5\nres@cnFillOn=True\n;res@cnMonoFillPattern=True\n;res@cnMonoLineColor=True\nres@cnLinesOn=False\n;res@pmLabelBarDisplayMode=\"never\"\nres@gsnLeftString = 
\"\";\nres@gsnRightString = \"\"\n\nres@mpLimitMode = \"LatLon\"\nres@mpMinLonF = -120 ;min(lon)+0.2\nres@mpMaxLonF = -70 ;max(lon)-0.2\nres@mpMinLatF = 25 ;min(lat)+0.05\nres@mpMaxLatF = 50 ;max(lat)-0.05\nres@mpDataBaseVersion = \"MediumRes\"\n;res@tiMainString = times(it)\nres@mpDataBaseVersion = \"MediumRes\"\nres@mpDataSetName = \"Earth..4\"\nres@mpAreaMaskingOn = True\nres@mpOutlineBoundarySets = \"GeophysicalAndUSStates\"\nres@mpOutlineSpecifiers=\"United States : States\"\nres@mpLandFillColor = \"white\"\nres@mpInlandWaterFillColor = \"white\"\nres@mpOceanFillColor = \"white\"\nres@mpGeophysicalLineColor = \"Black\"\nres@mpGeophysicalLineThicknessF = 1.5\n\n;res@gsnSpreadColors = True\nres@lbLabelAutoStride = True\nres@lbLabelFont = 25\nres@tiXAxisFont = 25\nres@pmTickMarkDisplayMode = \"Always\"\nres@tmXBLabelFont = 25\nres@tmXBLabelFontHeightF = 0.013\nres@tmXBLabelDeltaF = -0.5\nres@tmYLLabelFont = 25\nres@tmYLLabelFontHeightF = 0.013\nres@tmXBLabelDeltaF = -0.5\nres@tmXTLabelsOn = False\nres@tmXTLabelFont = 25\nres@tmXTLabelFontHeightF = 0.013\nres@tmYRLabelsOn = False\nres@tmYRLabelFont = 25\nres@tmYRLabelFontHeightF = 0.013\n\n\nres@mpProjection = \"LambertConformal\" ;\"CylindricalEquidistant\"\nres@mpLambertParallel1F = 33.\nres@mpLambertParallel2F = 45.\nres@mpLambertMeridianF = -98.\n\nres@cnLevelSelectionMode = \"ManualLevels\"\nres@cnMinLevelValF = 0.\nres@cnMaxLevelValF = 80\nres@cnLevelSpacingF = 4\n\nres@txFont = \"times-roman\"\nres@tiMainFont = \"times-roman\"\n\ndo it = 0, nt-1\n if (it .lt. 12) then\n pdate=d1\n else\n pdate=d2\n end if\n\n pname=plot_dir+\"/testPlot_\"+pdate+\"_\"+ptime(it)\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n res@tiMainString = pdate+\" \"+ptime(it)+\" UTC O~B~3~N~ Forecast (ppbV)\"\n plot = gsn_csm_contour_map(wks,o3(it,:,:),res)\n draw(plot)\n frame(wks)\n delete(wks)\n system(\"composite -geometry 100x70+900+900 /groups/ESS/aalnaim/cmaq/mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\nend do\ndelete(res)\n\nend\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\n\n# convert -delay 100 *.png 20220613_20220614.gif\nconvert -delay 100 /groups/ESS/aalnaim/cmaq/plots/testPlot*.png /groups/ESS/aalnaim/cmaq/plots/$YYYYMMDD_POST_correct.gif\n\nif [ $? 
-eq 0 ]; then\n echo \"Generating images/gif Completed Successfully\"\n\techo \"Removing ncl file: geoweaver_plot_daily_O3.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\nelse\n echo \"Generating images/gif Failed!\"\n echo \"Removing ncl file: geoweaver_plot_daily_O3.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\nfi\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\n(0)\t24 265 442\n(0)\t86.04666666666668\n(0)\t6.15\n(0)\t31.61014393784576\nGenerating images/gif Completed Successfully\nRemoving ncl file: geoweaver_plot_daily_O3.ncl...\n", + "history_begin_time" : 1656035022277, + "history_end_time" : 1656318394053, + "history_notes" : null, + "history_process" : "iicy7w", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "4XGJCKodBvLo", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=20220613 #This needs to be auto date `date -d \"-2 day ${1}\" +%Y%m%d`\nexport stdate_post=2022-06-13 #This needs to be auto date\nexport eddate_post=2022-06-14 #This needs to be auto date\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files\"\nexport mcip_dir=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km\"\nexport dir_graph=\"/groups/ESS/aalnaim/cmaq/plots\"\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\n\ndate = getenv(\"YYYYMMDD_POST\")\nd1 = getenv(\"stdate_post\")\nd2 = getenv(\"eddate_post\")\n\n;print(\"Passed Date: \"+date)\n\n;aconc_dir = getenv(\"postdata_dir\")\ngrid_dir = getenv(\"mcip_dir\")\nplot_dir = getenv(\"dir_graph\")\n\ncdf_file1 = addfile(\"/groups/ESS/aalnaim/cmaq/prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_20220612_ML_extracted.nc\",\"r\")\ncdf_file= addfile(grid_dir+\"/GRIDCRO2D_\"+date+\".nc\",\"r\")\n\nptime = (/\"12\",\"13\",\"14\",\"15\",\"16\",\"17\",\"18\",\"19\",\"20\",\"21\",\"22\",\"23\",\"00\",\"01\",\"02\",\"03\",\"04\",\"05\",\"06\",\"07\",\"08\",\"09\",\"10\",\"11\"/)\n\ntime = cdf_file1->TFLAG(:,0,:)\no3 = cdf_file1->O3(:,:,:) ;ppb\n;pm25 = cdf_file1->PM25_TOT(:,0,:,:)\n\n\nnt = dimsizes(o3(:,0,0))\nny = dimsizes(o3(0,:,0))\nnx = dimsizes(o3(0,0,:))\n\nprint(nt+\" \"+ny+\" \"+nx)\nprint(max(o3))\nprint(min(o3))\nprint(avg(o3))\n\n;print(max(pm25))\n;print(min(pm25))\n;print(avg(pm25))\n\n;print(time)\n\nlat = cdf_file->LAT(0,0,:,:)\nlon = cdf_file->LON(0,0,:,:)\n\no3@lat2d = lat\no3@lon2d = lon\n\nres = True\nres@gsnMaximize = True ; maximize pot in frame\nres@gsnFrame = False ; don't advance frame\nres@gsnDraw = False\n;res@gsnSpreadColors = True\nres@lbLabelAutoStride = True\n;res@lbBoxLinesOn = False\nres@pmLabelBarHeightF = 0.1\nres@pmLabelBarWidthF = 0.5\nres@cnFillOn=True\n;res@cnMonoFillPattern=True\n;res@cnMonoLineColor=True\nres@cnLinesOn=False\n;res@pmLabelBarDisplayMode=\"never\"\nres@gsnLeftString = 
\"\";\nres@gsnRightString = \"\"\n\nres@mpLimitMode = \"LatLon\"\nres@mpMinLonF = -120 ;min(lon)+0.2\nres@mpMaxLonF = -70 ;max(lon)-0.2\nres@mpMinLatF = 25 ;min(lat)+0.05\nres@mpMaxLatF = 50 ;max(lat)-0.05\nres@mpDataBaseVersion = \"MediumRes\"\n;res@tiMainString = times(it)\nres@mpDataBaseVersion = \"MediumRes\"\nres@mpDataSetName = \"Earth..4\"\nres@mpAreaMaskingOn = True\nres@mpOutlineBoundarySets = \"GeophysicalAndUSStates\"\nres@mpOutlineSpecifiers=\"United States : States\"\nres@mpLandFillColor = \"white\"\nres@mpInlandWaterFillColor = \"white\"\nres@mpOceanFillColor = \"white\"\nres@mpGeophysicalLineColor = \"Black\"\nres@mpGeophysicalLineThicknessF = 1.5\n\n;res@gsnSpreadColors = True\nres@lbLabelAutoStride = True\nres@lbLabelFont = 25\nres@tiXAxisFont = 25\nres@pmTickMarkDisplayMode = \"Always\"\nres@tmXBLabelFont = 25\nres@tmXBLabelFontHeightF = 0.013\nres@tmXBLabelDeltaF = -0.5\nres@tmYLLabelFont = 25\nres@tmYLLabelFontHeightF = 0.013\nres@tmXBLabelDeltaF = -0.5\nres@tmXTLabelsOn = False\nres@tmXTLabelFont = 25\nres@tmXTLabelFontHeightF = 0.013\nres@tmYRLabelsOn = False\nres@tmYRLabelFont = 25\nres@tmYRLabelFontHeightF = 0.013\n\n\nres@mpProjection = \"LambertConformal\" ;\"CylindricalEquidistant\"\nres@mpLambertParallel1F = 33.\nres@mpLambertParallel2F = 45.\nres@mpLambertMeridianF = -98.\n\nres@cnLevelSelectionMode = \"ManualLevels\"\nres@cnMinLevelValF = 0.\nres@cnMaxLevelValF = 80\nres@cnLevelSpacingF = 4\n\nres@txFont = \"times-roman\"\nres@tiMainFont = \"times-roman\"\n\ndo it = 0, nt-1\n if (it .lt. 12) then\n pdate=d1\n else\n pdate=d2\n end if\n\n pname=plot_dir+\"/testPlot_\"+pdate+\"_\"+ptime(it)\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n res@tiMainString = pdate+\" \"+ptime(it)+\" UTC O~B~3~N~ Forecast (ppbV)\"\n plot = gsn_csm_contour_map(wks,o3(it,:,:),res)\n draw(plot)\n frame(wks)\n delete(wks)\n system(\"composite -geometry 100x70+900+900 /groups/ESS/aalnaim/cmaq/mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\nend do\ndelete(res)\n\nend\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\n\n# convert -delay 100 *.png 20220613_20220614.gif\n\nif [ $? 
-eq 0 ]; then\n echo \"Generating images/gif Completed Successfully\"\n\techo \"Removing ncl file: geoweaver_plot_daily_O3.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\nelse\n echo \"Generating images/gif Failed!\"\n echo \"Removing ncl file: geoweaver_plot_daily_O3.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\nfi\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\n(0)\t24 265 442\n(0)\t86.04666666666668\n(0)\t6.15\n(0)\t31.61014393784576\n(0)\t24 265 442\n(0)\t86.04666666666668\n(0)\t6.15\n(0)\t31.61014393784576\nGenerating images/gif Completed Successfully\nRemoving ncl file: geoweaver_plot_daily_O3.ncl...\n", + "history_begin_time" : 1655841852369, + "history_end_time" : 1656318392984, + "history_notes" : null, + "history_process" : "iicy7w", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "p6xFI6fWA4Fb", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=20220613\nexport stdate_post=2022-06-13\nexport eddate_post=2022-06-14\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files\"\nexport mcip_dir=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km\"\nexport dir_graph=\"/groups/ESS/aalnaim/cmaq/plots\"\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\n\ndate = getenv(\"YYYYMMDD_POST\")\nd1 = getenv(\"stdate_post\")\nd2 = getenv(\"eddate_post\")\n\n;print(\"Passed Date: \"+date)\n\n;aconc_dir = getenv(\"postdata_dir\")\ngrid_dir = getenv(\"mcip_dir\")\nplot_dir = getenv(\"dir_graph\")\n\ncdf_file1 = addfile(\"/groups/ESS/aalnaim/cmaq/prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_20220612_ML_extracted.nc\",\"r\")\ncdf_file= addfile(grid_dir+\"/GRIDCRO2D_\"+date+\".nc\",\"r\")\n\nptime = (/\"12\",\"13\",\"14\",\"15\",\"16\",\"17\",\"18\",\"19\",\"20\",\"21\",\"22\",\"23\",\"00\",\"01\",\"02\",\"03\",\"04\",\"05\",\"06\",\"07\",\"08\",\"09\",\"10\",\"11\"/)\n\ntime = cdf_file1->TFLAG(:,0,:)\no3 = cdf_file1->O3(:,:,:) ;ppb\n;pm25 = cdf_file1->PM25_TOT(:,0,:,:)\n\n\nnt = dimsizes(o3(:,0,0))\nny = dimsizes(o3(0,:,0))\nnx = dimsizes(o3(0,0,:))\n\nprint(nt+\" \"+ny+\" \"+nx)\nprint(max(o3))\nprint(min(o3))\nprint(avg(o3))\n\n;print(max(pm25))\n;print(min(pm25))\n;print(avg(pm25))\n\n;print(time)\n\nlat = cdf_file->LAT(0,0,:,:)\nlon = cdf_file->LON(0,0,:,:)\n\no3@lat2d = lat\no3@lon2d = lon\n\nres = True\nres@gsnMaximize = True ; maximize pot in frame\nres@gsnFrame = False ; don't advance frame\nres@gsnDraw = False\n;res@gsnSpreadColors = True\nres@lbLabelAutoStride = True\n;res@lbBoxLinesOn = False\nres@pmLabelBarHeightF = 0.1\nres@pmLabelBarWidthF = 0.5\nres@cnFillOn=True\n;res@cnMonoFillPattern=True\n;res@cnMonoLineColor=True\nres@cnLinesOn=False\n;res@pmLabelBarDisplayMode=\"never\"\nres@gsnLeftString = \"\";\nres@gsnRightString = 
\"\"\n\nres@mpLimitMode = \"LatLon\"\nres@mpMinLonF = -120 ;min(lon)+0.2\nres@mpMaxLonF = -70 ;max(lon)-0.2\nres@mpMinLatF = 25 ;min(lat)+0.05\nres@mpMaxLatF = 50 ;max(lat)-0.05\nres@mpDataBaseVersion = \"MediumRes\"\n;res@tiMainString = times(it)\nres@mpDataBaseVersion = \"MediumRes\"\nres@mpDataSetName = \"Earth..4\"\nres@mpAreaMaskingOn = True\nres@mpOutlineBoundarySets = \"GeophysicalAndUSStates\"\nres@mpOutlineSpecifiers=\"United States : States\"\nres@mpLandFillColor = \"white\"\nres@mpInlandWaterFillColor = \"white\"\nres@mpOceanFillColor = \"white\"\nres@mpGeophysicalLineColor = \"Black\"\nres@mpGeophysicalLineThicknessF = 1.5\n\n;res@gsnSpreadColors = True\nres@lbLabelAutoStride = True\nres@lbLabelFont = 25\nres@tiXAxisFont = 25\nres@pmTickMarkDisplayMode = \"Always\"\nres@tmXBLabelFont = 25\nres@tmXBLabelFontHeightF = 0.013\nres@tmXBLabelDeltaF = -0.5\nres@tmYLLabelFont = 25\nres@tmYLLabelFontHeightF = 0.013\nres@tmXBLabelDeltaF = -0.5\nres@tmXTLabelsOn = False\nres@tmXTLabelFont = 25\nres@tmXTLabelFontHeightF = 0.013\nres@tmYRLabelsOn = False\nres@tmYRLabelFont = 25\nres@tmYRLabelFontHeightF = 0.013\n\n\nres@mpProjection = \"LambertConformal\" ;\"CylindricalEquidistant\"\nres@mpLambertParallel1F = 33.\nres@mpLambertParallel2F = 45.\nres@mpLambertMeridianF = -98.\n\nres@cnLevelSelectionMode = \"ManualLevels\"\nres@cnMinLevelValF = 0.\nres@cnMaxLevelValF = 80\nres@cnLevelSpacingF = 4\n\nres@txFont = \"times-roman\"\nres@tiMainFont = \"times-roman\"\n\ndo it = 0, nt-1\n if (it .lt. 12) then\n pdate=d1\n else\n pdate=d2\n end if\n\n pname=plot_dir+\"/testPlot_\"+pdate+\"_\"+ptime(it)\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n res@tiMainString = pdate+\" \"+ptime(it)+\" UTC O~B~3~N~ Forecast (ppbV)\"\n plot = gsn_csm_contour_map(wks,o3(it,:,:),res)\n draw(plot)\n frame(wks)\n delete(wks)\n system(\"composite -geometry 100x70+900+900 /groups/ESS/aalnaim/cmaq/mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\nend do\ndelete(res)\n\nend\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\n\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\n(0)\t24 265 442\n(0)\t86.04666666666668\n(0)\t6.15\n(0)\t31.61014393784576\n", + "history_begin_time" : 1655751752987, + "history_end_time" : 1655841842683, + "history_notes" : null, + "history_process" : "iicy7w", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "ne47m6FAeJxU", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=20220613\nexport stdate_post=2022-06-13\nexport eddate_post=2022-06-14\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files\"\nexport mcip_dir=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km\"\nexport dir_graph=\"/groups/ESS/aalnaim/cmaq/plots\"\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/plot_daily_O3.ncl\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 
600000000\nend setvalues\n\nbegin\n\ndate = getenv(\"YYYYMMDD_POST\")\nd1 = getenv(\"stdate_post\")\nd2 = getenv(\"eddate_post\")\n\n;print(\"Passed Date: \"+date)\n\n;aconc_dir = getenv(\"postdata_dir\")\ngrid_dir = getenv(\"mcip_dir\")\nplot_dir = getenv(\"dir_graph\")\n\ncdf_file1 = addfile(\"/groups/ESS/aalnaim/cmaq/prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_20220612_ML_extracted.nc\",\"r\")\ncdf_file= addfile(grid_dir+\"/GRIDCRO2D_\"+date+\".nc\",\"r\")\n\nptime = (/\"12\",\"13\",\"14\",\"15\",\"16\",\"17\",\"18\",\"19\",\"20\",\"21\",\"22\",\"23\",\"00\",\"01\",\"02\",\"03\",\"04\",\"05\",\"06\",\"07\",\"08\",\"09\",\"10\",\"11\"/)\n\ntime = cdf_file1->TFLAG(:,0,:)\no3 = cdf_file1->O3(:,:,:) ;ppb\n;pm25 = cdf_file1->PM25_TOT(:,0,:,:)\n\n\nnt = dimsizes(o3(:,0,0))\nny = dimsizes(o3(0,:,0))\nnx = dimsizes(o3(0,0,:))\n\nprint(nt+\" \"+ny+\" \"+nx)\nprint(max(o3))\nprint(min(o3))\nprint(avg(o3))\n\n;print(max(pm25))\n;print(min(pm25))\n;print(avg(pm25))\n\n;print(time)\n\nlat = cdf_file->LAT(0,0,:,:)\nlon = cdf_file->LON(0,0,:,:)\n\no3@lat2d = lat\no3@lon2d = lon\n\nres = True\nres@gsnMaximize = True ; maximize pot in frame\nres@gsnFrame = False ; don't advance frame\nres@gsnDraw = False\n;res@gsnSpreadColors = True\nres@lbLabelAutoStride = True\n;res@lbBoxLinesOn = False\nres@pmLabelBarHeightF = 0.1\nres@pmLabelBarWidthF = 0.5\nres@cnFillOn=True\n;res@cnMonoFillPattern=True\n;res@cnMonoLineColor=True\nres@cnLinesOn=False\n;res@pmLabelBarDisplayMode=\"never\"\nres@gsnLeftString = \"\";\nres@gsnRightString = \"\"\n\nres@mpLimitMode = \"LatLon\"\nres@mpMinLonF = -120 ;min(lon)+0.2\nres@mpMaxLonF = -70 ;max(lon)-0.2\nres@mpMinLatF = 25 ;min(lat)+0.05\nres@mpMaxLatF = 50 ;max(lat)-0.05\nres@mpDataBaseVersion = \"MediumRes\"\n;res@tiMainString = times(it)\nres@mpDataBaseVersion = \"MediumRes\"\nres@mpDataSetName = \"Earth..4\"\nres@mpAreaMaskingOn = True\nres@mpOutlineBoundarySets = \"GeophysicalAndUSStates\"\nres@mpOutlineSpecifiers=\"United States : States\"\nres@mpLandFillColor = \"white\"\nres@mpInlandWaterFillColor = \"white\"\nres@mpOceanFillColor = \"white\"\nres@mpGeophysicalLineColor = \"Black\"\nres@mpGeophysicalLineThicknessF = 1.5\n\n;res@gsnSpreadColors = True\nres@lbLabelAutoStride = True\nres@lbLabelFont = 25\nres@tiXAxisFont = 25\nres@pmTickMarkDisplayMode = \"Always\"\nres@tmXBLabelFont = 25\nres@tmXBLabelFontHeightF = 0.013\nres@tmXBLabelDeltaF = -0.5\nres@tmYLLabelFont = 25\nres@tmYLLabelFontHeightF = 0.013\nres@tmXBLabelDeltaF = -0.5\nres@tmXTLabelsOn = False\nres@tmXTLabelFont = 25\nres@tmXTLabelFontHeightF = 0.013\nres@tmYRLabelsOn = False\nres@tmYRLabelFont = 25\nres@tmYRLabelFontHeightF = 0.013\n\n\nres@mpProjection = \"LambertConformal\" ;\"CylindricalEquidistant\"\nres@mpLambertParallel1F = 33.\nres@mpLambertParallel2F = 45.\nres@mpLambertMeridianF = -98.\n\nres@cnLevelSelectionMode = \"ManualLevels\"\nres@cnMinLevelValF = 0.\nres@cnMaxLevelValF = 80\nres@cnLevelSpacingF = 4\n\nres@txFont = \"times-roman\"\nres@tiMainFont = \"times-roman\"\n\ndo it = 0, nt-1\n if (it .lt. 
12) then\n pdate=d1\n else\n pdate=d2\n end if\n\n pname=plot_dir+\"/testPlot_\"+pdate+\"_\"+ptime(it)\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n res@tiMainString = pdate+\" \"+ptime(it)+\" UTC O~B~3~N~ Forecast (ppbV)\"\n plot = gsn_csm_contour_map(wks,o3(it,:,:),res)\n draw(plot)\n frame(wks)\n delete(wks)\n system(\"composite -geometry 100x70+900+900 /groups/ESS/aalnaim/cmaq/mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\nend do\ndelete(res)\n\nend\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/plot_daily_O3.ncl\n\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\nwarning:_NclOpenFile: cannot open file ; No such file or directory\n\n\nfatal:file (cdf_file1) isn't defined\n\nfatal:[\"Execute.c\":8637]:Execute: Error occurred at or near line 26 in file /groups/ESS/aalnaim/cmaq/plot_daily_O3.ncl\n\n\n(0)\t24 265 442\n(0)\t86.04666666666668\n(0)\t6.15\n(0)\t31.61014393784576\ncomposite: unable to open image `mason-logo-green.png': No such file or directory @ error/blob.c/OpenBlob/2881.\ncomposite: missing an image filename `/groups/ESS/aalnaim/cmaq/plots/testPlot_2022-06-13_12.png' @ error/composite.c/CompositeImageCommand/1597.\ncomposite: unable to open image `mason-logo-green.png': No such file or directory @ error/blob.c/OpenBlob/2881.\ncomposite: missing an image filename `/groups/ESS/aalnaim/cmaq/plots/testPlot_2022-06-13_13.png' @ error/composite.c/CompositeImageCommand/1597.\ncomposite: unable to open image `mason-logo-green.png': No such file or directory @ error/blob.c/OpenBlob/2881.\ncomposite: missing an image filename `/groups/ESS/aalnaim/cmaq/plots/testPlot_2022-06-13_14.png' @ error/composite.c/CompositeImageCommand/1597.\ncomposite: unable to open image `mason-logo-green.png': No such file or directory @ error/blob.c/OpenBlob/2881.\ncomposite: missing an image filename `/groups/ESS/aalnaim/cmaq/plots/testPlot_2022-06-13_15.png' @ error/composite.c/CompositeImageCommand/1597.\ncomposite: unable to open image `mason-logo-green.png': No such file or directory @ error/blob.c/OpenBlob/2881.\ncomposite: missing an image filename `/groups/ESS/aalnaim/cmaq/plots/testPlot_2022-06-13_16.png' @ error/composite.c/CompositeImageCommand/1597.\ncomposite: unable to open image `mason-logo-green.png': No such file or directory @ error/blob.c/OpenBlob/2881.\ncomposite: missing an image filename `/groups/ESS/aalnaim/cmaq/plots/testPlot_2022-06-13_17.png' @ error/composite.c/CompositeImageCommand/1597.\ncomposite: unable to open image `mason-logo-green.png': No such file or directory @ error/blob.c/OpenBlob/2881.\ncomposite: missing an image filename `/groups/ESS/aalnaim/cmaq/plots/testPlot_2022-06-13_18.png' @ error/composite.c/CompositeImageCommand/1597.\ncomposite: unable to open image `mason-logo-green.png': No such file or directory @ error/blob.c/OpenBlob/2881.\ncomposite: missing an image filename `/groups/ESS/aalnaim/cmaq/plots/testPlot_2022-06-13_19.png' @ error/composite.c/CompositeImageCommand/1597.\ncomposite: unable to open image `mason-logo-green.png': No such file or directory @ error/blob.c/OpenBlob/2881.\ncomposite: missing an image filename `/groups/ESS/aalnaim/cmaq/plots/testPlot_2022-06-13_20.png' @ error/composite.c/CompositeImageCommand/1597.\ncomposite: unable to open image `mason-logo-green.png': No 
such file or directory @ error/blob.c/OpenBlob/2881.\ncomposite: missing an image filename `/groups/ESS/aalnaim/cmaq/plots/testPlot_2022-06-13_21.png' @ error/composite.c/CompositeImageCommand/1597.\ncomposite: unable to open image `mason-logo-green.png': No such file or directory @ error/blob.c/OpenBlob/2881.\ncomposite: missing an image filename `/groups/ESS/aalnaim/cmaq/plots/testPlot_2022-06-13_22.png' @ error/composite.c/CompositeImageCommand/1597.\ncomposite: unable to open image `mason-logo-green.png': No such file or directory @ error/blob.c/OpenBlob/2881.\ncomposite: missing an image filename `/groups/ESS/aalnaim/cmaq/plots/testPlot_2022-06-13_23.png' @ error/composite.c/CompositeImageCommand/1597.\ncomposite: unable to open image `mason-logo-green.png': No such file or directory @ error/blob.c/OpenBlob/2881.\ncomposite: missing an image filename `/groups/ESS/aalnaim/cmaq/plots/testPlot_2022-06-14_00.png' @ error/composite.c/CompositeImageCommand/1597.\ncomposite: unable to open image `mason-logo-green.png': No such file or directory @ error/blob.c/OpenBlob/2881.\ncomposite: missing an image filename `/groups/ESS/aalnaim/cmaq/plots/testPlot_2022-06-14_01.png' @ error/composite.c/CompositeImageCommand/1597.\ncomposite: unable to open image `mason-logo-green.png': No such file or directory @ error/blob.c/OpenBlob/2881.\ncomposite: missing an image filename `/groups/ESS/aalnaim/cmaq/plots/testPlot_2022-06-14_02.png' @ error/composite.c/CompositeImageCommand/1597.\ncomposite: unable to open image `mason-logo-green.png': No such file or directory @ error/blob.c/OpenBlob/2881.\ncomposite: missing an image filename `/groups/ESS/aalnaim/cmaq/plots/testPlot_2022-06-14_03.png' @ error/composite.c/CompositeImageCommand/1597.\ncomposite: unable to open image `mason-logo-green.png': No such file or directory @ error/blob.c/OpenBlob/2881.\ncomposite: missing an image filename `/groups/ESS/aalnaim/cmaq/plots/testPlot_2022-06-14_04.png' @ error/composite.c/CompositeImageCommand/1597.\ncomposite: unable to open image `mason-logo-green.png': No such file or directory @ error/blob.c/OpenBlob/2881.\ncomposite: missing an image filename `/groups/ESS/aalnaim/cmaq/plots/testPlot_2022-06-14_05.png' @ error/composite.c/CompositeImageCommand/1597.\ncomposite: unable to open image `mason-logo-green.png': No such file or directory @ error/blob.c/OpenBlob/2881.\ncomposite: missing an image filename `/groups/ESS/aalnaim/cmaq/plots/testPlot_2022-06-14_06.png' @ error/composite.c/CompositeImageCommand/1597.\ncomposite: unable to open image `mason-logo-green.png': No such file or directory @ error/blob.c/OpenBlob/2881.\ncomposite: missing an image filename `/groups/ESS/aalnaim/cmaq/plots/testPlot_2022-06-14_07.png' @ error/composite.c/CompositeImageCommand/1597.\ncomposite: unable to open image `mason-logo-green.png': No such file or directory @ error/blob.c/OpenBlob/2881.\ncomposite: missing an image filename `/groups/ESS/aalnaim/cmaq/plots/testPlot_2022-06-14_08.png' @ error/composite.c/CompositeImageCommand/1597.\ncomposite: unable to open image `mason-logo-green.png': No such file or directory @ error/blob.c/OpenBlob/2881.\ncomposite: missing an image filename `/groups/ESS/aalnaim/cmaq/plots/testPlot_2022-06-14_09.png' @ error/composite.c/CompositeImageCommand/1597.\n", + "history_begin_time" : 1655751590641, + "history_end_time" : 1655841842215, + "history_notes" : null, + "history_process" : "iicy7w", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "jTxew37ZEHZx", + "history_input" : 
"#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=20220613\nexport stdate_post=2022-06-13\nexport eddate_post=2022-06-14\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files\"\nexport mcip_dir=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km\"\nexport dir_graph=\"/groups/ESS/aalnaim/cmaq/plots\"\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/plot_daily_O3.ncl\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\n\ndate = getenv(\"YYYYMMDD_POST\")\nd1 = getenv(\"stdate_post\")\nd2 = getenv(\"eddate_post\")\n\n;print(\"Passed Date: \"+date)\n\n;aconc_dir = getenv(\"postdata_dir\")\ngrid_dir = getenv(\"mcip_dir\")\nplot_dir = getenv(\"dir_graph\")\n\ncdf_file1 = addfile(\"/groups/ESS/aalnaim/cmaq/prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_20220612_ML_extracted.nc\",\"r\")\ncdf_file= addfile(grid_dir+\"/GRIDCRO2D_\"+date+\".nc\",\"r\")\n\nptime = (/\"12\",\"13\",\"14\",\"15\",\"16\",\"17\",\"18\",\"19\",\"20\",\"21\",\"22\",\"23\",\"00\",\"01\",\"02\",\"03\",\"04\",\"05\",\"06\",\"07\",\"08\",\"09\",\"10\",\"11\"/)\n\ntime = cdf_file1->TFLAG(:,0,:)\no3 = cdf_file1->O3(:,:,:) ;ppb\n;pm25 = cdf_file1->PM25_TOT(:,0,:,:)\n\n\nnt = dimsizes(o3(:,0,0))\nny = dimsizes(o3(0,:,0))\nnx = dimsizes(o3(0,0,:))\n\nprint(nt+\" \"+ny+\" \"+nx)\nprint(max(o3))\nprint(min(o3))\nprint(avg(o3))\n\n;print(max(pm25))\n;print(min(pm25))\n;print(avg(pm25))\n\n;print(time)\n\nlat = cdf_file->LAT(0,0,:,:)\nlon = cdf_file->LON(0,0,:,:)\n\no3@lat2d = lat\no3@lon2d = lon\n\nres = True\nres@gsnMaximize = True ; maximize pot in frame\nres@gsnFrame = False ; don't advance frame\nres@gsnDraw = False\n;res@gsnSpreadColors = True\nres@lbLabelAutoStride = True\n;res@lbBoxLinesOn = False\nres@pmLabelBarHeightF = 0.1\nres@pmLabelBarWidthF = 0.5\nres@cnFillOn=True\n;res@cnMonoFillPattern=True\n;res@cnMonoLineColor=True\nres@cnLinesOn=False\n;res@pmLabelBarDisplayMode=\"never\"\nres@gsnLeftString = \"\";\nres@gsnRightString = \"\"\n\nres@mpLimitMode = \"LatLon\"\nres@mpMinLonF = -120 ;min(lon)+0.2\nres@mpMaxLonF = -70 ;max(lon)-0.2\nres@mpMinLatF = 25 ;min(lat)+0.05\nres@mpMaxLatF = 50 ;max(lat)-0.05\nres@mpDataBaseVersion = \"MediumRes\"\n;res@tiMainString = times(it)\nres@mpDataBaseVersion = \"MediumRes\"\nres@mpDataSetName = \"Earth..4\"\nres@mpAreaMaskingOn = True\nres@mpOutlineBoundarySets = \"GeophysicalAndUSStates\"\nres@mpOutlineSpecifiers=\"United States : States\"\nres@mpLandFillColor = \"white\"\nres@mpInlandWaterFillColor = \"white\"\nres@mpOceanFillColor = \"white\"\nres@mpGeophysicalLineColor = \"Black\"\nres@mpGeophysicalLineThicknessF = 1.5\n\n;res@gsnSpreadColors = True\nres@lbLabelAutoStride = True\nres@lbLabelFont = 25\nres@tiXAxisFont = 25\nres@pmTickMarkDisplayMode = \"Always\"\nres@tmXBLabelFont = 25\nres@tmXBLabelFontHeightF = 0.013\nres@tmXBLabelDeltaF = -0.5\nres@tmYLLabelFont = 25\nres@tmYLLabelFontHeightF = 0.013\nres@tmXBLabelDeltaF = -0.5\nres@tmXTLabelsOn = False\nres@tmXTLabelFont = 25\nres@tmXTLabelFontHeightF = 0.013\nres@tmYRLabelsOn = False\nres@tmYRLabelFont = 25\nres@tmYRLabelFontHeightF = 0.013\n\n\nres@mpProjection = 
\"LambertConformal\" ;\"CylindricalEquidistant\"\nres@mpLambertParallel1F = 33.\nres@mpLambertParallel2F = 45.\nres@mpLambertMeridianF = -98.\n\nres@cnLevelSelectionMode = \"ManualLevels\"\nres@cnMinLevelValF = 0.\nres@cnMaxLevelValF = 80\nres@cnLevelSpacingF = 4\n\nres@txFont = \"times-roman\"\nres@tiMainFont = \"times-roman\"\n\ndo it = 0, nt-1\n if (it .lt. 12) then\n pdate=d1\n else\n pdate=d2\n end if\n\n pname=plot_dir+\"/testPlot_\"+pdate+\"_\"+ptime(it)\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n res@tiMainString = pdate+\" \"+ptime(it)+\" UTC O~B~3~N~ Forecast (ppbV)\"\n plot = gsn_csm_contour_map(wks,o3(it,:,:),res)\n draw(plot)\n frame(wks)\n delete(wks)\n system(\"composite -geometry 100x70+900+900 mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\nend do\ndelete(res)\n\nend\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/plot_daily_O3.ncl\n\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\nwarning:_NclOpenFile: cannot open file ; No such file or directory\n\n\nfatal:file (cdf_file1) isn't defined\n\nfatal:[\"Execute.c\":8637]:Execute: Error occurred at or near line 26 in file /groups/ESS/aalnaim/cmaq/plot_daily_O3.ncl\n\n\n(0)\t24 265 442\n(0)\t86.04666666666668\n(0)\t6.15\n(0)\t31.61014393784576\ncomposite: unable to open image `mason-logo-green.png': No such file or directory @ error/blob.c/OpenBlob/2881.\ncomposite: missing an image filename `/groups/ESS/aalnaim/cmaq/plots/testPlot_2022-06-13_12.png' @ error/composite.c/CompositeImageCommand/1597.\ncomposite: unable to open image `mason-logo-green.png': No such file or directory @ error/blob.c/OpenBlob/2881.\ncomposite: missing an image filename `/groups/ESS/aalnaim/cmaq/plots/testPlot_2022-06-13_13.png' @ error/composite.c/CompositeImageCommand/1597.\ncomposite: unable to open image `mason-logo-green.png': No such file or directory @ error/blob.c/OpenBlob/2881.\ncomposite: missing an image filename `/groups/ESS/aalnaim/cmaq/plots/testPlot_2022-06-13_14.png' @ error/composite.c/CompositeImageCommand/1597.\ncomposite: unable to open image `mason-logo-green.png': No such file or directory @ error/blob.c/OpenBlob/2881.\ncomposite: missing an image filename `/groups/ESS/aalnaim/cmaq/plots/testPlot_2022-06-13_15.png' @ error/composite.c/CompositeImageCommand/1597.\ncomposite: unable to open image `mason-logo-green.png': No such file or directory @ error/blob.c/OpenBlob/2881.\ncomposite: missing an image filename `/groups/ESS/aalnaim/cmaq/plots/testPlot_2022-06-13_16.png' @ error/composite.c/CompositeImageCommand/1597.\ncomposite: unable to open image `mason-logo-green.png': No such file or directory @ error/blob.c/OpenBlob/2881.\ncomposite: missing an image filename `/groups/ESS/aalnaim/cmaq/plots/testPlot_2022-06-13_17.png' @ error/composite.c/CompositeImageCommand/1597.\ncomposite: unable to open image `mason-logo-green.png': No such file or directory @ error/blob.c/OpenBlob/2881.\ncomposite: missing an image filename `/groups/ESS/aalnaim/cmaq/plots/testPlot_2022-06-13_18.png' @ error/composite.c/CompositeImageCommand/1597.\ncomposite: unable to open image `mason-logo-green.png': No such file or directory @ error/blob.c/OpenBlob/2881.\ncomposite: missing an image filename `/groups/ESS/aalnaim/cmaq/plots/testPlot_2022-06-13_19.png' @ 
error/composite.c/CompositeImageCommand/1597.\ncomposite: unable to open image `mason-logo-green.png': No such file or directory @ error/blob.c/OpenBlob/2881.\ncomposite: missing an image filename `/groups/ESS/aalnaim/cmaq/plots/testPlot_2022-06-13_20.png' @ error/composite.c/CompositeImageCommand/1597.\ncomposite: unable to open image `mason-logo-green.png': No such file or directory @ error/blob.c/OpenBlob/2881.\ncomposite: missing an image filename `/groups/ESS/aalnaim/cmaq/plots/testPlot_2022-06-13_21.png' @ error/composite.c/CompositeImageCommand/1597.\ncomposite: unable to open image `mason-logo-green.png': No such file or directory @ error/blob.c/OpenBlob/2881.\ncomposite: missing an image filename `/groups/ESS/aalnaim/cmaq/plots/testPlot_2022-06-13_22.png' @ error/composite.c/CompositeImageCommand/1597.\ncomposite: unable to open image `mason-logo-green.png': No such file or directory @ error/blob.c/OpenBlob/2881.\ncomposite: missing an image filename `/groups/ESS/aalnaim/cmaq/plots/testPlot_2022-06-13_23.png' @ error/composite.c/CompositeImageCommand/1597.\ncomposite: unable to open image `mason-logo-green.png': No such file or directory @ error/blob.c/OpenBlob/2881.\ncomposite: missing an image filename `/groups/ESS/aalnaim/cmaq/plots/testPlot_2022-06-14_00.png' @ error/composite.c/CompositeImageCommand/1597.\ncomposite: unable to open image `mason-logo-green.png': No such file or directory @ error/blob.c/OpenBlob/2881.\ncomposite: missing an image filename `/groups/ESS/aalnaim/cmaq/plots/testPlot_2022-06-14_01.png' @ error/composite.c/CompositeImageCommand/1597.\ncomposite: unable to open image `mason-logo-green.png': No such file or directory @ error/blob.c/OpenBlob/2881.\ncomposite: missing an image filename `/groups/ESS/aalnaim/cmaq/plots/testPlot_2022-06-14_02.png' @ error/composite.c/CompositeImageCommand/1597.\ncomposite: unable to open image `mason-logo-green.png': No such file or directory @ error/blob.c/OpenBlob/2881.\ncomposite: missing an image filename `/groups/ESS/aalnaim/cmaq/plots/testPlot_2022-06-14_03.png' @ error/composite.c/CompositeImageCommand/1597.\ncomposite: unable to open image `mason-logo-green.png': No such file or directory @ error/blob.c/OpenBlob/2881.\ncomposite: missing an image filename `/groups/ESS/aalnaim/cmaq/plots/testPlot_2022-06-14_04.png' @ error/composite.c/CompositeImageCommand/1597.\ncomposite: unable to open image `mason-logo-green.png': No such file or directory @ error/blob.c/OpenBlob/2881.\ncomposite: missing an image filename `/groups/ESS/aalnaim/cmaq/plots/testPlot_2022-06-14_05.png' @ error/composite.c/CompositeImageCommand/1597.\ncomposite: unable to open image `mason-logo-green.png': No such file or directory @ error/blob.c/OpenBlob/2881.\ncomposite: missing an image filename `/groups/ESS/aalnaim/cmaq/plots/testPlot_2022-06-14_06.png' @ error/composite.c/CompositeImageCommand/1597.\ncomposite: unable to open image `mason-logo-green.png': No such file or directory @ error/blob.c/OpenBlob/2881.\ncomposite: missing an image filename `/groups/ESS/aalnaim/cmaq/plots/testPlot_2022-06-14_07.png' @ error/composite.c/CompositeImageCommand/1597.\ncomposite: unable to open image `mason-logo-green.png': No such file or directory @ error/blob.c/OpenBlob/2881.\ncomposite: missing an image filename `/groups/ESS/aalnaim/cmaq/plots/testPlot_2022-06-14_08.png' @ error/composite.c/CompositeImageCommand/1597.\ncomposite: unable to open image `mason-logo-green.png': No such file or directory @ error/blob.c/OpenBlob/2881.\ncomposite: missing an image 
filename `/groups/ESS/aalnaim/cmaq/plots/testPlot_2022-06-14_09.png' @ error/composite.c/CompositeImageCommand/1597.\ncomposite: unable to open image `mason-logo-green.png': No such file or directory @ error/blob.c/OpenBlob/2881.\ncomposite: missing an image filename `/groups/ESS/aalnaim/cmaq/plots/testPlot_2022-06-14_10.png' @ error/composite.c/CompositeImageCommand/1597.\ncomposite: unable to open image `mason-logo-green.png': No such file or directory @ error/blob.c/OpenBlob/2881.\ncomposite: missing an image filename `/groups/ESS/aalnaim/cmaq/plots/testPlot_2022-06-14_11.png' @ error/composite.c/CompositeImageCommand/1597.\n", + "history_begin_time" : 1655750118130, + "history_end_time" : 1655751684565, + "history_notes" : null, + "history_process" : "iicy7w", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "iYyOqlu7KjSV", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=20220613\nexport stdate_post=2022-06-13\nexport eddate_post=2022-06-14\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files\"\nexport mcip_dir=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km\"\nexport dir_graph=\"/groups/ESS/aalnaim/cmaq/plots\"\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/plot_daily_O3.ncl\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\n\ndate = getenv(\"YYYYMMDD_POST\")\nd1 = getenv(\"stdate_post\")\nd2 = getenv(\"eddate_post\")\n\n;print(\"Passed Date: \"+date)\n\n;aconc_dir = getenv(\"postdata_dir\")\ngrid_dir = getenv(\"mcip_dir\")\nplot_dir = getenv(\"dir_graph\")\n\ncdf_file1 = addfile(\"prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_20220612_ML_extracted.nc\",\"r\")\ncdf_file= addfile(grid_dir+\"/GRIDCRO2D_\"+date+\".nc\",\"r\")\n\nptime = (/\"12\",\"13\",\"14\",\"15\",\"16\",\"17\",\"18\",\"19\",\"20\",\"21\",\"22\",\"23\",\"00\",\"01\",\"02\",\"03\",\"04\",\"05\",\"06\",\"07\",\"08\",\"09\",\"10\",\"11\"/)\n\ntime = cdf_file1->TFLAG(:,0,:)\no3 = cdf_file1->O3(:,:,:) ;ppb\n;pm25 = cdf_file1->PM25_TOT(:,0,:,:)\n\n\nnt = dimsizes(o3(:,0,0))\nny = dimsizes(o3(0,:,0))\nnx = dimsizes(o3(0,0,:))\n\nprint(nt+\" \"+ny+\" \"+nx)\nprint(max(o3))\nprint(min(o3))\nprint(avg(o3))\n\n;print(max(pm25))\n;print(min(pm25))\n;print(avg(pm25))\n\n;print(time)\n\nlat = cdf_file->LAT(0,0,:,:)\nlon = cdf_file->LON(0,0,:,:)\n\no3@lat2d = lat\no3@lon2d = lon\n\nres = True\nres@gsnMaximize = True ; maximize pot in frame\nres@gsnFrame = False ; don't advance frame\nres@gsnDraw = False\n;res@gsnSpreadColors = True\nres@lbLabelAutoStride = True\n;res@lbBoxLinesOn = False\nres@pmLabelBarHeightF = 0.1\nres@pmLabelBarWidthF = 0.5\nres@cnFillOn=True\n;res@cnMonoFillPattern=True\n;res@cnMonoLineColor=True\nres@cnLinesOn=False\n;res@pmLabelBarDisplayMode=\"never\"\nres@gsnLeftString = \"\";\nres@gsnRightString = \"\"\n\nres@mpLimitMode = \"LatLon\"\nres@mpMinLonF = -120 ;min(lon)+0.2\nres@mpMaxLonF = -70 ;max(lon)-0.2\nres@mpMinLatF = 25 ;min(lat)+0.05\nres@mpMaxLatF = 50 ;max(lat)-0.05\nres@mpDataBaseVersion = \"MediumRes\"\n;res@tiMainString = times(it)\nres@mpDataBaseVersion = 
\"MediumRes\"\nres@mpDataSetName = \"Earth..4\"\nres@mpAreaMaskingOn = True\nres@mpOutlineBoundarySets = \"GeophysicalAndUSStates\"\nres@mpOutlineSpecifiers=\"United States : States\"\nres@mpLandFillColor = \"white\"\nres@mpInlandWaterFillColor = \"white\"\nres@mpOceanFillColor = \"white\"\nres@mpGeophysicalLineColor = \"Black\"\nres@mpGeophysicalLineThicknessF = 1.5\n\n;res@gsnSpreadColors = True\nres@lbLabelAutoStride = True\nres@lbLabelFont = 25\nres@tiXAxisFont = 25\nres@pmTickMarkDisplayMode = \"Always\"\nres@tmXBLabelFont = 25\nres@tmXBLabelFontHeightF = 0.013\nres@tmXBLabelDeltaF = -0.5\nres@tmYLLabelFont = 25\nres@tmYLLabelFontHeightF = 0.013\nres@tmXBLabelDeltaF = -0.5\nres@tmXTLabelsOn = False\nres@tmXTLabelFont = 25\nres@tmXTLabelFontHeightF = 0.013\nres@tmYRLabelsOn = False\nres@tmYRLabelFont = 25\nres@tmYRLabelFontHeightF = 0.013\n\n\nres@mpProjection = \"LambertConformal\" ;\"CylindricalEquidistant\"\nres@mpLambertParallel1F = 33.\nres@mpLambertParallel2F = 45.\nres@mpLambertMeridianF = -98.\n\nres@cnLevelSelectionMode = \"ManualLevels\"\nres@cnMinLevelValF = 0.\nres@cnMaxLevelValF = 80\nres@cnLevelSpacingF = 4\n\nres@txFont = \"times-roman\"\nres@tiMainFont = \"times-roman\"\n\ndo it = 0, nt-1\n if (it .lt. 12) then\n pdate=d1\n else\n pdate=d2\n end if\n\n pname=plot_dir+\"/testPlot_\"+pdate+\"_\"+ptime(it)\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n res@tiMainString = pdate+\" \"+ptime(it)+\" UTC O~B~3~N~ Forecast (ppbV)\"\n plot = gsn_csm_contour_map(wks,o3(it,:,:),res)\n draw(plot)\n frame(wks)\n delete(wks)\n system(\"composite -geometry 100x70+900+900 mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\nend do\ndelete(res)\n\nend\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/plot_daily_O3.ncl\n\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\nwarning:_NclOpenFile: cannot open file ; No such file or directory\n\n\nfatal:file (cdf_file1) isn't defined\n\nfatal:[\"Execute.c\":8637]:Execute: Error occurred at or near line 26 in file /groups/ESS/aalnaim/cmaq/plot_daily_O3.ncl\n\n\n", + "history_begin_time" : 1655750075447, + "history_end_time" : 1655751684011, + "history_notes" : null, + "history_process" : "iicy7w", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "br1dzLyS6NZy", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=20220613\nexport stdate_post=2022-06-13\nexport eddate_post=2022-06-14\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files\"\nexport mcip_dir=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km\"\nexport dir_graph=\"/groups/ESS/aalnaim/cmaq/plots\"\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/plot_daily_O3.ncl\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\n\ndate = getenv(\"YYYYMMDD_POST\")\nd1 = getenv(\"stdate_post\")\nd2 = 
getenv(\"eddate_post\")\n\n;print(\"Passed Date: \"+date)\n\n;aconc_dir = getenv(\"postdata_dir\")\ngrid_dir = getenv(\"mcip_dir\")\nplot_dir = getenv(\"dir_graph\")\n\ncdf_file1 = addfile(\"prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_20220612_ML_extracted.nc\",\"r\")\ncdf_file= addfile(grid_dir+\"/GRIDCRO2D_\"+date+\".nc\",\"r\")\n\nptime = (/\"12\",\"13\",\"14\",\"15\",\"16\",\"17\",\"18\",\"19\",\"20\",\"21\",\"22\",\"23\",\"00\",\"01\",\"02\",\"03\",\"04\",\"05\",\"06\",\"07\",\"08\",\"09\",\"10\",\"11\"/)\n\ntime = cdf_file1->TFLAG(:,0,:)\no3 = cdf_file1->O3(:,:,:) ;ppb\n;pm25 = cdf_file1->PM25_TOT(:,0,:,:)\n\n\nnt = dimsizes(o3(:,0,0))\nny = dimsizes(o3(0,:,0))\nnx = dimsizes(o3(0,0,:))\n\nprint(nt+\" \"+ny+\" \"+nx)\nprint(max(o3))\nprint(min(o3))\nprint(avg(o3))\n\n;print(max(pm25))\n;print(min(pm25))\n;print(avg(pm25))\n\n;print(time)\n\nlat = cdf_file->LAT(0,0,:,:)\nlon = cdf_file->LON(0,0,:,:)\n\no3@lat2d = lat\no3@lon2d = lon\n\nres = True\nres@gsnMaximize = True ; maximize pot in frame\nres@gsnFrame = False ; don't advance frame\nres@gsnDraw = False\n;res@gsnSpreadColors = True\nres@lbLabelAutoStride = True\n;res@lbBoxLinesOn = False\nres@pmLabelBarHeightF = 0.1\nres@pmLabelBarWidthF = 0.5\nres@cnFillOn=True\n;res@cnMonoFillPattern=True\n;res@cnMonoLineColor=True\nres@cnLinesOn=False\n;res@pmLabelBarDisplayMode=\"never\"\nres@gsnLeftString = \"\";\nres@gsnRightString = \"\"\n\nres@mpLimitMode = \"LatLon\"\nres@mpMinLonF = -120 ;min(lon)+0.2\nres@mpMaxLonF = -70 ;max(lon)-0.2\nres@mpMinLatF = 25 ;min(lat)+0.05\nres@mpMaxLatF = 50 ;max(lat)-0.05\nres@mpDataBaseVersion = \"MediumRes\"\n;res@tiMainString = times(it)\nres@mpDataBaseVersion = \"MediumRes\"\nres@mpDataSetName = \"Earth..4\"\nres@mpAreaMaskingOn = True\nres@mpOutlineBoundarySets = \"GeophysicalAndUSStates\"\nres@mpOutlineSpecifiers=\"United States : States\"\nres@mpLandFillColor = \"white\"\nres@mpInlandWaterFillColor = \"white\"\nres@mpOceanFillColor = \"white\"\nres@mpGeophysicalLineColor = \"Black\"\nres@mpGeophysicalLineThicknessF = 1.5\n\n;res@gsnSpreadColors = True\nres@lbLabelAutoStride = True\nres@lbLabelFont = 25\nres@tiXAxisFont = 25\nres@pmTickMarkDisplayMode = \"Always\"\nres@tmXBLabelFont = 25\nres@tmXBLabelFontHeightF = 0.013\nres@tmXBLabelDeltaF = -0.5\nres@tmYLLabelFont = 25\nres@tmYLLabelFontHeightF = 0.013\nres@tmXBLabelDeltaF = -0.5\nres@tmXTLabelsOn = False\nres@tmXTLabelFont = 25\nres@tmXTLabelFontHeightF = 0.013\nres@tmYRLabelsOn = False\nres@tmYRLabelFont = 25\nres@tmYRLabelFontHeightF = 0.013\n\n\nres@mpProjection = \"LambertConformal\" ;\"CylindricalEquidistant\"\nres@mpLambertParallel1F = 33.\nres@mpLambertParallel2F = 45.\nres@mpLambertMeridianF = -98.\n\nres@cnLevelSelectionMode = \"ManualLevels\"\nres@cnMinLevelValF = 0.\nres@cnMaxLevelValF = 80\nres@cnLevelSpacingF = 4\n\nres@txFont = \"times-roman\"\nres@tiMainFont = \"times-roman\"\n\ndo it = 0, nt-1\n if (it .lt. 
12) then\n pdate=d1\n else\n pdate=d2\n end if\n\n pname=plot_dir+\"/testPlot_\"+pdate+\"_\"+ptime(it)\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n res@tiMainString = pdate+\" \"+ptime(it)+\" UTC O~B~3~N~ Forecast (ppbV)\"\n plot = gsn_csm_contour_map(wks,o3(it,:,:),res)\n draw(plot)\n frame(wks)\n delete(wks)\n system(\"composite -geometry 100x70+900+900 mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\nend do\ndelete(res)\n\nend\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/plot_daily_O3.ncl\n\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\nfatal:Could not open (/lib/ncarg/nclscripts/csm/gsn_code.ncl)\n\nfatal:error at line 1 in file /groups/ESS/aalnaim/cmaq/plot_daily_O3.ncl\n\n\n", + "history_begin_time" : 1655750045227, + "history_end_time" : 1655751683402, + "history_notes" : null, + "history_process" : "iicy7w", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "qPU8rX3zzdLt", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=20220613\nexport stdate_post=2022-06-13\nexport eddate_post=2022-06-14\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files\"\nexport mcip_dir=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km\"\nexport dir_graph=\"/groups/ESS/aalnaim/cmaq/plots\"\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/plot_daily_O3.ncl\nload \"$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\n\ndate = getenv(\"YYYYMMDD_POST\")\nd1 = getenv(\"stdate_post\")\nd2 = getenv(\"eddate_post\")\n\n;print(\"Passed Date: \"+date)\n\n;aconc_dir = getenv(\"postdata_dir\")\ngrid_dir = getenv(\"mcip_dir\")\nplot_dir = getenv(\"dir_graph\")\n\ncdf_file1 = addfile(\"prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_20220612_ML_extracted.nc\",\"r\")\ncdf_file= addfile(grid_dir+\"/GRIDCRO2D_\"+date+\".nc\",\"r\")\n\nptime = (/\"12\",\"13\",\"14\",\"15\",\"16\",\"17\",\"18\",\"19\",\"20\",\"21\",\"22\",\"23\",\"00\",\"01\",\"02\",\"03\",\"04\",\"05\",\"06\",\"07\",\"08\",\"09\",\"10\",\"11\"/)\n\ntime = cdf_file1->TFLAG(:,0,:)\no3 = cdf_file1->O3(:,:,:) ;ppb\n;pm25 = cdf_file1->PM25_TOT(:,0,:,:)\n\n\nnt = dimsizes(o3(:,0,0))\nny = dimsizes(o3(0,:,0))\nnx = dimsizes(o3(0,0,:))\n\nprint(nt+\" \"+ny+\" \"+nx)\nprint(max(o3))\nprint(min(o3))\nprint(avg(o3))\n\n;print(max(pm25))\n;print(min(pm25))\n;print(avg(pm25))\n\n;print(time)\n\nlat = cdf_file->LAT(0,0,:,:)\nlon = cdf_file->LON(0,0,:,:)\n\no3@lat2d = lat\no3@lon2d = lon\n\nres = True\nres@gsnMaximize = True ; maximize pot in frame\nres@gsnFrame = False ; don't advance frame\nres@gsnDraw = False\n;res@gsnSpreadColors = True\nres@lbLabelAutoStride = True\n;res@lbBoxLinesOn = False\nres@pmLabelBarHeightF = 0.1\nres@pmLabelBarWidthF = 0.5\nres@cnFillOn=True\n;res@cnMonoFillPattern=True\n;res@cnMonoLineColor=True\nres@cnLinesOn=False\n;res@pmLabelBarDisplayMode=\"never\"\nres@gsnLeftString = \"\";\nres@gsnRightString = \"\"\n\nres@mpLimitMode = \"LatLon\"\nres@mpMinLonF = -120 ;min(lon)+0.2\nres@mpMaxLonF = -70 ;max(lon)-0.2\nres@mpMinLatF = 25 ;min(lat)+0.05\nres@mpMaxLatF = 50 
;max(lat)-0.05\nres@mpDataBaseVersion = \"MediumRes\"\n;res@tiMainString = times(it)\nres@mpDataBaseVersion = \"MediumRes\"\nres@mpDataSetName = \"Earth..4\"\nres@mpAreaMaskingOn = True\nres@mpOutlineBoundarySets = \"GeophysicalAndUSStates\"\nres@mpOutlineSpecifiers=\"United States : States\"\nres@mpLandFillColor = \"white\"\nres@mpInlandWaterFillColor = \"white\"\nres@mpOceanFillColor = \"white\"\nres@mpGeophysicalLineColor = \"Black\"\nres@mpGeophysicalLineThicknessF = 1.5\n\n;res@gsnSpreadColors = True\nres@lbLabelAutoStride = True\nres@lbLabelFont = 25\nres@tiXAxisFont = 25\nres@pmTickMarkDisplayMode = \"Always\"\nres@tmXBLabelFont = 25\nres@tmXBLabelFontHeightF = 0.013\nres@tmXBLabelDeltaF = -0.5\nres@tmYLLabelFont = 25\nres@tmYLLabelFontHeightF = 0.013\nres@tmXBLabelDeltaF = -0.5\nres@tmXTLabelsOn = False\nres@tmXTLabelFont = 25\nres@tmXTLabelFontHeightF = 0.013\nres@tmYRLabelsOn = False\nres@tmYRLabelFont = 25\nres@tmYRLabelFontHeightF = 0.013\n\n\nres@mpProjection = \"LambertConformal\" ;\"CylindricalEquidistant\"\nres@mpLambertParallel1F = 33.\nres@mpLambertParallel2F = 45.\nres@mpLambertMeridianF = -98.\n\nres@cnLevelSelectionMode = \"ManualLevels\"\nres@cnMinLevelValF = 0.\nres@cnMaxLevelValF = 80\nres@cnLevelSpacingF = 4\n\nres@txFont = \"times-roman\"\nres@tiMainFont = \"times-roman\"\n\ndo it = 0, nt-1\n if (it .lt. 12) then\n pdate=d1\n else\n pdate=d2\n end if\n\n pname=plot_dir+\"/testPlot_\"+pdate+\"_\"+ptime(it)\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n res@tiMainString = pdate+\" \"+ptime(it)+\" UTC O~B~3~N~ Forecast (ppbV)\"\n plot = gsn_csm_contour_map(wks,o3(it,:,:),res)\n draw(plot)\n frame(wks)\n delete(wks)\n system(\"composite -geometry 100x70+900+900 mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\nend do\ndelete(res)\n\nend\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/plot_daily_O3.ncl\n\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\nfatal:Could not open (/lib/ncarg/nclscripts/csm/gsn_code.ncl)\n\nfatal:error at line 1 in file /groups/ESS/aalnaim/cmaq/plot_daily_O3.ncl\n\n\n", + "history_begin_time" : 1655749989581, + "history_end_time" : 1655751682516, + "history_notes" : null, + "history_process" : "iicy7w", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "DA9fnFn5PWKR", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=20220613\nexport stdate_post=2022-06-13\nexport eddate_post=2022-06-14\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files\"\nexport mcip_dir=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km\"\nexport dir_graph=\"/groups/ESS/aalnaim/cmaq/plots\"\n\n\n\ncat <>/groups/ESS/aalnaim/cmaq/plot_daily_O3.ncl\nload \"$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\n\ndate = getenv(\"YYYYMMDD_POST\")\nd1 = getenv(\"stdate_post\")\nd2 = getenv(\"eddate_post\")\n\n;print(\"Passed Date: \"+date)\n\n;aconc_dir = getenv(\"postdata_dir\")\ngrid_dir = getenv(\"mcip_dir\")\nplot_dir = getenv(\"dir_graph\")\n\ncdf_file1 = 
addfile(\"prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_20220612_ML_extracted.nc\",\"r\")\ncdf_file= addfile(grid_dir+\"/GRIDCRO2D_\"+date+\".nc\",\"r\")\n\nptime = (/\"12\",\"13\",\"14\",\"15\",\"16\",\"17\",\"18\",\"19\",\"20\",\"21\",\"22\",\"23\",\"00\",\"01\",\"02\",\"03\",\"04\",\"05\",\"06\",\"07\",\"08\",\"09\",\"10\",\"11\"/)\n\ntime = cdf_file1->TFLAG(:,0,:)\no3 = cdf_file1->O3(:,:,:) ;ppb\n;pm25 = cdf_file1->PM25_TOT(:,0,:,:)\n\n\nnt = dimsizes(o3(:,0,0))\nny = dimsizes(o3(0,:,0))\nnx = dimsizes(o3(0,0,:))\n\nprint(nt+\" \"+ny+\" \"+nx)\nprint(max(o3))\nprint(min(o3))\nprint(avg(o3))\n\n;print(max(pm25))\n;print(min(pm25))\n;print(avg(pm25))\n\n;print(time)\n\nlat = cdf_file->LAT(0,0,:,:)\nlon = cdf_file->LON(0,0,:,:)\n\no3@lat2d = lat\no3@lon2d = lon\n\nres = True\nres@gsnMaximize = True ; maximize pot in frame\nres@gsnFrame = False ; don't advance frame\nres@gsnDraw = False\n;res@gsnSpreadColors = True\nres@lbLabelAutoStride = True\n;res@lbBoxLinesOn = False\nres@pmLabelBarHeightF = 0.1\nres@pmLabelBarWidthF = 0.5\nres@cnFillOn=True\n;res@cnMonoFillPattern=True\n;res@cnMonoLineColor=True\nres@cnLinesOn=False\n;res@pmLabelBarDisplayMode=\"never\"\nres@gsnLeftString = \"\";\nres@gsnRightString = \"\"\n\nres@mpLimitMode = \"LatLon\"\nres@mpMinLonF = -120 ;min(lon)+0.2\nres@mpMaxLonF = -70 ;max(lon)-0.2\nres@mpMinLatF = 25 ;min(lat)+0.05\nres@mpMaxLatF = 50 ;max(lat)-0.05\nres@mpDataBaseVersion = \"MediumRes\"\n;res@tiMainString = times(it)\nres@mpDataBaseVersion = \"MediumRes\"\nres@mpDataSetName = \"Earth..4\"\nres@mpAreaMaskingOn = True\nres@mpOutlineBoundarySets = \"GeophysicalAndUSStates\"\nres@mpOutlineSpecifiers=\"United States : States\"\nres@mpLandFillColor = \"white\"\nres@mpInlandWaterFillColor = \"white\"\nres@mpOceanFillColor = \"white\"\nres@mpGeophysicalLineColor = \"Black\"\nres@mpGeophysicalLineThicknessF = 1.5\n\n;res@gsnSpreadColors = True\nres@lbLabelAutoStride = True\nres@lbLabelFont = 25\nres@tiXAxisFont = 25\nres@pmTickMarkDisplayMode = \"Always\"\nres@tmXBLabelFont = 25\nres@tmXBLabelFontHeightF = 0.013\nres@tmXBLabelDeltaF = -0.5\nres@tmYLLabelFont = 25\nres@tmYLLabelFontHeightF = 0.013\nres@tmXBLabelDeltaF = -0.5\nres@tmXTLabelsOn = False\nres@tmXTLabelFont = 25\nres@tmXTLabelFontHeightF = 0.013\nres@tmYRLabelsOn = False\nres@tmYRLabelFont = 25\nres@tmYRLabelFontHeightF = 0.013\n\n\nres@mpProjection = \"LambertConformal\" ;\"CylindricalEquidistant\"\nres@mpLambertParallel1F = 33.\nres@mpLambertParallel2F = 45.\nres@mpLambertMeridianF = -98.\n\nres@cnLevelSelectionMode = \"ManualLevels\"\nres@cnMinLevelValF = 0.\nres@cnMaxLevelValF = 80\nres@cnLevelSpacingF = 4\n\nres@txFont = \"times-roman\"\nres@tiMainFont = \"times-roman\"\n\ndo it = 0, nt-1\n if (it .lt. 
12) then\n pdate=d1\n else\n pdate=d2\n end if\n\n pname=plot_dir+\"/testPlot_\"+pdate+\"_\"+ptime(it)\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n res@tiMainString = pdate+\" \"+ptime(it)+\" UTC O~B~3~N~ Forecast (ppbV)\"\n plot = gsn_csm_contour_map(wks,o3(it,:,:),res)\n draw(plot)\n frame(wks)\n delete(wks)\n system(\"composite -geometry 100x70+900+900 mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\nend do\ndelete(res)\n\nend\nEOF\n\n\n\nmodule load ncl\nsleep 20\nncl /groups/ESS/aalnaim/cmaq/plot_daily_O3.ncl\n\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\nfatal:Could not open (/lib/ncarg/nclscripts/csm/gsn_code.ncl)\n\nfatal:error at line 1 in file /groups/ESS/aalnaim/cmaq/plot_daily_O3.ncl\n\n\n", + "history_begin_time" : 1655749591380, + "history_end_time" : 1655751681962, + "history_notes" : null, + "history_process" : "iicy7w", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "9jaQMmHmWFTD", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=20220613\nexport stdate_post=2022-06-13\nexport eddate_post=2022-06-14\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files\"\nexport mcip_dir=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km\"\nexport dir_graph=\"/groups/ESS/aalnaim/cmaq/plots\"\n\n\n\ncat <>/groups/ESS/aalnaim/cmaq/plot_daily_O3.ncl\nload \"$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\n\ndate = getenv(\"YYYYMMDD_POST\")\nd1 = getenv(\"stdate_post\")\nd2 = getenv(\"eddate_post\")\n\n;print(\"Passed Date: \"+date)\n\n;aconc_dir = getenv(\"postdata_dir\")\ngrid_dir = getenv(\"mcip_dir\")\nplot_dir = getenv(\"dir_graph\")\n\ncdf_file1 = addfile(\"prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_20220612_ML_extracted.nc\",\"r\")\ncdf_file= addfile(grid_dir+\"/GRIDCRO2D_\"+date+\".nc\",\"r\")\n\nptime = (/\"12\",\"13\",\"14\",\"15\",\"16\",\"17\",\"18\",\"19\",\"20\",\"21\",\"22\",\"23\",\"00\",\"01\",\"02\",\"03\",\"04\",\"05\",\"06\",\"07\",\"08\",\"09\",\"10\",\"11\"/)\n\ntime = cdf_file1->TFLAG(:,0,:)\no3 = cdf_file1->O3(:,:,:) ;ppb\n;pm25 = cdf_file1->PM25_TOT(:,0,:,:)\n\n\nnt = dimsizes(o3(:,0,0))\nny = dimsizes(o3(0,:,0))\nnx = dimsizes(o3(0,0,:))\n\nprint(nt+\" \"+ny+\" \"+nx)\nprint(max(o3))\nprint(min(o3))\nprint(avg(o3))\n\n;print(max(pm25))\n;print(min(pm25))\n;print(avg(pm25))\n\n;print(time)\n\nlat = cdf_file->LAT(0,0,:,:)\nlon = cdf_file->LON(0,0,:,:)\n\no3@lat2d = lat\no3@lon2d = lon\n\nres = True\nres@gsnMaximize = True ; maximize pot in frame\nres@gsnFrame = False ; don't advance frame\nres@gsnDraw = False\n;res@gsnSpreadColors = True\nres@lbLabelAutoStride = True\n;res@lbBoxLinesOn = False\nres@pmLabelBarHeightF = 0.1\nres@pmLabelBarWidthF = 0.5\nres@cnFillOn=True\n;res@cnMonoFillPattern=True\n;res@cnMonoLineColor=True\nres@cnLinesOn=False\n;res@pmLabelBarDisplayMode=\"never\"\nres@gsnLeftString = \"\";\nres@gsnRightString = \"\"\n\nres@mpLimitMode = \"LatLon\"\nres@mpMinLonF = -120 ;min(lon)+0.2\nres@mpMaxLonF = -70 ;max(lon)-0.2\nres@mpMinLatF = 25 ;min(lat)+0.05\nres@mpMaxLatF = 
50 ;max(lat)-0.05\nres@mpDataBaseVersion = \"MediumRes\"\n;res@tiMainString = times(it)\nres@mpDataBaseVersion = \"MediumRes\"\nres@mpDataSetName = \"Earth..4\"\nres@mpAreaMaskingOn = True\nres@mpOutlineBoundarySets = \"GeophysicalAndUSStates\"\nres@mpOutlineSpecifiers=\"United States : States\"\nres@mpLandFillColor = \"white\"\nres@mpInlandWaterFillColor = \"white\"\nres@mpOceanFillColor = \"white\"\nres@mpGeophysicalLineColor = \"Black\"\nres@mpGeophysicalLineThicknessF = 1.5\n\n;res@gsnSpreadColors = True\nres@lbLabelAutoStride = True\nres@lbLabelFont = 25\nres@tiXAxisFont = 25\nres@pmTickMarkDisplayMode = \"Always\"\nres@tmXBLabelFont = 25\nres@tmXBLabelFontHeightF = 0.013\nres@tmXBLabelDeltaF = -0.5\nres@tmYLLabelFont = 25\nres@tmYLLabelFontHeightF = 0.013\nres@tmXBLabelDeltaF = -0.5\nres@tmXTLabelsOn = False\nres@tmXTLabelFont = 25\nres@tmXTLabelFontHeightF = 0.013\nres@tmYRLabelsOn = False\nres@tmYRLabelFont = 25\nres@tmYRLabelFontHeightF = 0.013\n\n\nres@mpProjection = \"LambertConformal\" ;\"CylindricalEquidistant\"\nres@mpLambertParallel1F = 33.\nres@mpLambertParallel2F = 45.\nres@mpLambertMeridianF = -98.\n\nres@cnLevelSelectionMode = \"ManualLevels\"\nres@cnMinLevelValF = 0.\nres@cnMaxLevelValF = 80\nres@cnLevelSpacingF = 4\n\nres@txFont = \"times-roman\"\nres@tiMainFont = \"times-roman\"\n\ndo it = 0, nt-1\n if (it .lt. 12) then\n pdate=d1\n else\n pdate=d2\n end if\n\n pname=plot_dir+\"/testPlot_\"+pdate+\"_\"+ptime(it)\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n res@tiMainString = pdate+\" \"+ptime(it)+\" UTC O~B~3~N~ Forecast (ppbV)\"\n plot = gsn_csm_contour_map(wks,o3(it,:,:),res)\n draw(plot)\n frame(wks)\n delete(wks)\n system(\"composite -geometry 100x70+900+900 mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\nend do\ndelete(res)\n\nend\nEOF\n\n\n\nmodule load ncl; ncl /groups/ESS/aalnaim/cmaq/plot_daily_O3.ncl\n\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\nfatal:Could not open (/lib/ncarg/nclscripts/csm/gsn_code.ncl)\n\nfatal:error at line 1 in file /groups/ESS/aalnaim/cmaq/plot_daily_O3.ncl\n\n\n", + "history_begin_time" : 1655749243811, + "history_end_time" : 1655751681465, + "history_notes" : null, + "history_process" : "iicy7w", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "UOeBZpuyWs4F", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=20220613\nexport stdate_post=2022-06-13\nexport eddate_post=2022-06-14\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files\"\nexport mcip_dir=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km\"\nexport dir_graph=\"/groups/ESS/aalnaim/cmaq/plots\"\n\n\n\ncat <>/groups/ESS/aalnaim/cmaq/plot_daily_O3.ncl\nload \"$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\n\ndate = getenv(\"YYYYMMDD_POST\")\nd1 = getenv(\"stdate_post\")\nd2 = getenv(\"eddate_post\")\n\n;print(\"Passed Date: \"+date)\n\n;aconc_dir = getenv(\"postdata_dir\")\ngrid_dir = getenv(\"mcip_dir\")\nplot_dir = getenv(\"dir_graph\")\n\ncdf_file1 = 
addfile(\"prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_20220612_ML_extracted.nc\",\"r\")\ncdf_file= addfile(grid_dir+\"/GRIDCRO2D_\"+date+\".nc\",\"r\")\n\nptime = (/\"12\",\"13\",\"14\",\"15\",\"16\",\"17\",\"18\",\"19\",\"20\",\"21\",\"22\",\"23\",\"00\",\"01\",\"02\",\"03\",\"04\",\"05\",\"06\",\"07\",\"08\",\"09\",\"10\",\"11\"/)\n\ntime = cdf_file1->TFLAG(:,0,:)\no3 = cdf_file1->O3(:,:,:) ;ppb\n;pm25 = cdf_file1->PM25_TOT(:,0,:,:)\n\n\nnt = dimsizes(o3(:,0,0))\nny = dimsizes(o3(0,:,0))\nnx = dimsizes(o3(0,0,:))\n\nprint(nt+\" \"+ny+\" \"+nx)\nprint(max(o3))\nprint(min(o3))\nprint(avg(o3))\n\n;print(max(pm25))\n;print(min(pm25))\n;print(avg(pm25))\n\n;print(time)\n\nlat = cdf_file->LAT(0,0,:,:)\nlon = cdf_file->LON(0,0,:,:)\n\no3@lat2d = lat\no3@lon2d = lon\n\nres = True\nres@gsnMaximize = True ; maximize pot in frame\nres@gsnFrame = False ; don't advance frame\nres@gsnDraw = False\n;res@gsnSpreadColors = True\nres@lbLabelAutoStride = True\n;res@lbBoxLinesOn = False\nres@pmLabelBarHeightF = 0.1\nres@pmLabelBarWidthF = 0.5\nres@cnFillOn=True\n;res@cnMonoFillPattern=True\n;res@cnMonoLineColor=True\nres@cnLinesOn=False\n;res@pmLabelBarDisplayMode=\"never\"\nres@gsnLeftString = \"\";\nres@gsnRightString = \"\"\n\nres@mpLimitMode = \"LatLon\"\nres@mpMinLonF = -120 ;min(lon)+0.2\nres@mpMaxLonF = -70 ;max(lon)-0.2\nres@mpMinLatF = 25 ;min(lat)+0.05\nres@mpMaxLatF = 50 ;max(lat)-0.05\nres@mpDataBaseVersion = \"MediumRes\"\n;res@tiMainString = times(it)\nres@mpDataBaseVersion = \"MediumRes\"\nres@mpDataSetName = \"Earth..4\"\nres@mpAreaMaskingOn = True\nres@mpOutlineBoundarySets = \"GeophysicalAndUSStates\"\nres@mpOutlineSpecifiers=\"United States : States\"\nres@mpLandFillColor = \"white\"\nres@mpInlandWaterFillColor = \"white\"\nres@mpOceanFillColor = \"white\"\nres@mpGeophysicalLineColor = \"Black\"\nres@mpGeophysicalLineThicknessF = 1.5\n\n;res@gsnSpreadColors = True\nres@lbLabelAutoStride = True\nres@lbLabelFont = 25\nres@tiXAxisFont = 25\nres@pmTickMarkDisplayMode = \"Always\"\nres@tmXBLabelFont = 25\nres@tmXBLabelFontHeightF = 0.013\nres@tmXBLabelDeltaF = -0.5\nres@tmYLLabelFont = 25\nres@tmYLLabelFontHeightF = 0.013\nres@tmXBLabelDeltaF = -0.5\nres@tmXTLabelsOn = False\nres@tmXTLabelFont = 25\nres@tmXTLabelFontHeightF = 0.013\nres@tmYRLabelsOn = False\nres@tmYRLabelFont = 25\nres@tmYRLabelFontHeightF = 0.013\n\n\nres@mpProjection = \"LambertConformal\" ;\"CylindricalEquidistant\"\nres@mpLambertParallel1F = 33.\nres@mpLambertParallel2F = 45.\nres@mpLambertMeridianF = -98.\n\nres@cnLevelSelectionMode = \"ManualLevels\"\nres@cnMinLevelValF = 0.\nres@cnMaxLevelValF = 80\nres@cnLevelSpacingF = 4\n\nres@txFont = \"times-roman\"\nres@tiMainFont = \"times-roman\"\n\ndo it = 0, nt-1\n if (it .lt. 
12) then\n pdate=d1\n else\n pdate=d2\n end if\n\n pname=plot_dir+\"/testPlot_\"+pdate+\"_\"+ptime(it)\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n res@tiMainString = pdate+\" \"+ptime(it)+\" UTC O~B~3~N~ Forecast (ppbV)\"\n plot = gsn_csm_contour_map(wks,o3(it,:,:),res)\n draw(plot)\n frame(wks)\n delete(wks)\n system(\"composite -geometry 100x70+900+900 mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\nend do\ndelete(res)\n\nend\nEOF\n\n\n\noutput='module load ncl; ncl /groups/ESS/aalnaim/cmaq/plot_daily_O3.ncl'\n\nif output; then\n echo 'Images generated successfully\\n'\nelse\n echo 'ncl script failed\\n'\nfi\n\n", + "history_output" : "./geoweaver-UOeBZpuyWs4F.sh: line 160: output: command not found\nncl script failed\\n\n", + "history_begin_time" : 1655749140593, + "history_end_time" : 1655751680614, + "history_notes" : null, + "history_process" : "iicy7w", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "mwE7drOv1WBb", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=20220613\nexport stdate_post=2022-06-13\nexport eddate_post=2022-06-14\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files\"\nexport mcip_dir=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km\"\nexport dir_graph=\"/groups/ESS/aalnaim/cmaq/plots\"\n\n\n\ncat <>/groups/ESS/aalnaim/cmaq/plot_daily_O3.ncl\nload \"$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\n\ndate = getenv(\"YYYYMMDD_POST\")\nd1 = getenv(\"stdate_post\")\nd2 = getenv(\"eddate_post\")\n\n;print(\"Passed Date: \"+date)\n\n;aconc_dir = getenv(\"postdata_dir\")\ngrid_dir = getenv(\"mcip_dir\")\nplot_dir = getenv(\"dir_graph\")\n\ncdf_file1 = addfile(\"prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_20220612_ML_extracted.nc\",\"r\")\ncdf_file= addfile(grid_dir+\"/GRIDCRO2D_\"+date+\".nc\",\"r\")\n\nptime = (/\"12\",\"13\",\"14\",\"15\",\"16\",\"17\",\"18\",\"19\",\"20\",\"21\",\"22\",\"23\",\"00\",\"01\",\"02\",\"03\",\"04\",\"05\",\"06\",\"07\",\"08\",\"09\",\"10\",\"11\"/)\n\ntime = cdf_file1->TFLAG(:,0,:)\no3 = cdf_file1->O3(:,:,:) ;ppb\n;pm25 = cdf_file1->PM25_TOT(:,0,:,:)\n\n\nnt = dimsizes(o3(:,0,0))\nny = dimsizes(o3(0,:,0))\nnx = dimsizes(o3(0,0,:))\n\nprint(nt+\" \"+ny+\" \"+nx)\nprint(max(o3))\nprint(min(o3))\nprint(avg(o3))\n\n;print(max(pm25))\n;print(min(pm25))\n;print(avg(pm25))\n\n;print(time)\n\nlat = cdf_file->LAT(0,0,:,:)\nlon = cdf_file->LON(0,0,:,:)\n\no3@lat2d = lat\no3@lon2d = lon\n\nres = True\nres@gsnMaximize = True ; maximize pot in frame\nres@gsnFrame = False ; don't advance frame\nres@gsnDraw = False\n;res@gsnSpreadColors = True\nres@lbLabelAutoStride = True\n;res@lbBoxLinesOn = False\nres@pmLabelBarHeightF = 0.1\nres@pmLabelBarWidthF = 0.5\nres@cnFillOn=True\n;res@cnMonoFillPattern=True\n;res@cnMonoLineColor=True\nres@cnLinesOn=False\n;res@pmLabelBarDisplayMode=\"never\"\nres@gsnLeftString = \"\";\nres@gsnRightString = \"\"\n\nres@mpLimitMode = \"LatLon\"\nres@mpMinLonF = -120 ;min(lon)+0.2\nres@mpMaxLonF = -70 ;max(lon)-0.2\nres@mpMinLatF = 25 ;min(lat)+0.05\nres@mpMaxLatF = 50 ;max(lat)-0.05\nres@mpDataBaseVersion = \"MediumRes\"\n;res@tiMainString = times(it)\nres@mpDataBaseVersion = \"MediumRes\"\nres@mpDataSetName = \"Earth..4\"\nres@mpAreaMaskingOn = 
True\nres@mpOutlineBoundarySets = \"GeophysicalAndUSStates\"\nres@mpOutlineSpecifiers=\"United States : States\"\nres@mpLandFillColor = \"white\"\nres@mpInlandWaterFillColor = \"white\"\nres@mpOceanFillColor = \"white\"\nres@mpGeophysicalLineColor = \"Black\"\nres@mpGeophysicalLineThicknessF = 1.5\n\n;res@gsnSpreadColors = True\nres@lbLabelAutoStride = True\nres@lbLabelFont = 25\nres@tiXAxisFont = 25\nres@pmTickMarkDisplayMode = \"Always\"\nres@tmXBLabelFont = 25\nres@tmXBLabelFontHeightF = 0.013\nres@tmXBLabelDeltaF = -0.5\nres@tmYLLabelFont = 25\nres@tmYLLabelFontHeightF = 0.013\nres@tmXBLabelDeltaF = -0.5\nres@tmXTLabelsOn = False\nres@tmXTLabelFont = 25\nres@tmXTLabelFontHeightF = 0.013\nres@tmYRLabelsOn = False\nres@tmYRLabelFont = 25\nres@tmYRLabelFontHeightF = 0.013\n\n\nres@mpProjection = \"LambertConformal\" ;\"CylindricalEquidistant\"\nres@mpLambertParallel1F = 33.\nres@mpLambertParallel2F = 45.\nres@mpLambertMeridianF = -98.\n\nres@cnLevelSelectionMode = \"ManualLevels\"\nres@cnMinLevelValF = 0.\nres@cnMaxLevelValF = 80\nres@cnLevelSpacingF = 4\n\nres@txFont = \"times-roman\"\nres@tiMainFont = \"times-roman\"\n\ndo it = 0, nt-1\n if (it .lt. 12) then\n pdate=d1\n else\n pdate=d2\n end if\n\n pname=plot_dir+\"/testPlot_\"+pdate+\"_\"+ptime(it)\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n res@tiMainString = pdate+\" \"+ptime(it)+\" UTC O~B~3~N~ Forecast (ppbV)\"\n plot = gsn_csm_contour_map(wks,o3(it,:,:),res)\n draw(plot)\n frame(wks)\n delete(wks)\n system(\"composite -geometry 100x70+900+900 mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\nend do\ndelete(res)\n\nend\nEOF\n\nmodule load ncl\n\noutput='ncl /groups/ESS/aalnaim/cmaq/plot_daily_O3.ncl'\n\nif output; then\n echo 'Images generated successfully\\n'\nelse\n echo 'ncl script failed\\n'\nfi\n\n", + "history_output" : "./geoweaver-mwE7drOv1WBb.sh: line 160: output: command not found\nncl script failed\\n\n", + "history_begin_time" : 1655749025638, + "history_end_time" : 1655751680103, + "history_notes" : null, + "history_process" : "iicy7w", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "99qp90f5zc2", + "history_input" : null, + "history_output" : null, + "history_begin_time" : null, + "history_end_time" : 1656450968050, + "history_notes" : null, + "history_process" : "iicy7w", + "host_id" : "p6wvf2", + "indicator" : "Stopped" +},{ + "history_id" : "b52wrb6e5b0", + "history_input" : null, + "history_output" : null, + "history_begin_time" : null, + "history_end_time" : 1656452485892, + "history_notes" : null, + "history_process" : "iicy7w", + "host_id" : "p6wvf2", + "indicator" : "Stopped" +},] diff --git a/history/process_is1w3m.json b/history/process_is1w3m.json new file mode 100644 index 0000000..0531384 --- /dev/null +++ b/history/process_is1w3m.json @@ -0,0 +1,221 @@ +[{ + "history_id" : "3m8h86qnfzm", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=$(date -d '3 day ago' '+%Y%m%d')\nexport stdate_post=$(date -d '3 day ago' '+%Y-%m-%d') \nexport eddate_post=$(date -d '2 day ago' '+%Y-%m-%d')\n\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files\"\nexport mcip_dir=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km\"\nexport graph_dir=\"/groups/ESS/aalnaim/cmaq/plots\"\n\nexport obs_dir_NCL=\"/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X\"\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nload 
\"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\n\ndate = getenv(\"YYYYMMDD_POST\") \nd1 = getenv(\"stdate_post\") \nd2 = getenv(\"eddate_post\") \n\nobs_dir = getenv(\"obs_dir_NCL\")\nplot_dir = getenv(\"graph_dir\") \n\nhr=new(24,\"string\")\nhr=(/\"00\",\"01\",\"02\",\"03\",\"04\",\"05\",\"06\",\"07\",\"08\",\"09\",\"10\",\"11\",\"12\",\"13\",\"14\",\"15\",\"16\",\"17\",\"18\",\"19\",\"20\",\"21\",\"22\",\"23\"/)\n\nprint(plot_dir)\naconc_dir = getenv(\"postdata_dir\") \ngrid_dir = getenv(\"mcip_dir\") \n\ncdf_file1 = addfile(aconc_dir+\"/COMBINE3D_ACONC_v531_gcc_AQF5X_\"+date+\"_ML_extracted.nc\",\"r\")\ncdf_file= addfile(grid_dir+\"/GRIDCRO2D_\"+date+\".nc\",\"r\")\ncdf_file2= addfile(grid_dir+\"/METCRO2D_\"+date+\".nc\",\"r\")\n\ntime = cdf_file1->TFLAG(:,0,:)\no3 = cdf_file1->O3(:,:,:) ;ppb\nwspd10=cdf_file2->WSPD10(:,0,:,:)\nwdir10=cdf_file2->WDIR10(:,0,:,:)\n\ntemp = cdf_file2->TEMP2\n\nnt = dimsizes(o3(:,0,0))\nny = dimsizes(o3(0,:,0))\nnx = dimsizes(o3(0,0,:))\n\nprint(max(temp))\nprint(min(temp))\nprint(avg(temp))\n\n\n\nprint(nt+\" \"+ny+\" \"+nx)\nprint(max(o3))\nprint(min(o3))\nprint(avg(o3))\n\nlat = cdf_file->LAT(0,0,:,:)\nlon = cdf_file->LON(0,0,:,:)\n\no3@lat2d = lat\no3@lon2d = lon\no3@unit = \"ppbv\"\n\nUV10=wind_component(wspd10,wdir10,0)\nUV10@lat2d = lat\nUV10@lon2d = lon\n\n\nres = True\nres@gsnMaximize = True ; maximize pot in frame\nres@gsnFrame = False ; dont advance frame\nres@gsnDraw = False\nres@gsnLeftString = \"\"\nres@gsnRightString = \"\"\nres@txFont = \"times-roman\"\nres@tiMainFont = \"times-roman\"\n;res@tiMainFontHeightF = 0.02\n;res@vpWidthF = 0.7\n;res@vpHeightF = 0.7\n\n;;set map;;\nmpres = res\nmpres@mpLimitMode = \"LatLon\"\nmpres@mpDataSetName = \"Earth..4\"\nmpres@mpDataBaseVersion = \"MediumRes\"\nmpres@mpOutlineOn = True\nmpres@mpGeophysicalLineThicknessF = 1.5\nmpres@mpFillDrawOrder = \"PostDraw\"\nmpres@mpFillOn = False\nmpres@mpAreaMaskingOn = True\nmpres@mpOutlineBoundarySets = \"GeophysicalAndUSStates\"\nmpres@mpOutlineSpecifiers = \"United States:States\"\nmpres@mpProjection = \"LambertConformal\"\nmpres@mpLambertParallel1F = 33.\nmpres@mpLambertParallel2F = 45.\nmpres@mpLambertMeridianF = -98.\nmpres@mpMinLonF = -120 ;min(lon)+0.2\nmpres@mpMaxLonF = -70 ;max(lon)-0.2\nmpres@mpMinLatF = 25 ;min(lat)+0.05\nmpres@mpMaxLatF = 50 ;max(lat)-0.05\nmpres@pmTickMarkDisplayMode = \"Always\"\nmpres@mpLandFillColor = \"white\"\nmpres@mpInlandWaterFillColor = \"white\"\nmpres@mpOceanFillColor = \"white\"\nmpres@mpGeophysicalLineColor = \"Black\"\n\n;mpres@lbLabelAutoStride = True\nmpres@tiXAxisFont = 25\nmpres@pmTickMarkDisplayMode = \"Always\"\nmpres@tmXBLabelFont = 25\nmpres@tmXBLabelFontHeightF = 0.013\nmpres@tmXBLabelDeltaF = -0.5\nmpres@tmYLLabelFont = 25\nmpres@tmYLLabelFontHeightF = 0.013\nmpres@tmXBLabelDeltaF = -0.5\nmpres@tmXTLabelsOn = False\nmpres@tmXTLabelFont = 25\nmpres@tmXTLabelFontHeightF = 0.013\nmpres@tmYRLabelsOn = False\nmpres@tmYRLabelFont = 25\nmpres@tmYRLabelFontHeightF = 0.013\n\n;;set contour;;\ncnres = res\ncnres@cnFillDrawOrder = \"PreDraw\"\ncnres@cnFillOn = True\ncnres@cnLinesOn = 
False\ncnres@cnLineLabelsOn = False\ncnres@lbLabelFont = 25\ncnres@lbLabelFontHeightF = 0.013\ncnres@tiXAxisFont = 25\ncnres@pmLabelBarWidthF = 0.5\ncnres@pmLabelBarHeightF = 0.1\n;cnres@pmLabelBarOrthogonalPosF = -0.02\ncnres@lbLabelAutoStride = True\n\n;set vector;;\nres_vc = res\nres_vc@vcGlyphStyle = \"LineArrow\"\nres_vc@vcLineArrowThicknessF = 3\nres_vc@vcMinDistanceF = 0.03\nres_vc@vcRefLengthF = 0.03\nres_vc@vcRefAnnoOn = True\nres_vc@vcRefMagnitudeF = 16\nres_vc@vcRefAnnoString1 = \"16m/s\"\nres_vc@vcRefAnnoSide = \"Top\"\nres_vc@vcRefAnnoString2On = False\nres_vc@vcRefAnnoPerimOn = False\nres_vc@vcRefAnnoOrthogonalPosF = -0.02\nres_vc@vcRefAnnoParallelPosF = 0.999\n;res_vc@vcRefAnnoBackgroundColor = \"White\"\nres_vc@vcVectorDrawOrder = \"PostDraw\"\n\ndo it = 0, nt-1\n if (it .lt. 12) then\n pdate=d1\n else\n pdate=d2\n end if\n\n ;print(time(it,0)+\" \"+time(it,1))\n rundate = yyyyddd_to_yyyymmdd( time(it,0) )\n runtime = hr( tointeger(time(it,1)/10000) )\n\n site = readAsciiTable(obs_dir+\"/AQF5X_Hourly_\"+rundate+runtime+\".dat\",1,\"string\",1)\n nrows = dimsizes(site)\n sitename = str_get_field(site,1,\",\")\n sitelat = stringtofloat(str_get_field(site,2,\",\"))\n sitelon = stringtofloat(str_get_field(site,3,\",\"))\n O3_obs = stringtofloat(str_get_field(site,4,\",\"))\n\n obslon = sitelon(:,0)\n obslat = sitelat(:,0)\n obsO3 = O3_obs(:,0)\n\n npts = nrows(0)\n\n obsO3@_FillValue = -999.\n\n;--- levels for dividing\n levels_O3 = ispan(0,80,4)\n\n nlevels = dimsizes(levels_O3)\n\n colors = span_color_rgba(\"WhiteBlueGreenYellowRed\",nlevels+1)\n\n num_distinct_markers = nlevels+1 ; number of distinct markers\n lat_O3 = new((/num_distinct_markers,npts/),float)\n lon_O3 = new((/num_distinct_markers,npts/),float)\n lat_O3 = -999\n lon_O3 = -999\n\n\n;\n; Group the points according to which range they fall in. 
At the\n; same time, create the label that we will use later in the labelbar\n;\n do i = 0, num_distinct_markers-1\n if (i.eq.0) then\n indexes_O3 = ind(obsO3(:).lt.levels_O3(0))\n end if\n if (i.eq.num_distinct_markers-1) then\n indexes_O3 = ind(obsO3(:).ge.max(levels_O3))\n end if\n if (i.gt.0.and.i.lt.num_distinct_markers-1) then\n indexes_O3 = ind(obsO3(:).ge.levels_O3(i-1).and.obsO3(:).lt.levels_O3(i))\n end if\n\n;\n; Now that we have the set of indexes whose values fall within\n; the given range, take the corresponding lat/lon values and store\n; them, so later we can color this set of markers with the appropriate\n; color.\n;\n if (.not.any(ismissing(indexes_O3))) then\n npts_range_O3 = dimsizes(indexes_O3) ; # of points in this range.\n\n lat_O3(i,0:npts_range_O3-1) = obslat(indexes_O3)\n lon_O3(i,0:npts_range_O3-1) = obslon(indexes_O3)\n ;print(\"O3: \"+npts_range_O3)\n end if\n\n\n delete(indexes_O3) ; Necessary b/c \"indexes\" may be a different\n end do\n\n lat_O3@_FillValue = -999\n lon_O3@_FillValue = -999\n\n gsres = True\n gsres@gsMarkerIndex = 16 ; Use filled dots for markers.\n\n hollowres = True\n hollowres@gsMarkerIndex = 4\n hollowres@gsMarkerColor = \"black\"\n hollowres@gsMarkerSizeF = 0.008\n\n;;;;;;;;; Plot Ozone\n pname=plot_dir+\"/OBS-FORECAST_O3_\"+rundate+runtime\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n pmid_O3 = new(num_distinct_markers,graphic)\n hollow_O3 = new(num_distinct_markers,graphic)\n\n cnres@tiMainString = pdate+\" \"+runtime+\" UTC O~B~3~N~ (ppbV)\"\n cnres@cnLevelSelectionMode = \"ManualLevels\"\n cnres@cnMinLevelValF = 0.\n cnres@cnMaxLevelValF = 80\n cnres@cnLevelSpacingF = 4\n\n ;plot = gsn_csm_contour_map(wks,o3(it,:,:),res)\n map = gsn_csm_map(wks,mpres)\n contour = gsn_csm_contour(wks,o3(it,:,:),cnres)\n vector = gsn_csm_vector(wks,UV10(0,it,:,:),UV10(1,it,:,:),res_vc)\n overlay(map,contour)\n overlay(map,vector)\n\n pmid = new(num_distinct_markers,graphic)\n hollow = new(num_distinct_markers,graphic)\n do i = 0, num_distinct_markers-1\n if (.not.ismissing(lat_O3(i,0)))\n gsres@gsMarkerColor = colors(i,:)\n gsres@gsMarkerSizeF = 0.008\n gsres@gsMarkerThicknessF = 1\n pmid(i) = gsn_add_polymarker(wks,vector,lon_O3(i,:),lat_O3(i,:),gsres)\n hollow(i) = gsn_add_polymarker(wks,vector,lon_O3(i,:),lat_O3(i,:),hollowres)\n end if\n end do\n\n draw(map)\n frame(wks)\n delete(wks)\n delete(pmid_O3)\n delete(hollow_O3)\n system(\"composite -geometry 100x70+900+900 /groups/ESS/aalnaim/cmaq/mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\n\n\n delete(pmid)\n delete(hollow)\n delete(site)\n delete(sitename)\n delete(sitelat)\n delete(sitelon)\n delete(O3_obs)\n delete(obslon)\n delete(obslat)\n delete(obsO3)\n delete([/lon_O3,lat_O3/])\n\nend do\ndelete(res)\n\n;/\n\nend\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\n\nconvert -delay 100 /groups/ESS/aalnaim/cmaq/plots/OBS*.png /groups/ESS/aalnaim/cmaq/plots/\"Airnow_\"$YYYYMMDD_POST.gif\n\nif [ $? 
-eq 0 ]; then\n echo \"Generating AirNow images/gif Completed Successfully\"\n\techo \"Removing ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nelse\n echo \"Generating AirNow images/gif Failed!\"\n echo \"Removing ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nfi\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\n\n\nVariable: plot_dir\nType: string\nTotal Size: 8 bytes\n 1 values\nNumber of Dimensions: 1\nDimensions and sizes:\t[1]\nCoordinates: \n(0)\t/groups/ESS/aalnaim/cmaq/plots\n(0)\t316.8002\n(0)\t262.6003\n(0)\t293.9359\n(0)\t24 265 442\n(0)\t85.06\n(0)\t4.506333333333333\n(0)\t30.63602873848283\nfatal:asciiread: Unable to open input file (/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X/AQF5X_Hourly_2022062512.dat)\n\nfatal:[\"Execute.c\":8637]:Execute: Error occurred at or near line 9684 in file /opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\n\n\nfatal:[\"Execute.c\":8637]:Execute: Error occurred at or near line 9766 in file /opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\n\n\nfatal:[\"Execute.c\":8637]:Execute: Error occurred at or near line 159 in file /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\n\n\nconvert: unable to open image `/groups/ESS/aalnaim/cmaq/plots/OBS*.png': No such file or directory @ error/blob.c/OpenBlob/2881.\nconvert: no images defined `/groups/ESS/aalnaim/cmaq/plots/Airnow_20220625.gif' @ error/convert.c/ConvertImageCommand/3226.\nGenerating AirNow images/gif Failed!\nRemoving ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\n", + "history_begin_time" : 1656459811268, + "history_end_time" : 1656459817777, + "history_notes" : null, + "history_process" : "is1w3m", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "7gulavmvp4p", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=$(date -d '3 day ago' '+%Y%m%d')\nexport stdate_post=$(date -d '3 day ago' '+%Y-%m-%d') \nexport eddate_post=$(date -d '2 day ago' '+%Y-%m-%d')\n\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files\"\nexport mcip_dir=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km\"\nexport graph_dir=\"/groups/ESS/aalnaim/cmaq/plots\"\n\nexport obs_dir_NCL=\"/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X\"\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\n\ndate = getenv(\"YYYYMMDD_POST\") \nd1 = getenv(\"stdate_post\") \nd2 = getenv(\"eddate_post\") \n\nobs_dir = getenv(\"obs_dir_NCL\")\nplot_dir = getenv(\"graph_dir\") 
\n\nhr=new(24,\"string\")\nhr=(/\"00\",\"01\",\"02\",\"03\",\"04\",\"05\",\"06\",\"07\",\"08\",\"09\",\"10\",\"11\",\"12\",\"13\",\"14\",\"15\",\"16\",\"17\",\"18\",\"19\",\"20\",\"21\",\"22\",\"23\"/)\n\nprint(plot_dir)\naconc_dir = getenv(\"postdata_dir\") \ngrid_dir = getenv(\"mcip_dir\") \n\ncdf_file1 = addfile(aconc_dir+\"/COMBINE3D_ACONC_v531_gcc_AQF5X_\"+date+\"_ML_extracted.nc\",\"r\")\ncdf_file= addfile(grid_dir+\"/GRIDCRO2D_\"+date+\".nc\",\"r\")\ncdf_file2= addfile(grid_dir+\"/METCRO2D_\"+date+\".nc\",\"r\")\n\ntime = cdf_file1->TFLAG(:,0,:)\no3 = cdf_file1->O3(:,:,:) ;ppb\nwspd10=cdf_file2->WSPD10(:,0,:,:)\nwdir10=cdf_file2->WDIR10(:,0,:,:)\n\ntemp = cdf_file2->TEMP2\n\nnt = dimsizes(o3(:,0,0))\nny = dimsizes(o3(0,:,0))\nnx = dimsizes(o3(0,0,:))\n\nprint(max(temp))\nprint(min(temp))\nprint(avg(temp))\n\n\n\nprint(nt+\" \"+ny+\" \"+nx)\nprint(max(o3))\nprint(min(o3))\nprint(avg(o3))\n\nlat = cdf_file->LAT(0,0,:,:)\nlon = cdf_file->LON(0,0,:,:)\n\no3@lat2d = lat\no3@lon2d = lon\no3@unit = \"ppbv\"\n\nUV10=wind_component(wspd10,wdir10,0)\nUV10@lat2d = lat\nUV10@lon2d = lon\n\n\nres = True\nres@gsnMaximize = True ; maximize pot in frame\nres@gsnFrame = False ; dont advance frame\nres@gsnDraw = False\nres@gsnLeftString = \"\"\nres@gsnRightString = \"\"\nres@txFont = \"times-roman\"\nres@tiMainFont = \"times-roman\"\n;res@tiMainFontHeightF = 0.02\n;res@vpWidthF = 0.7\n;res@vpHeightF = 0.7\n\n;;set map;;\nmpres = res\nmpres@mpLimitMode = \"LatLon\"\nmpres@mpDataSetName = \"Earth..4\"\nmpres@mpDataBaseVersion = \"MediumRes\"\nmpres@mpOutlineOn = True\nmpres@mpGeophysicalLineThicknessF = 1.5\nmpres@mpFillDrawOrder = \"PostDraw\"\nmpres@mpFillOn = False\nmpres@mpAreaMaskingOn = True\nmpres@mpOutlineBoundarySets = \"GeophysicalAndUSStates\"\nmpres@mpOutlineSpecifiers = \"United States:States\"\nmpres@mpProjection = \"LambertConformal\"\nmpres@mpLambertParallel1F = 33.\nmpres@mpLambertParallel2F = 45.\nmpres@mpLambertMeridianF = -98.\nmpres@mpMinLonF = -120 ;min(lon)+0.2\nmpres@mpMaxLonF = -70 ;max(lon)-0.2\nmpres@mpMinLatF = 25 ;min(lat)+0.05\nmpres@mpMaxLatF = 50 ;max(lat)-0.05\nmpres@pmTickMarkDisplayMode = \"Always\"\nmpres@mpLandFillColor = \"white\"\nmpres@mpInlandWaterFillColor = \"white\"\nmpres@mpOceanFillColor = \"white\"\nmpres@mpGeophysicalLineColor = \"Black\"\n\n;mpres@lbLabelAutoStride = True\nmpres@tiXAxisFont = 25\nmpres@pmTickMarkDisplayMode = \"Always\"\nmpres@tmXBLabelFont = 25\nmpres@tmXBLabelFontHeightF = 0.013\nmpres@tmXBLabelDeltaF = -0.5\nmpres@tmYLLabelFont = 25\nmpres@tmYLLabelFontHeightF = 0.013\nmpres@tmXBLabelDeltaF = -0.5\nmpres@tmXTLabelsOn = False\nmpres@tmXTLabelFont = 25\nmpres@tmXTLabelFontHeightF = 0.013\nmpres@tmYRLabelsOn = False\nmpres@tmYRLabelFont = 25\nmpres@tmYRLabelFontHeightF = 0.013\n\n;;set contour;;\ncnres = res\ncnres@cnFillDrawOrder = \"PreDraw\"\ncnres@cnFillOn = True\ncnres@cnLinesOn = False\ncnres@cnLineLabelsOn = False\ncnres@lbLabelFont = 25\ncnres@lbLabelFontHeightF = 0.013\ncnres@tiXAxisFont = 25\ncnres@pmLabelBarWidthF = 0.5\ncnres@pmLabelBarHeightF = 0.1\n;cnres@pmLabelBarOrthogonalPosF = -0.02\ncnres@lbLabelAutoStride = True\n\n;set vector;;\nres_vc = res\nres_vc@vcGlyphStyle = \"LineArrow\"\nres_vc@vcLineArrowThicknessF = 3\nres_vc@vcMinDistanceF = 0.03\nres_vc@vcRefLengthF = 0.03\nres_vc@vcRefAnnoOn = True\nres_vc@vcRefMagnitudeF = 16\nres_vc@vcRefAnnoString1 = \"16m/s\"\nres_vc@vcRefAnnoSide = \"Top\"\nres_vc@vcRefAnnoString2On = False\nres_vc@vcRefAnnoPerimOn = False\nres_vc@vcRefAnnoOrthogonalPosF = 
-0.02\nres_vc@vcRefAnnoParallelPosF = 0.999\n;res_vc@vcRefAnnoBackgroundColor = \"White\"\nres_vc@vcVectorDrawOrder = \"PostDraw\"\n\ndo it = 0, nt-1\n if (it .lt. 12) then\n pdate=d1\n else\n pdate=d2\n end if\n\n ;print(time(it,0)+\" \"+time(it,1))\n rundate = yyyyddd_to_yyyymmdd( time(it,0) )\n runtime = hr( tointeger(time(it,1)/10000) )\n\n site = readAsciiTable(obs_dir+\"/AQF5X_Hourly_\"+rundate+runtime+\".dat\",1,\"string\",1)\n nrows = dimsizes(site)\n sitename = str_get_field(site,1,\",\")\n sitelat = stringtofloat(str_get_field(site,2,\",\"))\n sitelon = stringtofloat(str_get_field(site,3,\",\"))\n O3_obs = stringtofloat(str_get_field(site,4,\",\"))\n\n obslon = sitelon(:,0)\n obslat = sitelat(:,0)\n obsO3 = O3_obs(:,0)\n\n npts = nrows(0)\n\n obsO3@_FillValue = -999.\n\n;--- levels for dividing\n levels_O3 = ispan(0,80,4)\n\n nlevels = dimsizes(levels_O3)\n\n colors = span_color_rgba(\"WhiteBlueGreenYellowRed\",nlevels+1)\n\n num_distinct_markers = nlevels+1 ; number of distinct markers\n lat_O3 = new((/num_distinct_markers,npts/),float)\n lon_O3 = new((/num_distinct_markers,npts/),float)\n lat_O3 = -999\n lon_O3 = -999\n\n\n;\n; Group the points according to which range they fall in. At the\n; same time, create the label that we will use later in the labelbar\n;\n do i = 0, num_distinct_markers-1\n if (i.eq.0) then\n indexes_O3 = ind(obsO3(:).lt.levels_O3(0))\n end if\n if (i.eq.num_distinct_markers-1) then\n indexes_O3 = ind(obsO3(:).ge.max(levels_O3))\n end if\n if (i.gt.0.and.i.lt.num_distinct_markers-1) then\n indexes_O3 = ind(obsO3(:).ge.levels_O3(i-1).and.obsO3(:).lt.levels_O3(i))\n end if\n\n;\n; Now that we have the set of indexes whose values fall within\n; the given range, take the corresponding lat/lon values and store\n; them, so later we can color this set of markers with the appropriate\n; color.\n;\n if (.not.any(ismissing(indexes_O3))) then\n npts_range_O3 = dimsizes(indexes_O3) ; # of points in this range.\n\n lat_O3(i,0:npts_range_O3-1) = obslat(indexes_O3)\n lon_O3(i,0:npts_range_O3-1) = obslon(indexes_O3)\n ;print(\"O3: \"+npts_range_O3)\n end if\n\n\n delete(indexes_O3) ; Necessary b/c \"indexes\" may be a different\n end do\n\n lat_O3@_FillValue = -999\n lon_O3@_FillValue = -999\n\n gsres = True\n gsres@gsMarkerIndex = 16 ; Use filled dots for markers.\n\n hollowres = True\n hollowres@gsMarkerIndex = 4\n hollowres@gsMarkerColor = \"black\"\n hollowres@gsMarkerSizeF = 0.008\n\n;;;;;;;;; Plot Ozone\n pname=plot_dir+\"/OBS-FORECAST_O3_\"+rundate+runtime\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n pmid_O3 = new(num_distinct_markers,graphic)\n hollow_O3 = new(num_distinct_markers,graphic)\n\n cnres@tiMainString = pdate+\" \"+runtime+\" UTC O~B~3~N~ (ppbV)\"\n cnres@cnLevelSelectionMode = \"ManualLevels\"\n cnres@cnMinLevelValF = 0.\n cnres@cnMaxLevelValF = 80\n cnres@cnLevelSpacingF = 4\n\n ;plot = gsn_csm_contour_map(wks,o3(it,:,:),res)\n map = gsn_csm_map(wks,mpres)\n contour = gsn_csm_contour(wks,o3(it,:,:),cnres)\n vector = gsn_csm_vector(wks,UV10(0,it,:,:),UV10(1,it,:,:),res_vc)\n overlay(map,contour)\n overlay(map,vector)\n\n pmid = new(num_distinct_markers,graphic)\n hollow = new(num_distinct_markers,graphic)\n do i = 0, num_distinct_markers-1\n if (.not.ismissing(lat_O3(i,0)))\n gsres@gsMarkerColor = colors(i,:)\n gsres@gsMarkerSizeF = 0.008\n gsres@gsMarkerThicknessF = 1\n pmid(i) = gsn_add_polymarker(wks,vector,lon_O3(i,:),lat_O3(i,:),gsres)\n hollow(i) = 
gsn_add_polymarker(wks,vector,lon_O3(i,:),lat_O3(i,:),hollowres)\n end if\n end do\n\n draw(map)\n frame(wks)\n delete(wks)\n delete(pmid_O3)\n delete(hollow_O3)\n system(\"composite -geometry 100x70+900+900 /groups/ESS/aalnaim/cmaq/mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\n\n\n delete(pmid)\n delete(hollow)\n delete(site)\n delete(sitename)\n delete(sitelat)\n delete(sitelon)\n delete(O3_obs)\n delete(obslon)\n delete(obslat)\n delete(obsO3)\n delete([/lon_O3,lat_O3/])\n\nend do\ndelete(res)\n\n;/\n\nend\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\n\nconvert -delay 100 /groups/ESS/aalnaim/cmaq/plots/OBS*.png /groups/ESS/aalnaim/cmaq/plots/\"Airnow_\"$YYYYMMDD_POST.gif\n\nif [ $? -eq 0 ]; then\n echo \"Generating AirNow images/gif Completed Successfully\"\n\techo \"Removing ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nelse\n echo \"Generating AirNow images/gif Failed!\"\n echo \"Removing ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nfi\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\n\n\nVariable: plot_dir\nType: string\nTotal Size: 8 bytes\n 1 values\nNumber of Dimensions: 1\nDimensions and sizes:\t[1]\nCoordinates: \n(0)\t/groups/ESS/aalnaim/cmaq/plots\n(0)\t316.8002\n(0)\t262.6003\n(0)\t293.9359\n(0)\t24 265 442\n(0)\t85.06\n(0)\t4.506333333333333\n(0)\t30.63602873848283\nGenerating AirNow images/gif Completed Successfully\nRemoving ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\n", + "history_begin_time" : 1656455485201, + "history_end_time" : 1656455769632, + "history_notes" : null, + "history_process" : "is1w3m", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "9a4e7z9gi53", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=$(date -d '3 day ago' '+%Y%m%d')\nexport stdate_post=$(date -d '3 day ago' '+%Y-%m-%d') \nexport eddate_post=$(date -d '2 day ago' '+%Y%m%d')\n\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files\"\nexport mcip_dir=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km\"\nexport graph_dir=\"/groups/ESS/aalnaim/cmaq/plots\"\n\nexport obs_dir_NCL=\"/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X\"\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\n\ndate = getenv(\"YYYYMMDD_POST\") \nd1 = getenv(\"stdate_post\") \nd2 = getenv(\"eddate_post\") \n\nobs_dir = getenv(\"obs_dir_NCL\")\nplot_dir = getenv(\"graph_dir\") \n\nhr=new(24,\"string\")\nhr=(/\"00\",\"01\",\"02\",\"03\",\"04\",\"05\",\"06\",\"07\",\"08\",\"09\",\"10\",\"11\",\"12\",\"13\",\"14\",\"15\",\"16\",\"17\",\"18\",\"19\",\"20\",\"21\",\"22\",\"23\"/)\n\nprint(plot_dir)\naconc_dir = 
getenv(\"postdata_dir\") \ngrid_dir = getenv(\"mcip_dir\") \n\ncdf_file1 = addfile(aconc_dir+\"/COMBINE3D_ACONC_v531_gcc_AQF5X_\"+date+\"_ML_extracted.nc\",\"r\")\ncdf_file= addfile(grid_dir+\"/GRIDCRO2D_\"+date+\".nc\",\"r\")\ncdf_file2= addfile(grid_dir+\"/METCRO2D_\"+date+\".nc\",\"r\")\n\ntime = cdf_file1->TFLAG(:,0,:)\no3 = cdf_file1->O3(:,:,:) ;ppb\nwspd10=cdf_file2->WSPD10(:,0,:,:)\nwdir10=cdf_file2->WDIR10(:,0,:,:)\n\ntemp = cdf_file2->TEMP2\n\nnt = dimsizes(o3(:,0,0))\nny = dimsizes(o3(0,:,0))\nnx = dimsizes(o3(0,0,:))\n\nprint(max(temp))\nprint(min(temp))\nprint(avg(temp))\n\n\n\nprint(nt+\" \"+ny+\" \"+nx)\nprint(max(o3))\nprint(min(o3))\nprint(avg(o3))\n\nlat = cdf_file->LAT(0,0,:,:)\nlon = cdf_file->LON(0,0,:,:)\n\no3@lat2d = lat\no3@lon2d = lon\no3@unit = \"ppbv\"\n\nUV10=wind_component(wspd10,wdir10,0)\nUV10@lat2d = lat\nUV10@lon2d = lon\n\n\nres = True\nres@gsnMaximize = True ; maximize pot in frame\nres@gsnFrame = False ; dont advance frame\nres@gsnDraw = False\nres@gsnLeftString = \"\"\nres@gsnRightString = \"\"\nres@txFont = \"times-roman\"\nres@tiMainFont = \"times-roman\"\n;res@tiMainFontHeightF = 0.02\n;res@vpWidthF = 0.7\n;res@vpHeightF = 0.7\n\n;;set map;;\nmpres = res\nmpres@mpLimitMode = \"LatLon\"\nmpres@mpDataSetName = \"Earth..4\"\nmpres@mpDataBaseVersion = \"MediumRes\"\nmpres@mpOutlineOn = True\nmpres@mpGeophysicalLineThicknessF = 1.5\nmpres@mpFillDrawOrder = \"PostDraw\"\nmpres@mpFillOn = False\nmpres@mpAreaMaskingOn = True\nmpres@mpOutlineBoundarySets = \"GeophysicalAndUSStates\"\nmpres@mpOutlineSpecifiers = \"United States:States\"\nmpres@mpProjection = \"LambertConformal\"\nmpres@mpLambertParallel1F = 33.\nmpres@mpLambertParallel2F = 45.\nmpres@mpLambertMeridianF = -98.\nmpres@mpMinLonF = -120 ;min(lon)+0.2\nmpres@mpMaxLonF = -70 ;max(lon)-0.2\nmpres@mpMinLatF = 25 ;min(lat)+0.05\nmpres@mpMaxLatF = 50 ;max(lat)-0.05\nmpres@pmTickMarkDisplayMode = \"Always\"\nmpres@mpLandFillColor = \"white\"\nmpres@mpInlandWaterFillColor = \"white\"\nmpres@mpOceanFillColor = \"white\"\nmpres@mpGeophysicalLineColor = \"Black\"\n\n;mpres@lbLabelAutoStride = True\nmpres@tiXAxisFont = 25\nmpres@pmTickMarkDisplayMode = \"Always\"\nmpres@tmXBLabelFont = 25\nmpres@tmXBLabelFontHeightF = 0.013\nmpres@tmXBLabelDeltaF = -0.5\nmpres@tmYLLabelFont = 25\nmpres@tmYLLabelFontHeightF = 0.013\nmpres@tmXBLabelDeltaF = -0.5\nmpres@tmXTLabelsOn = False\nmpres@tmXTLabelFont = 25\nmpres@tmXTLabelFontHeightF = 0.013\nmpres@tmYRLabelsOn = False\nmpres@tmYRLabelFont = 25\nmpres@tmYRLabelFontHeightF = 0.013\n\n;;set contour;;\ncnres = res\ncnres@cnFillDrawOrder = \"PreDraw\"\ncnres@cnFillOn = True\ncnres@cnLinesOn = False\ncnres@cnLineLabelsOn = False\ncnres@lbLabelFont = 25\ncnres@lbLabelFontHeightF = 0.013\ncnres@tiXAxisFont = 25\ncnres@pmLabelBarWidthF = 0.5\ncnres@pmLabelBarHeightF = 0.1\n;cnres@pmLabelBarOrthogonalPosF = -0.02\ncnres@lbLabelAutoStride = True\n\n;set vector;;\nres_vc = res\nres_vc@vcGlyphStyle = \"LineArrow\"\nres_vc@vcLineArrowThicknessF = 3\nres_vc@vcMinDistanceF = 0.03\nres_vc@vcRefLengthF = 0.03\nres_vc@vcRefAnnoOn = True\nres_vc@vcRefMagnitudeF = 16\nres_vc@vcRefAnnoString1 = \"16m/s\"\nres_vc@vcRefAnnoSide = \"Top\"\nres_vc@vcRefAnnoString2On = False\nres_vc@vcRefAnnoPerimOn = False\nres_vc@vcRefAnnoOrthogonalPosF = -0.02\nres_vc@vcRefAnnoParallelPosF = 0.999\n;res_vc@vcRefAnnoBackgroundColor = \"White\"\nres_vc@vcVectorDrawOrder = \"PostDraw\"\n\ndo it = 0, nt-1\n if (it .lt. 
12) then\n pdate=d1\n else\n pdate=d2\n end if\n\n ;print(time(it,0)+\" \"+time(it,1))\n rundate = yyyyddd_to_yyyymmdd( time(it,0) )\n runtime = hr( tointeger(time(it,1)/10000) )\n\n site = readAsciiTable(obs_dir+\"/AQF5X_Hourly_\"+rundate+runtime+\".dat\",1,\"string\",1)\n nrows = dimsizes(site)\n sitename = str_get_field(site,1,\",\")\n sitelat = stringtofloat(str_get_field(site,2,\",\"))\n sitelon = stringtofloat(str_get_field(site,3,\",\"))\n O3_obs = stringtofloat(str_get_field(site,4,\",\"))\n\n obslon = sitelon(:,0)\n obslat = sitelat(:,0)\n obsO3 = O3_obs(:,0)\n\n npts = nrows(0)\n\n obsO3@_FillValue = -999.\n\n;--- levels for dividing\n levels_O3 = ispan(0,80,4)\n\n nlevels = dimsizes(levels_O3)\n\n colors = span_color_rgba(\"WhiteBlueGreenYellowRed\",nlevels+1)\n\n num_distinct_markers = nlevels+1 ; number of distinct markers\n lat_O3 = new((/num_distinct_markers,npts/),float)\n lon_O3 = new((/num_distinct_markers,npts/),float)\n lat_O3 = -999\n lon_O3 = -999\n\n\n;\n; Group the points according to which range they fall in. At the\n; same time, create the label that we will use later in the labelbar\n;\n do i = 0, num_distinct_markers-1\n if (i.eq.0) then\n indexes_O3 = ind(obsO3(:).lt.levels_O3(0))\n end if\n if (i.eq.num_distinct_markers-1) then\n indexes_O3 = ind(obsO3(:).ge.max(levels_O3))\n end if\n if (i.gt.0.and.i.lt.num_distinct_markers-1) then\n indexes_O3 = ind(obsO3(:).ge.levels_O3(i-1).and.obsO3(:).lt.levels_O3(i))\n end if\n\n;\n; Now that we have the set of indexes whose values fall within\n; the given range, take the corresponding lat/lon values and store\n; them, so later we can color this set of markers with the appropriate\n; color.\n;\n if (.not.any(ismissing(indexes_O3))) then\n npts_range_O3 = dimsizes(indexes_O3) ; # of points in this range.\n\n lat_O3(i,0:npts_range_O3-1) = obslat(indexes_O3)\n lon_O3(i,0:npts_range_O3-1) = obslon(indexes_O3)\n ;print(\"O3: \"+npts_range_O3)\n end if\n\n\n delete(indexes_O3) ; Necessary b/c \"indexes\" may be a different\n end do\n\n lat_O3@_FillValue = -999\n lon_O3@_FillValue = -999\n\n gsres = True\n gsres@gsMarkerIndex = 16 ; Use filled dots for markers.\n\n hollowres = True\n hollowres@gsMarkerIndex = 4\n hollowres@gsMarkerColor = \"black\"\n hollowres@gsMarkerSizeF = 0.008\n\n;;;;;;;;; Plot Ozone\n pname=plot_dir+\"/OBS-FORECAST_O3_\"+rundate+runtime\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n pmid_O3 = new(num_distinct_markers,graphic)\n hollow_O3 = new(num_distinct_markers,graphic)\n\n cnres@tiMainString = pdate+\" \"+runtime+\" UTC O~B~3~N~ (ppbV)\"\n cnres@cnLevelSelectionMode = \"ManualLevels\"\n cnres@cnMinLevelValF = 0.\n cnres@cnMaxLevelValF = 80\n cnres@cnLevelSpacingF = 4\n\n ;plot = gsn_csm_contour_map(wks,o3(it,:,:),res)\n map = gsn_csm_map(wks,mpres)\n contour = gsn_csm_contour(wks,o3(it,:,:),cnres)\n vector = gsn_csm_vector(wks,UV10(0,it,:,:),UV10(1,it,:,:),res_vc)\n overlay(map,contour)\n overlay(map,vector)\n\n pmid = new(num_distinct_markers,graphic)\n hollow = new(num_distinct_markers,graphic)\n do i = 0, num_distinct_markers-1\n if (.not.ismissing(lat_O3(i,0)))\n gsres@gsMarkerColor = colors(i,:)\n gsres@gsMarkerSizeF = 0.008\n gsres@gsMarkerThicknessF = 1\n pmid(i) = gsn_add_polymarker(wks,vector,lon_O3(i,:),lat_O3(i,:),gsres)\n hollow(i) = gsn_add_polymarker(wks,vector,lon_O3(i,:),lat_O3(i,:),hollowres)\n end if\n end do\n\n draw(map)\n frame(wks)\n delete(wks)\n delete(pmid_O3)\n delete(hollow_O3)\n system(\"composite -geometry 100x70+900+900 
/groups/ESS/aalnaim/cmaq/mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\n\n\n delete(pmid)\n delete(hollow)\n delete(site)\n delete(sitename)\n delete(sitelat)\n delete(sitelon)\n delete(O3_obs)\n delete(obslon)\n delete(obslat)\n delete(obsO3)\n delete([/lon_O3,lat_O3/])\n\nend do\ndelete(res)\n\n;/\n\nend\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\n\nconvert -delay 100 /groups/ESS/aalnaim/cmaq/plots/OBS*.png /groups/ESS/aalnaim/cmaq/plots/Airnow_$YYYYMMDD_POST_correct.gif\n\nif [ $? -eq 0 ]; then\n echo \"Generating AirNow images/gif Completed Successfully\"\n\techo \"Removing ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nelse\n echo \"Generating AirNow images/gif Failed!\"\n echo \"Removing ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nfi\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\n\n\nVariable: plot_dir\nType: string\nTotal Size: 8 bytes\n 1 values\nNumber of Dimensions: 1\nDimensions and sizes:\t[1]\nCoordinates: \n(0)\t/groups/ESS/aalnaim/cmaq/plots\n(0)\t316.8002\n(0)\t262.6003\n(0)\t293.9359\n(0)\t24 265 442\n(0)\t85.06\n(0)\t4.506333333333333\n(0)\t30.63602873848283\nGenerating AirNow images/gif Completed Successfully\nRemoving ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\n", + "history_begin_time" : 1656454576091, + "history_end_time" : 1656454858998, + "history_notes" : null, + "history_process" : "is1w3m", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "hr2rrx85yut", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=$(date -d '3 day ago' '+%Y%m%d')\nexport stdate_post=$(date -d '3 day ago' '+%Y-%m-%d') \nexport eddate_post=$(date -d '2 day ago' '+%Y%m%d')\n\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files\"\nexport mcip_dir=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km\"\nexport graph_dir=\"/groups/ESS/aalnaim/cmaq/plots\"\n\nexport obs_dir_NCL=\"/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X\"\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\n\ndate = getenv(\"YYYYMMDD_POST\") \nd1 = getenv(\"stdate_post\") \nd2 = getenv(\"eddate_post\") \n\nobs_dir = getenv(\"obs_dir_NCL\")\nplot_dir = getenv(\"graph_dir\") \n\nhr=new(24,\"string\")\nhr=(/\"00\",\"01\",\"02\",\"03\",\"04\",\"05\",\"06\",\"07\",\"08\",\"09\",\"10\",\"11\",\"12\",\"13\",\"14\",\"15\",\"16\",\"17\",\"18\",\"19\",\"20\",\"21\",\"22\",\"23\"/)\n\nprint(plot_dir)\naconc_dir = getenv(\"postdata_dir\") \ngrid_dir = getenv(\"mcip_dir\") \n\ncdf_file1 = addfile(aconc_dir+\"/COMBINE3D_ACONC_v531_gcc_AQF5X_\"+date+\"_ML_extracted.nc\",\"r\")\ncdf_file= 
addfile(grid_dir+\"/GRIDCRO2D_\"+date+\".nc\",\"r\")\ncdf_file2= addfile(grid_dir+\"/METCRO2D_\"+date+\".nc\",\"r\")\n\ntime = cdf_file1->TFLAG(:,0,:)\no3 = cdf_file1->O3(:,:,:) ;ppb\nwspd10=cdf_file2->WSPD10(:,0,:,:)\nwdir10=cdf_file2->WDIR10(:,0,:,:)\n\ntemp = cdf_file2->TEMP2\n\nnt = dimsizes(o3(:,0,0))\nny = dimsizes(o3(0,:,0))\nnx = dimsizes(o3(0,0,:))\n\nprint(max(temp))\nprint(min(temp))\nprint(avg(temp))\n\n\n\nprint(nt+\" \"+ny+\" \"+nx)\nprint(max(o3))\nprint(min(o3))\nprint(avg(o3))\n\nlat = cdf_file->LAT(0,0,:,:)\nlon = cdf_file->LON(0,0,:,:)\n\no3@lat2d = lat\no3@lon2d = lon\no3@unit = \"ppbv\"\n\nUV10=wind_component(wspd10,wdir10,0)\nUV10@lat2d = lat\nUV10@lon2d = lon\n\n\nres = True\nres@gsnMaximize = True ; maximize pot in frame\nres@gsnFrame = False ; dont advance frame\nres@gsnDraw = False\nres@gsnLeftString = \"\"\nres@gsnRightString = \"\"\nres@txFont = \"times-roman\"\nres@tiMainFont = \"times-roman\"\n;res@tiMainFontHeightF = 0.02\n;res@vpWidthF = 0.7\n;res@vpHeightF = 0.7\n\n;;set map;;\nmpres = res\nmpres@mpLimitMode = \"LatLon\"\nmpres@mpDataSetName = \"Earth..4\"\nmpres@mpDataBaseVersion = \"MediumRes\"\nmpres@mpOutlineOn = True\nmpres@mpGeophysicalLineThicknessF = 1.5\nmpres@mpFillDrawOrder = \"PostDraw\"\nmpres@mpFillOn = False\nmpres@mpAreaMaskingOn = True\nmpres@mpOutlineBoundarySets = \"GeophysicalAndUSStates\"\nmpres@mpOutlineSpecifiers = \"United States:States\"\nmpres@mpProjection = \"LambertConformal\"\nmpres@mpLambertParallel1F = 33.\nmpres@mpLambertParallel2F = 45.\nmpres@mpLambertMeridianF = -98.\nmpres@mpMinLonF = -120 ;min(lon)+0.2\nmpres@mpMaxLonF = -70 ;max(lon)-0.2\nmpres@mpMinLatF = 25 ;min(lat)+0.05\nmpres@mpMaxLatF = 50 ;max(lat)-0.05\nmpres@pmTickMarkDisplayMode = \"Always\"\nmpres@mpLandFillColor = \"white\"\nmpres@mpInlandWaterFillColor = \"white\"\nmpres@mpOceanFillColor = \"white\"\nmpres@mpGeophysicalLineColor = \"Black\"\n\n;mpres@lbLabelAutoStride = True\nmpres@tiXAxisFont = 25\nmpres@pmTickMarkDisplayMode = \"Always\"\nmpres@tmXBLabelFont = 25\nmpres@tmXBLabelFontHeightF = 0.013\nmpres@tmXBLabelDeltaF = -0.5\nmpres@tmYLLabelFont = 25\nmpres@tmYLLabelFontHeightF = 0.013\nmpres@tmXBLabelDeltaF = -0.5\nmpres@tmXTLabelsOn = False\nmpres@tmXTLabelFont = 25\nmpres@tmXTLabelFontHeightF = 0.013\nmpres@tmYRLabelsOn = False\nmpres@tmYRLabelFont = 25\nmpres@tmYRLabelFontHeightF = 0.013\n\n;;set contour;;\ncnres = res\ncnres@cnFillDrawOrder = \"PreDraw\"\ncnres@cnFillOn = True\ncnres@cnLinesOn = False\ncnres@cnLineLabelsOn = False\ncnres@lbLabelFont = 25\ncnres@lbLabelFontHeightF = 0.013\ncnres@tiXAxisFont = 25\ncnres@pmLabelBarWidthF = 0.5\ncnres@pmLabelBarHeightF = 0.1\n;cnres@pmLabelBarOrthogonalPosF = -0.02\ncnres@lbLabelAutoStride = True\n\n;set vector;;\nres_vc = res\nres_vc@vcGlyphStyle = \"LineArrow\"\nres_vc@vcLineArrowThicknessF = 3\nres_vc@vcMinDistanceF = 0.03\nres_vc@vcRefLengthF = 0.03\nres_vc@vcRefAnnoOn = True\nres_vc@vcRefMagnitudeF = 16\nres_vc@vcRefAnnoString1 = \"16m/s\"\nres_vc@vcRefAnnoSide = \"Top\"\nres_vc@vcRefAnnoString2On = False\nres_vc@vcRefAnnoPerimOn = False\nres_vc@vcRefAnnoOrthogonalPosF = -0.02\nres_vc@vcRefAnnoParallelPosF = 0.999\n;res_vc@vcRefAnnoBackgroundColor = \"White\"\nres_vc@vcVectorDrawOrder = \"PostDraw\"\n\ndo it = 0, nt-1\n if (it .lt. 
12) then\n pdate=d1\n else\n pdate=d2\n end if\n\n ;print(time(it,0)+\" \"+time(it,1))\n rundate = yyyyddd_to_yyyymmdd( time(it,0) )\n runtime = hr( tointeger(time(it,1)/10000) )\n\n site = readAsciiTable(obs_dir+\"/AQF5X_Hourly_\"+rundate+runtime+\".dat\",1,\"string\",1)\n nrows = dimsizes(site)\n sitename = str_get_field(site,1,\",\")\n sitelat = stringtofloat(str_get_field(site,2,\",\"))\n sitelon = stringtofloat(str_get_field(site,3,\",\"))\n O3_obs = stringtofloat(str_get_field(site,4,\",\"))\n\n obslon = sitelon(:,0)\n obslat = sitelat(:,0)\n obsO3 = O3_obs(:,0)\n\n npts = nrows(0)\n\n obsO3@_FillValue = -999.\n\n;--- levels for dividing\n levels_O3 = ispan(0,80,4)\n\n nlevels = dimsizes(levels_O3)\n\n colors = span_color_rgba(\"WhiteBlueGreenYellowRed\",nlevels+1)\n\n num_distinct_markers = nlevels+1 ; number of distinct markers\n lat_O3 = new((/num_distinct_markers,npts/),float)\n lon_O3 = new((/num_distinct_markers,npts/),float)\n lat_O3 = -999\n lon_O3 = -999\n\n\n;\n; Group the points according to which range they fall in. At the\n; same time, create the label that we will use later in the labelbar\n;\n do i = 0, num_distinct_markers-1\n if (i.eq.0) then\n indexes_O3 = ind(obsO3(:).lt.levels_O3(0))\n end if\n if (i.eq.num_distinct_markers-1) then\n indexes_O3 = ind(obsO3(:).ge.max(levels_O3))\n end if\n if (i.gt.0.and.i.lt.num_distinct_markers-1) then\n indexes_O3 = ind(obsO3(:).ge.levels_O3(i-1).and.obsO3(:).lt.levels_O3(i))\n end if\n\n;\n; Now that we have the set of indexes whose values fall within\n; the given range, take the corresponding lat/lon values and store\n; them, so later we can color this set of markers with the appropriate\n; color.\n;\n if (.not.any(ismissing(indexes_O3))) then\n npts_range_O3 = dimsizes(indexes_O3) ; # of points in this range.\n\n lat_O3(i,0:npts_range_O3-1) = obslat(indexes_O3)\n lon_O3(i,0:npts_range_O3-1) = obslon(indexes_O3)\n ;print(\"O3: \"+npts_range_O3)\n end if\n\n\n delete(indexes_O3) ; Necessary b/c \"indexes\" may be a different\n end do\n\n lat_O3@_FillValue = -999\n lon_O3@_FillValue = -999\n\n gsres = True\n gsres@gsMarkerIndex = 16 ; Use filled dots for markers.\n\n hollowres = True\n hollowres@gsMarkerIndex = 4\n hollowres@gsMarkerColor = \"black\"\n hollowres@gsMarkerSizeF = 0.008\n\n;;;;;;;;; Plot Ozone\n pname=plot_dir+\"/OBS-FORECAST_O3_\"+rundate+runtime\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n pmid_O3 = new(num_distinct_markers,graphic)\n hollow_O3 = new(num_distinct_markers,graphic)\n\n cnres@tiMainString = pdate+\" \"+runtime+\" UTC O~B~3~N~ (ppbV)\"\n cnres@cnLevelSelectionMode = \"ManualLevels\"\n cnres@cnMinLevelValF = 0.\n cnres@cnMaxLevelValF = 80\n cnres@cnLevelSpacingF = 4\n\n ;plot = gsn_csm_contour_map(wks,o3(it,:,:),res)\n map = gsn_csm_map(wks,mpres)\n contour = gsn_csm_contour(wks,o3(it,:,:),cnres)\n vector = gsn_csm_vector(wks,UV10(0,it,:,:),UV10(1,it,:,:),res_vc)\n overlay(map,contour)\n overlay(map,vector)\n\n pmid = new(num_distinct_markers,graphic)\n hollow = new(num_distinct_markers,graphic)\n do i = 0, num_distinct_markers-1\n if (.not.ismissing(lat_O3(i,0)))\n gsres@gsMarkerColor = colors(i,:)\n gsres@gsMarkerSizeF = 0.008\n gsres@gsMarkerThicknessF = 1\n pmid(i) = gsn_add_polymarker(wks,vector,lon_O3(i,:),lat_O3(i,:),gsres)\n hollow(i) = gsn_add_polymarker(wks,vector,lon_O3(i,:),lat_O3(i,:),hollowres)\n end if\n end do\n\n draw(map)\n frame(wks)\n delete(wks)\n delete(pmid_O3)\n delete(hollow_O3)\n system(\"composite -geometry 100x70+900+900 
/groups/ESS/aalnaim/cmaq/mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\n\n\n delete(pmid)\n delete(hollow)\n delete(site)\n delete(sitename)\n delete(sitelat)\n delete(sitelon)\n delete(O3_obs)\n delete(obslon)\n delete(obslat)\n delete(obsO3)\n delete([/lon_O3,lat_O3/])\n\nend do\ndelete(res)\n\n;/\n\nend\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\n\nconvert -delay 100 /groups/ESS/aalnaim/cmaq/plots/OBS*.png /groups/ESS/aalnaim/cmaq/plots/Airnow_$YYYYMMDD_POST_correct.gif\n\nif [ $? -eq 0 ]; then\n echo \"Generating AirNow images/gif Completed Successfully\"\n\techo \"Removing ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nelse\n echo \"Generating AirNow images/gif Failed!\"\n echo \"Removing ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nfi\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\n\n\nVariable: plot_dir\nType: string\nTotal Size: 8 bytes\n 1 values\nNumber of Dimensions: 1\nDimensions and sizes:\t[1]\nCoordinates: \n(0)\t/groups/ESS/aalnaim/cmaq/plots\n(0)\t316.8002\n(0)\t262.6003\n(0)\t293.9359\n(0)\t24 265 442\n(0)\t85.06\n(0)\t4.506333333333333\n(0)\t30.63602873848283\nGenerating AirNow images/gif Completed Successfully\nRemoving ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\n", + "history_begin_time" : 1656454294901, + "history_end_time" : 1656454575582, + "history_notes" : null, + "history_process" : "is1w3m", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "1mafcnesnf9", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=$(date -d '3 day ago' '+%Y%m%d')\nexport stdate_post=$(date -d '3 day ago' '+%Y-%m-%d') \nexport eddate_post=$(date -d '2 day ago' '+%Y%m%d')\n\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files\"\nexport mcip_dir=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km\"\nexport graph_dir=\"/groups/ESS/aalnaim/cmaq/plots\"\n\nexport obs_dir_NCL=\"/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X\"\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\n\ndate = getenv(\"YYYYMMDD_POST\") \nd1 = getenv(\"stdate_post\") \nd2 = getenv(\"eddate_post\") \n\nobs_dir = getenv(\"obs_dir_NCL\")\nplot_dir = getenv(\"graph_dir\") \n\nhr=new(24,\"string\")\nhr=(/\"00\",\"01\",\"02\",\"03\",\"04\",\"05\",\"06\",\"07\",\"08\",\"09\",\"10\",\"11\",\"12\",\"13\",\"14\",\"15\",\"16\",\"17\",\"18\",\"19\",\"20\",\"21\",\"22\",\"23\"/)\n\nprint(plot_dir)\naconc_dir = getenv(\"postdata_dir\") \ngrid_dir = getenv(\"mcip_dir\") \n\ncdf_file1 = addfile(aconc_dir+\"/COMBINE3D_ACONC_v531_gcc_AQF5X_\"+date+\"_ML_extracted.nc\",\"r\")\ncdf_file= 
addfile(grid_dir+\"/GRIDCRO2D_\"+date+\".nc\",\"r\")\ncdf_file2= addfile(grid_dir+\"/METCRO2D_\"+date+\".nc\",\"r\")\n\ntime = cdf_file1->TFLAG(:,0,:)\no3 = cdf_file1->O3(:,:,:) ;ppb\nwspd10=cdf_file2->WSPD10(:,0,:,:)\nwdir10=cdf_file2->WDIR10(:,0,:,:)\n\ntemp = cdf_file2->TEMP2\n\nnt = dimsizes(o3(:,0,0))\nny = dimsizes(o3(0,:,0))\nnx = dimsizes(o3(0,0,:))\n\nprint(max(temp))\nprint(min(temp))\nprint(avg(temp))\n\n\n\nprint(nt+\" \"+ny+\" \"+nx)\nprint(max(o3))\nprint(min(o3))\nprint(avg(o3))\n\nlat = cdf_file->LAT(0,0,:,:)\nlon = cdf_file->LON(0,0,:,:)\n\no3@lat2d = lat\no3@lon2d = lon\no3@unit = \"ppbv\"\n\nUV10=wind_component(wspd10,wdir10,0)\nUV10@lat2d = lat\nUV10@lon2d = lon\n\n\nres = True\nres@gsnMaximize = True ; maximize pot in frame\nres@gsnFrame = False ; dont advance frame\nres@gsnDraw = False\nres@gsnLeftString = \"\"\nres@gsnRightString = \"\"\nres@txFont = \"times-roman\"\nres@tiMainFont = \"times-roman\"\n;res@tiMainFontHeightF = 0.02\n;res@vpWidthF = 0.7\n;res@vpHeightF = 0.7\n\n;;set map;;\nmpres = res\nmpres@mpLimitMode = \"LatLon\"\nmpres@mpDataSetName = \"Earth..4\"\nmpres@mpDataBaseVersion = \"MediumRes\"\nmpres@mpOutlineOn = True\nmpres@mpGeophysicalLineThicknessF = 1.5\nmpres@mpFillDrawOrder = \"PostDraw\"\nmpres@mpFillOn = False\nmpres@mpAreaMaskingOn = True\nmpres@mpOutlineBoundarySets = \"GeophysicalAndUSStates\"\nmpres@mpOutlineSpecifiers = \"United States:States\"\nmpres@mpProjection = \"LambertConformal\"\nmpres@mpLambertParallel1F = 33.\nmpres@mpLambertParallel2F = 45.\nmpres@mpLambertMeridianF = -98.\nmpres@mpMinLonF = -120 ;min(lon)+0.2\nmpres@mpMaxLonF = -70 ;max(lon)-0.2\nmpres@mpMinLatF = 25 ;min(lat)+0.05\nmpres@mpMaxLatF = 50 ;max(lat)-0.05\nmpres@pmTickMarkDisplayMode = \"Always\"\nmpres@mpLandFillColor = \"white\"\nmpres@mpInlandWaterFillColor = \"white\"\nmpres@mpOceanFillColor = \"white\"\nmpres@mpGeophysicalLineColor = \"Black\"\n\n;mpres@lbLabelAutoStride = True\nmpres@tiXAxisFont = 25\nmpres@pmTickMarkDisplayMode = \"Always\"\nmpres@tmXBLabelFont = 25\nmpres@tmXBLabelFontHeightF = 0.013\nmpres@tmXBLabelDeltaF = -0.5\nmpres@tmYLLabelFont = 25\nmpres@tmYLLabelFontHeightF = 0.013\nmpres@tmXBLabelDeltaF = -0.5\nmpres@tmXTLabelsOn = False\nmpres@tmXTLabelFont = 25\nmpres@tmXTLabelFontHeightF = 0.013\nmpres@tmYRLabelsOn = False\nmpres@tmYRLabelFont = 25\nmpres@tmYRLabelFontHeightF = 0.013\n\n;;set contour;;\ncnres = res\ncnres@cnFillDrawOrder = \"PreDraw\"\ncnres@cnFillOn = True\ncnres@cnLinesOn = False\ncnres@cnLineLabelsOn = False\ncnres@lbLabelFont = 25\ncnres@lbLabelFontHeightF = 0.013\ncnres@tiXAxisFont = 25\ncnres@pmLabelBarWidthF = 0.5\ncnres@pmLabelBarHeightF = 0.1\n;cnres@pmLabelBarOrthogonalPosF = -0.02\ncnres@lbLabelAutoStride = True\n\n;set vector;;\nres_vc = res\nres_vc@vcGlyphStyle = \"LineArrow\"\nres_vc@vcLineArrowThicknessF = 3\nres_vc@vcMinDistanceF = 0.03\nres_vc@vcRefLengthF = 0.03\nres_vc@vcRefAnnoOn = True\nres_vc@vcRefMagnitudeF = 16\nres_vc@vcRefAnnoString1 = \"16m/s\"\nres_vc@vcRefAnnoSide = \"Top\"\nres_vc@vcRefAnnoString2On = False\nres_vc@vcRefAnnoPerimOn = False\nres_vc@vcRefAnnoOrthogonalPosF = -0.02\nres_vc@vcRefAnnoParallelPosF = 0.999\n;res_vc@vcRefAnnoBackgroundColor = \"White\"\nres_vc@vcVectorDrawOrder = \"PostDraw\"\n\ndo it = 0, nt-1\n if (it .lt. 
12) then\n pdate=d1\n else\n pdate=d2\n end if\n\n ;print(time(it,0)+\" \"+time(it,1))\n rundate = yyyyddd_to_yyyymmdd( time(it,0) )\n runtime = hr( tointeger(time(it,1)/10000) )\n\n site = readAsciiTable(obs_dir+\"/AQF5X_Hourly_\"+rundate+runtime+\".dat\",1,\"string\",1)\n nrows = dimsizes(site)\n sitename = str_get_field(site,1,\",\")\n sitelat = stringtofloat(str_get_field(site,2,\",\"))\n sitelon = stringtofloat(str_get_field(site,3,\",\"))\n O3_obs = stringtofloat(str_get_field(site,4,\",\"))\n\n obslon = sitelon(:,0)\n obslat = sitelat(:,0)\n obsO3 = O3_obs(:,0)\n\n npts = nrows(0)\n\n obsO3@_FillValue = -999.\n\n;--- levels for dividing\n levels_O3 = ispan(0,80,4)\n\n nlevels = dimsizes(levels_O3)\n\n colors = span_color_rgba(\"WhiteBlueGreenYellowRed\",nlevels+1)\n\n num_distinct_markers = nlevels+1 ; number of distinct markers\n lat_O3 = new((/num_distinct_markers,npts/),float)\n lon_O3 = new((/num_distinct_markers,npts/),float)\n lat_O3 = -999\n lon_O3 = -999\n\n\n;\n; Group the points according to which range they fall in. At the\n; same time, create the label that we will use later in the labelbar\n;\n do i = 0, num_distinct_markers-1\n if (i.eq.0) then\n indexes_O3 = ind(obsO3(:).lt.levels_O3(0))\n end if\n if (i.eq.num_distinct_markers-1) then\n indexes_O3 = ind(obsO3(:).ge.max(levels_O3))\n end if\n if (i.gt.0.and.i.lt.num_distinct_markers-1) then\n indexes_O3 = ind(obsO3(:).ge.levels_O3(i-1).and.obsO3(:).lt.levels_O3(i))\n end if\n\n;\n; Now that we have the set of indexes whose values fall within\n; the given range, take the corresponding lat/lon values and store\n; them, so later we can color this set of markers with the appropriate\n; color.\n;\n if (.not.any(ismissing(indexes_O3))) then\n npts_range_O3 = dimsizes(indexes_O3) ; # of points in this range.\n\n lat_O3(i,0:npts_range_O3-1) = obslat(indexes_O3)\n lon_O3(i,0:npts_range_O3-1) = obslon(indexes_O3)\n ;print(\"O3: \"+npts_range_O3)\n end if\n\n\n delete(indexes_O3) ; Necessary b/c \"indexes\" may be a different\n end do\n\n lat_O3@_FillValue = -999\n lon_O3@_FillValue = -999\n\n gsres = True\n gsres@gsMarkerIndex = 16 ; Use filled dots for markers.\n\n hollowres = True\n hollowres@gsMarkerIndex = 4\n hollowres@gsMarkerColor = \"black\"\n hollowres@gsMarkerSizeF = 0.008\n\n;;;;;;;;; Plot Ozone\n pname=plot_dir+\"/OBS-FORECAST_O3_\"+rundate+runtime\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n pmid_O3 = new(num_distinct_markers,graphic)\n hollow_O3 = new(num_distinct_markers,graphic)\n\n cnres@tiMainString = pdate+\" \"+runtime+\" UTC O~B~3~N~ (ppbV)\"\n cnres@cnLevelSelectionMode = \"ManualLevels\"\n cnres@cnMinLevelValF = 0.\n cnres@cnMaxLevelValF = 80\n cnres@cnLevelSpacingF = 4\n\n ;plot = gsn_csm_contour_map(wks,o3(it,:,:),res)\n map = gsn_csm_map(wks,mpres)\n contour = gsn_csm_contour(wks,o3(it,:,:),cnres)\n vector = gsn_csm_vector(wks,UV10(0,it,:,:),UV10(1,it,:,:),res_vc)\n overlay(map,contour)\n overlay(map,vector)\n\n pmid = new(num_distinct_markers,graphic)\n hollow = new(num_distinct_markers,graphic)\n do i = 0, num_distinct_markers-1\n if (.not.ismissing(lat_O3(i,0)))\n gsres@gsMarkerColor = colors(i,:)\n gsres@gsMarkerSizeF = 0.008\n gsres@gsMarkerThicknessF = 1\n pmid(i) = gsn_add_polymarker(wks,vector,lon_O3(i,:),lat_O3(i,:),gsres)\n hollow(i) = gsn_add_polymarker(wks,vector,lon_O3(i,:),lat_O3(i,:),hollowres)\n end if\n end do\n\n draw(map)\n frame(wks)\n delete(wks)\n delete(pmid_O3)\n delete(hollow_O3)\n system(\"composite -geometry 100x70+900+900 
/groups/ESS/aalnaim/cmaq/mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\n\n\n delete(pmid)\n delete(hollow)\n delete(site)\n delete(sitename)\n delete(sitelat)\n delete(sitelon)\n delete(O3_obs)\n delete(obslon)\n delete(obslat)\n delete(obsO3)\n delete([/lon_O3,lat_O3/])\n\nend do\ndelete(res)\n\n;/\n\nend\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\n\nconvert -delay 100 /groups/ESS/aalnaim/cmaq/plots/OBS*.png /groups/ESS/aalnaim/cmaq/plots/Airnow_$YYYYMMDD_POST_correct.gif\n\nif [ $? -eq 0 ]; then\n echo \"Generating AirNow images/gif Completed Successfully\"\n\techo \"Removing ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nelse\n echo \"Generating AirNow images/gif Failed!\"\n echo \"Removing ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nfi\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\n\n\nVariable: plot_dir\nType: string\nTotal Size: 8 bytes\n 1 values\nNumber of Dimensions: 1\nDimensions and sizes:\t[1]\nCoordinates: \n(0)\t/groups/ESS/aalnaim/cmaq/plots\n(0)\t316.8002\n(0)\t262.6003\n(0)\t293.9359\n(0)\t24 265 442\n(0)\t85.06\n(0)\t4.506333333333333\n(0)\t30.63602873848283\n(0)\tgsn_open_wks: Error: directory '/groups/ESS/aalnaim/cmaq/plots/' does not exist or lacks write permissions.\nconvert: unable to open image `/groups/ESS/aalnaim/cmaq/plots/OBS*.png': No such file or directory @ error/blob.c/OpenBlob/2881.\nconvert: no images defined `/groups/ESS/aalnaim/cmaq/plots/Airnow_.gif' @ error/convert.c/ConvertImageCommand/3226.\nGenerating AirNow images/gif Failed!\nRemoving ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\n", + "history_begin_time" : 1656454210163, + "history_end_time" : 1656454214584, + "history_notes" : null, + "history_process" : "is1w3m", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "qp3u62b701h", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=$(date -d '3 day ago' '+%Y%m%d')\nexport stdate_post=$(date -d '3 day ago' '+%Y-%m-%d') \nexport eddate_post=$(date -d '2 day ago' '+%Y%m%d')\n\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files\"\nexport mcip_dir=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km\"\nexport graph_dir=\"/groups/ESS/aalnaim/cmaq/plots\"\n\nexport obs_dir_NCL=\"/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X\"\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\n\ndate = getenv(\"YYYYMMDD_POST\") \nd1 = getenv(\"stdate_post\") \nd2 = getenv(\"eddate_post\") \n\nobs_dir = getenv(\"obs_dir_NCL\")\nplot_dir = getenv(\"graph_dir\") 
\n\nhr=new(24,\"string\")\nhr=(/\"00\",\"01\",\"02\",\"03\",\"04\",\"05\",\"06\",\"07\",\"08\",\"09\",\"10\",\"11\",\"12\",\"13\",\"14\",\"15\",\"16\",\"17\",\"18\",\"19\",\"20\",\"21\",\"22\",\"23\"/)\n\nprint(plot_dir)\naconc_dir = getenv(\"postdata_dir\") \ngrid_dir = getenv(\"mcip_dir\") \n\ncdf_file1 = addfile(aconc_dir+\"/COMBINE3D_ACONC_v531_gcc_AQF5X_\"+date+\"_ML_extracted.nc\",\"r\")\ncdf_file= addfile(grid_dir+\"/GRIDCRO2D_\"+date+\".nc\",\"r\")\ncdf_file2= addfile(grid_dir+\"/METCRO2D_\"+date+\".nc\",\"r\")\n\ntime = cdf_file1->TFLAG(:,0,:)\no3 = cdf_file1->O3(:,:,:) ;ppb\nwspd10=cdf_file2->WSPD10(:,0,:,:)\nwdir10=cdf_file2->WDIR10(:,0,:,:)\n\ntemp = cdf_file2->TEMP2\n\nnt = dimsizes(o3(:,0,0))\nny = dimsizes(o3(0,:,0))\nnx = dimsizes(o3(0,0,:))\n\nprint(max(temp))\nprint(min(temp))\nprint(avg(temp))\n\n\n\nprint(nt+\" \"+ny+\" \"+nx)\nprint(max(o3))\nprint(min(o3))\nprint(avg(o3))\n\nlat = cdf_file->LAT(0,0,:,:)\nlon = cdf_file->LON(0,0,:,:)\n\no3@lat2d = lat\no3@lon2d = lon\no3@unit = \"ppbv\"\n\nUV10=wind_component(wspd10,wdir10,0)\nUV10@lat2d = lat\nUV10@lon2d = lon\n\n\nres = True\nres@gsnMaximize = True ; maximize pot in frame\nres@gsnFrame = False ; dont advance frame\nres@gsnDraw = False\nres@gsnLeftString = \"\"\nres@gsnRightString = \"\"\nres@txFont = \"times-roman\"\nres@tiMainFont = \"times-roman\"\n;res@tiMainFontHeightF = 0.02\n;res@vpWidthF = 0.7\n;res@vpHeightF = 0.7\n\n;;set map;;\nmpres = res\nmpres@mpLimitMode = \"LatLon\"\nmpres@mpDataSetName = \"Earth..4\"\nmpres@mpDataBaseVersion = \"MediumRes\"\nmpres@mpOutlineOn = True\nmpres@mpGeophysicalLineThicknessF = 1.5\nmpres@mpFillDrawOrder = \"PostDraw\"\nmpres@mpFillOn = False\nmpres@mpAreaMaskingOn = True\nmpres@mpOutlineBoundarySets = \"GeophysicalAndUSStates\"\nmpres@mpOutlineSpecifiers = \"United States:States\"\nmpres@mpProjection = \"LambertConformal\"\nmpres@mpLambertParallel1F = 33.\nmpres@mpLambertParallel2F = 45.\nmpres@mpLambertMeridianF = -98.\nmpres@mpMinLonF = -120 ;min(lon)+0.2\nmpres@mpMaxLonF = -70 ;max(lon)-0.2\nmpres@mpMinLatF = 25 ;min(lat)+0.05\nmpres@mpMaxLatF = 50 ;max(lat)-0.05\nmpres@pmTickMarkDisplayMode = \"Always\"\nmpres@mpLandFillColor = \"white\"\nmpres@mpInlandWaterFillColor = \"white\"\nmpres@mpOceanFillColor = \"white\"\nmpres@mpGeophysicalLineColor = \"Black\"\n\n;mpres@lbLabelAutoStride = True\nmpres@tiXAxisFont = 25\nmpres@pmTickMarkDisplayMode = \"Always\"\nmpres@tmXBLabelFont = 25\nmpres@tmXBLabelFontHeightF = 0.013\nmpres@tmXBLabelDeltaF = -0.5\nmpres@tmYLLabelFont = 25\nmpres@tmYLLabelFontHeightF = 0.013\nmpres@tmXBLabelDeltaF = -0.5\nmpres@tmXTLabelsOn = False\nmpres@tmXTLabelFont = 25\nmpres@tmXTLabelFontHeightF = 0.013\nmpres@tmYRLabelsOn = False\nmpres@tmYRLabelFont = 25\nmpres@tmYRLabelFontHeightF = 0.013\n\n;;set contour;;\ncnres = res\ncnres@cnFillDrawOrder = \"PreDraw\"\ncnres@cnFillOn = True\ncnres@cnLinesOn = False\ncnres@cnLineLabelsOn = False\ncnres@lbLabelFont = 25\ncnres@lbLabelFontHeightF = 0.013\ncnres@tiXAxisFont = 25\ncnres@pmLabelBarWidthF = 0.5\ncnres@pmLabelBarHeightF = 0.1\n;cnres@pmLabelBarOrthogonalPosF = -0.02\ncnres@lbLabelAutoStride = True\n\n;set vector;;\nres_vc = res\nres_vc@vcGlyphStyle = \"LineArrow\"\nres_vc@vcLineArrowThicknessF = 3\nres_vc@vcMinDistanceF = 0.03\nres_vc@vcRefLengthF = 0.03\nres_vc@vcRefAnnoOn = True\nres_vc@vcRefMagnitudeF = 16\nres_vc@vcRefAnnoString1 = \"16m/s\"\nres_vc@vcRefAnnoSide = \"Top\"\nres_vc@vcRefAnnoString2On = False\nres_vc@vcRefAnnoPerimOn = False\nres_vc@vcRefAnnoOrthogonalPosF = 
-0.02\nres_vc@vcRefAnnoParallelPosF = 0.999\n;res_vc@vcRefAnnoBackgroundColor = \"White\"\nres_vc@vcVectorDrawOrder = \"PostDraw\"\n\ndo it = 0, nt-1\n if (it .lt. 12) then\n pdate=d1\n else\n pdate=d2\n end if\n\n ;print(time(it,0)+\" \"+time(it,1))\n rundate = yyyyddd_to_yyyymmdd( time(it,0) )\n runtime = hr( tointeger(time(it,1)/10000) )\n\n site = readAsciiTable(obs_dir+\"/AQF5X_Hourly_\"+rundate+runtime+\".dat\",1,\"string\",1)\n nrows = dimsizes(site)\n sitename = str_get_field(site,1,\",\")\n sitelat = stringtofloat(str_get_field(site,2,\",\"))\n sitelon = stringtofloat(str_get_field(site,3,\",\"))\n O3_obs = stringtofloat(str_get_field(site,4,\",\"))\n\n obslon = sitelon(:,0)\n obslat = sitelat(:,0)\n obsO3 = O3_obs(:,0)\n\n npts = nrows(0)\n\n obsO3@_FillValue = -999.\n\n;--- levels for dividing\n levels_O3 = ispan(0,80,4)\n\n nlevels = dimsizes(levels_O3)\n\n colors = span_color_rgba(\"WhiteBlueGreenYellowRed\",nlevels+1)\n\n num_distinct_markers = nlevels+1 ; number of distinct markers\n lat_O3 = new((/num_distinct_markers,npts/),float)\n lon_O3 = new((/num_distinct_markers,npts/),float)\n lat_O3 = -999\n lon_O3 = -999\n\n\n;\n; Group the points according to which range they fall in. At the\n; same time, create the label that we will use later in the labelbar\n;\n do i = 0, num_distinct_markers-1\n if (i.eq.0) then\n indexes_O3 = ind(obsO3(:).lt.levels_O3(0))\n end if\n if (i.eq.num_distinct_markers-1) then\n indexes_O3 = ind(obsO3(:).ge.max(levels_O3))\n end if\n if (i.gt.0.and.i.lt.num_distinct_markers-1) then\n indexes_O3 = ind(obsO3(:).ge.levels_O3(i-1).and.obsO3(:).lt.levels_O3(i))\n end if\n\n;\n; Now that we have the set of indexes whose values fall within\n; the given range, take the corresponding lat/lon values and store\n; them, so later we can color this set of markers with the appropriate\n; color.\n;\n if (.not.any(ismissing(indexes_O3))) then\n npts_range_O3 = dimsizes(indexes_O3) ; # of points in this range.\n\n lat_O3(i,0:npts_range_O3-1) = obslat(indexes_O3)\n lon_O3(i,0:npts_range_O3-1) = obslon(indexes_O3)\n ;print(\"O3: \"+npts_range_O3)\n end if\n\n\n delete(indexes_O3) ; Necessary b/c \"indexes\" may be a different\n end do\n\n lat_O3@_FillValue = -999\n lon_O3@_FillValue = -999\n\n gsres = True\n gsres@gsMarkerIndex = 16 ; Use filled dots for markers.\n\n hollowres = True\n hollowres@gsMarkerIndex = 4\n hollowres@gsMarkerColor = \"black\"\n hollowres@gsMarkerSizeF = 0.008\n\n;;;;;;;;; Plot Ozone\n pname=plot_dir+\"/OBS-FORECAST_O3_\"+rundate+runtime\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n pmid_O3 = new(num_distinct_markers,graphic)\n hollow_O3 = new(num_distinct_markers,graphic)\n\n cnres@tiMainString = pdate+\" \"+runtime+\" UTC O~B~3~N~ (ppbV)\"\n cnres@cnLevelSelectionMode = \"ManualLevels\"\n cnres@cnMinLevelValF = 0.\n cnres@cnMaxLevelValF = 80\n cnres@cnLevelSpacingF = 4\n\n ;plot = gsn_csm_contour_map(wks,o3(it,:,:),res)\n map = gsn_csm_map(wks,mpres)\n contour = gsn_csm_contour(wks,o3(it,:,:),cnres)\n vector = gsn_csm_vector(wks,UV10(0,it,:,:),UV10(1,it,:,:),res_vc)\n overlay(map,contour)\n overlay(map,vector)\n\n pmid = new(num_distinct_markers,graphic)\n hollow = new(num_distinct_markers,graphic)\n do i = 0, num_distinct_markers-1\n if (.not.ismissing(lat_O3(i,0)))\n gsres@gsMarkerColor = colors(i,:)\n gsres@gsMarkerSizeF = 0.008\n gsres@gsMarkerThicknessF = 1\n pmid(i) = gsn_add_polymarker(wks,vector,lon_O3(i,:),lat_O3(i,:),gsres)\n hollow(i) = 
gsn_add_polymarker(wks,vector,lon_O3(i,:),lat_O3(i,:),hollowres)\n end if\n end do\n\n draw(map)\n frame(wks)\n delete(wks)\n delete(pmid_O3)\n delete(hollow_O3)\n system(\"composite -geometry 100x70+900+900 /groups/ESS/aalnaim/cmaq/mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\n\n\n delete(pmid)\n delete(hollow)\n delete(site)\n delete(sitename)\n delete(sitelat)\n delete(sitelon)\n delete(O3_obs)\n delete(obslon)\n delete(obslat)\n delete(obsO3)\n delete([/lon_O3,lat_O3/])\n\nend do\ndelete(res)\n\n;/\n\nend\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\n\nconvert -delay 100 /groups/ESS/aalnaim/cmaq/plots/OBS*.png /groups/ESS/aalnaim/cmaq/plots/Airnow_$YYYYMMDD_POST_correct.gif\n\nif [ $? -eq 0 ]; then\n echo \"Generating AirNow images/gif Completed Successfully\"\n\techo \"Removing ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nelse\n echo \"Generating AirNow images/gif Failed!\"\n echo \"Removing ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nfi\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\n\n\nVariable: plot_dir\nType: string\nTotal Size: 8 bytes\n 1 values\nNumber of Dimensions: 1\nDimensions and sizes:\t[1]\nCoordinates: \n(0)\t/groups/ESS/aalnaim/cmaq/plots\n(0)\t316.8002\n(0)\t262.6003\n(0)\t293.9359\n(0)\t24 265 442\n(0)\t85.06\n(0)\t4.506333333333333\n(0)\t30.63602873848283\n(0)\tgsn_open_wks: Error: directory '/groups/ESS/aalnaim/cmaq/plots/' does not exist or lacks write permissions.\nconvert: unable to open image `/groups/ESS/aalnaim/cmaq/plots/OBS*.png': No such file or directory @ error/blob.c/OpenBlob/2881.\nconvert: no images defined `/groups/ESS/aalnaim/cmaq/plots/Airnow_.gif' @ error/convert.c/ConvertImageCommand/3226.\nGenerating AirNow images/gif Failed!\nRemoving ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\n", + "history_begin_time" : 1656454202290, + "history_end_time" : 1656454210067, + "history_notes" : null, + "history_process" : "is1w3m", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "LSsBAoouuOGp", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=$(date -d '3 day ago' '+%Y%m%d')\nexport stdate_post=$(date -d '3 day ago' '+%Y-%m-%d') \nexport eddate_post=$(date -d '2 day ago' '+%Y%m%d')\n\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files\"\nexport mcip_dir=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km\"\nexport graph_dir=\"/groups/ESS/aalnaim/cmaq/plots\"\n\nexport obs_dir_NCL=\"/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X\"\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\n\ndate = getenv(\"YYYYMMDD_POST\") \nd1 = getenv(\"stdate_post\") \nd2 = 
getenv(\"eddate_post\") \n\nobs_dir = getenv(\"obs_dir_NCL\")\nplot_dir = getenv(\"graph_dir\") \n\nhr=new(24,\"string\")\nhr=(/\"00\",\"01\",\"02\",\"03\",\"04\",\"05\",\"06\",\"07\",\"08\",\"09\",\"10\",\"11\",\"12\",\"13\",\"14\",\"15\",\"16\",\"17\",\"18\",\"19\",\"20\",\"21\",\"22\",\"23\"/)\n\nprint(plot_dir)\naconc_dir = getenv(\"postdata_dir\") \ngrid_dir = getenv(\"mcip_dir\") \n\ncdf_file1 = addfile(aconc_dir+\"/COMBINE3D_ACONC_v531_gcc_AQF5X_20220612_ML_extracted.nc\",\"r\")\ncdf_file= addfile(grid_dir+\"/GRIDCRO2D_\"+date+\".nc\",\"r\")\ncdf_file2= addfile(grid_dir+\"/METCRO2D_\"+date+\".nc\",\"r\")\n\ntime = cdf_file1->TFLAG(:,0,:)\no3 = cdf_file1->O3(:,:,:) ;ppb\nwspd10=cdf_file2->WSPD10(:,0,:,:)\nwdir10=cdf_file2->WDIR10(:,0,:,:)\n\ntemp = cdf_file2->TEMP2\n\nnt = dimsizes(o3(:,0,0))\nny = dimsizes(o3(0,:,0))\nnx = dimsizes(o3(0,0,:))\n\nprint(max(temp))\nprint(min(temp))\nprint(avg(temp))\n\n\n\nprint(nt+\" \"+ny+\" \"+nx)\nprint(max(o3))\nprint(min(o3))\nprint(avg(o3))\n\nlat = cdf_file->LAT(0,0,:,:)\nlon = cdf_file->LON(0,0,:,:)\n\no3@lat2d = lat\no3@lon2d = lon\no3@unit = \"ppbv\"\n\nUV10=wind_component(wspd10,wdir10,0)\nUV10@lat2d = lat\nUV10@lon2d = lon\n\n\nres = True\nres@gsnMaximize = True ; maximize pot in frame\nres@gsnFrame = False ; dont advance frame\nres@gsnDraw = False\nres@gsnLeftString = \"\"\nres@gsnRightString = \"\"\nres@txFont = \"times-roman\"\nres@tiMainFont = \"times-roman\"\n;res@tiMainFontHeightF = 0.02\n;res@vpWidthF = 0.7\n;res@vpHeightF = 0.7\n\n;;set map;;\nmpres = res\nmpres@mpLimitMode = \"LatLon\"\nmpres@mpDataSetName = \"Earth..4\"\nmpres@mpDataBaseVersion = \"MediumRes\"\nmpres@mpOutlineOn = True\nmpres@mpGeophysicalLineThicknessF = 1.5\nmpres@mpFillDrawOrder = \"PostDraw\"\nmpres@mpFillOn = False\nmpres@mpAreaMaskingOn = True\nmpres@mpOutlineBoundarySets = \"GeophysicalAndUSStates\"\nmpres@mpOutlineSpecifiers = \"United States:States\"\nmpres@mpProjection = \"LambertConformal\"\nmpres@mpLambertParallel1F = 33.\nmpres@mpLambertParallel2F = 45.\nmpres@mpLambertMeridianF = -98.\nmpres@mpMinLonF = -120 ;min(lon)+0.2\nmpres@mpMaxLonF = -70 ;max(lon)-0.2\nmpres@mpMinLatF = 25 ;min(lat)+0.05\nmpres@mpMaxLatF = 50 ;max(lat)-0.05\nmpres@pmTickMarkDisplayMode = \"Always\"\nmpres@mpLandFillColor = \"white\"\nmpres@mpInlandWaterFillColor = \"white\"\nmpres@mpOceanFillColor = \"white\"\nmpres@mpGeophysicalLineColor = \"Black\"\n\n;mpres@lbLabelAutoStride = True\nmpres@tiXAxisFont = 25\nmpres@pmTickMarkDisplayMode = \"Always\"\nmpres@tmXBLabelFont = 25\nmpres@tmXBLabelFontHeightF = 0.013\nmpres@tmXBLabelDeltaF = -0.5\nmpres@tmYLLabelFont = 25\nmpres@tmYLLabelFontHeightF = 0.013\nmpres@tmXBLabelDeltaF = -0.5\nmpres@tmXTLabelsOn = False\nmpres@tmXTLabelFont = 25\nmpres@tmXTLabelFontHeightF = 0.013\nmpres@tmYRLabelsOn = False\nmpres@tmYRLabelFont = 25\nmpres@tmYRLabelFontHeightF = 0.013\n\n;;set contour;;\ncnres = res\ncnres@cnFillDrawOrder = \"PreDraw\"\ncnres@cnFillOn = True\ncnres@cnLinesOn = False\ncnres@cnLineLabelsOn = False\ncnres@lbLabelFont = 25\ncnres@lbLabelFontHeightF = 0.013\ncnres@tiXAxisFont = 25\ncnres@pmLabelBarWidthF = 0.5\ncnres@pmLabelBarHeightF = 0.1\n;cnres@pmLabelBarOrthogonalPosF = -0.02\ncnres@lbLabelAutoStride = True\n\n;set vector;;\nres_vc = res\nres_vc@vcGlyphStyle = \"LineArrow\"\nres_vc@vcLineArrowThicknessF = 3\nres_vc@vcMinDistanceF = 0.03\nres_vc@vcRefLengthF = 0.03\nres_vc@vcRefAnnoOn = True\nres_vc@vcRefMagnitudeF = 16\nres_vc@vcRefAnnoString1 = \"16m/s\"\nres_vc@vcRefAnnoSide = \"Top\"\nres_vc@vcRefAnnoString2On = 
False\nres_vc@vcRefAnnoPerimOn = False\nres_vc@vcRefAnnoOrthogonalPosF = -0.02\nres_vc@vcRefAnnoParallelPosF = 0.999\n;res_vc@vcRefAnnoBackgroundColor = \"White\"\nres_vc@vcVectorDrawOrder = \"PostDraw\"\n\ndo it = 0, nt-1\n if (it .lt. 12) then\n pdate=d1\n else\n pdate=d2\n end if\n\n ;print(time(it,0)+\" \"+time(it,1))\n rundate = yyyyddd_to_yyyymmdd( time(it,0) )\n runtime = hr( tointeger(time(it,1)/10000) )\n\n site = readAsciiTable(obs_dir+\"/AQF5X_Hourly_\"+rundate+runtime+\".dat\",1,\"string\",1)\n nrows = dimsizes(site)\n sitename = str_get_field(site,1,\",\")\n sitelat = stringtofloat(str_get_field(site,2,\",\"))\n sitelon = stringtofloat(str_get_field(site,3,\",\"))\n O3_obs = stringtofloat(str_get_field(site,4,\",\"))\n\n obslon = sitelon(:,0)\n obslat = sitelat(:,0)\n obsO3 = O3_obs(:,0)\n\n npts = nrows(0)\n\n obsO3@_FillValue = -999.\n\n;--- levels for dividing\n levels_O3 = ispan(0,80,4)\n\n nlevels = dimsizes(levels_O3)\n\n colors = span_color_rgba(\"WhiteBlueGreenYellowRed\",nlevels+1)\n\n num_distinct_markers = nlevels+1 ; number of distinct markers\n lat_O3 = new((/num_distinct_markers,npts/),float)\n lon_O3 = new((/num_distinct_markers,npts/),float)\n lat_O3 = -999\n lon_O3 = -999\n\n\n;\n; Group the points according to which range they fall in. At the\n; same time, create the label that we will use later in the labelbar\n;\n do i = 0, num_distinct_markers-1\n if (i.eq.0) then\n indexes_O3 = ind(obsO3(:).lt.levels_O3(0))\n end if\n if (i.eq.num_distinct_markers-1) then\n indexes_O3 = ind(obsO3(:).ge.max(levels_O3))\n end if\n if (i.gt.0.and.i.lt.num_distinct_markers-1) then\n indexes_O3 = ind(obsO3(:).ge.levels_O3(i-1).and.obsO3(:).lt.levels_O3(i))\n end if\n\n;\n; Now that we have the set of indexes whose values fall within\n; the given range, take the corresponding lat/lon values and store\n; them, so later we can color this set of markers with the appropriate\n; color.\n;\n if (.not.any(ismissing(indexes_O3))) then\n npts_range_O3 = dimsizes(indexes_O3) ; # of points in this range.\n\n lat_O3(i,0:npts_range_O3-1) = obslat(indexes_O3)\n lon_O3(i,0:npts_range_O3-1) = obslon(indexes_O3)\n ;print(\"O3: \"+npts_range_O3)\n end if\n\n\n delete(indexes_O3) ; Necessary b/c \"indexes\" may be a different\n end do\n\n lat_O3@_FillValue = -999\n lon_O3@_FillValue = -999\n\n gsres = True\n gsres@gsMarkerIndex = 16 ; Use filled dots for markers.\n\n hollowres = True\n hollowres@gsMarkerIndex = 4\n hollowres@gsMarkerColor = \"black\"\n hollowres@gsMarkerSizeF = 0.008\n\n;;;;;;;;; Plot Ozone\n pname=plot_dir+\"/OBS-FORECAST_O3_\"+rundate+runtime\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n pmid_O3 = new(num_distinct_markers,graphic)\n hollow_O3 = new(num_distinct_markers,graphic)\n\n cnres@tiMainString = pdate+\" \"+runtime+\" UTC O~B~3~N~ (ppbV)\"\n cnres@cnLevelSelectionMode = \"ManualLevels\"\n cnres@cnMinLevelValF = 0.\n cnres@cnMaxLevelValF = 80\n cnres@cnLevelSpacingF = 4\n\n ;plot = gsn_csm_contour_map(wks,o3(it,:,:),res)\n map = gsn_csm_map(wks,mpres)\n contour = gsn_csm_contour(wks,o3(it,:,:),cnres)\n vector = gsn_csm_vector(wks,UV10(0,it,:,:),UV10(1,it,:,:),res_vc)\n overlay(map,contour)\n overlay(map,vector)\n\n pmid = new(num_distinct_markers,graphic)\n hollow = new(num_distinct_markers,graphic)\n do i = 0, num_distinct_markers-1\n if (.not.ismissing(lat_O3(i,0)))\n gsres@gsMarkerColor = colors(i,:)\n gsres@gsMarkerSizeF = 0.008\n gsres@gsMarkerThicknessF = 1\n pmid(i) = 
gsn_add_polymarker(wks,vector,lon_O3(i,:),lat_O3(i,:),gsres)\n hollow(i) = gsn_add_polymarker(wks,vector,lon_O3(i,:),lat_O3(i,:),hollowres)\n end if\n end do\n\n draw(map)\n frame(wks)\n delete(wks)\n delete(pmid_O3)\n delete(hollow_O3)\n system(\"composite -geometry 100x70+900+900 /groups/ESS/aalnaim/cmaq/mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\n\n\n delete(pmid)\n delete(hollow)\n delete(site)\n delete(sitename)\n delete(sitelat)\n delete(sitelon)\n delete(O3_obs)\n delete(obslon)\n delete(obslat)\n delete(obsO3)\n delete([/lon_O3,lat_O3/])\n\nend do\ndelete(res)\n\n;/\n\nend\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\n\nconvert -delay 100 /groups/ESS/aalnaim/cmaq/plots/OBS*.png /groups/ESS/aalnaim/cmaq/plots/Airnow_$YYYYMMDD_POST_correct.gif\n\nif [ $? -eq 0 ]; then\n echo \"Generating AirNow images/gif Completed Successfully\"\n\techo \"Removing ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nelse\n echo \"Generating AirNow images/gif Failed!\"\n echo \"Removing ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nfi\n", + "history_output" : "./geoweaver-LSsBAoouuOGp.sh: line 17: /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl: No space left on device\n Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\nfatal: can't find file \"/groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\"\n\n\nconvert: unable to open image `/groups/ESS/aalnaim/cmaq/plots/OBS*.png': No such file or directory @ error/blob.c/OpenBlob/2881.\nconvert: no images defined `/groups/ESS/aalnaim/cmaq/plots/Airnow_.gif' @ error/convert.c/ConvertImageCommand/3226.\nGenerating AirNow images/gif Failed!\nRemoving ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\nrm: cannot remove '/groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl': No such file or directory\n", + "history_begin_time" : 1656358167405, + "history_end_time" : null, + "history_notes" : null, + "history_process" : "is1w3m", + "host_id" : null, + "indicator" : "Running" +},{ + "history_id" : "uQcHolUOgWAm", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=20220612 #This needs to be auto date\nexport stdate_post=2022-06-12 #This needs to be auto date\nexport eddate_post=2022-06-13 #This needs to be auto date\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files\"\nexport mcip_dir=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km\"\nexport graph_dir=\"/groups/ESS/aalnaim/cmaq/plots\"\n\nexport obs_dir_NCL=\"/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X\"\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\n\ndate = getenv(\"YYYYMMDD_POST\") \nd1 = getenv(\"stdate_post\") \nd2 = 
getenv(\"eddate_post\") \n\nobs_dir = getenv(\"obs_dir_NCL\")\nplot_dir = getenv(\"graph_dir\") \n\nhr=new(24,\"string\")\nhr=(/\"00\",\"01\",\"02\",\"03\",\"04\",\"05\",\"06\",\"07\",\"08\",\"09\",\"10\",\"11\",\"12\",\"13\",\"14\",\"15\",\"16\",\"17\",\"18\",\"19\",\"20\",\"21\",\"22\",\"23\"/)\n\nprint(plot_dir)\naconc_dir = getenv(\"postdata_dir\") \ngrid_dir = getenv(\"mcip_dir\") \n\ncdf_file1 = addfile(aconc_dir+\"/COMBINE3D_ACONC_v531_gcc_AQF5X_20220612_ML_extracted.nc\",\"r\")\ncdf_file= addfile(grid_dir+\"/GRIDCRO2D_\"+date+\".nc\",\"r\")\ncdf_file2= addfile(grid_dir+\"/METCRO2D_\"+date+\".nc\",\"r\")\n\ntime = cdf_file1->TFLAG(:,0,:)\no3 = cdf_file1->O3(:,:,:) ;ppb\nwspd10=cdf_file2->WSPD10(:,0,:,:)\nwdir10=cdf_file2->WDIR10(:,0,:,:)\n\ntemp = cdf_file2->TEMP2\n\nnt = dimsizes(o3(:,0,0))\nny = dimsizes(o3(0,:,0))\nnx = dimsizes(o3(0,0,:))\n\nprint(max(temp))\nprint(min(temp))\nprint(avg(temp))\n\n\n\nprint(nt+\" \"+ny+\" \"+nx)\nprint(max(o3))\nprint(min(o3))\nprint(avg(o3))\n\nlat = cdf_file->LAT(0,0,:,:)\nlon = cdf_file->LON(0,0,:,:)\n\no3@lat2d = lat\no3@lon2d = lon\no3@unit = \"ppbv\"\n\nUV10=wind_component(wspd10,wdir10,0)\nUV10@lat2d = lat\nUV10@lon2d = lon\n\n\nres = True\nres@gsnMaximize = True ; maximize pot in frame\nres@gsnFrame = False ; dont advance frame\nres@gsnDraw = False\nres@gsnLeftString = \"\"\nres@gsnRightString = \"\"\nres@txFont = \"times-roman\"\nres@tiMainFont = \"times-roman\"\n;res@tiMainFontHeightF = 0.02\n;res@vpWidthF = 0.7\n;res@vpHeightF = 0.7\n\n;;set map;;\nmpres = res\nmpres@mpLimitMode = \"LatLon\"\nmpres@mpDataSetName = \"Earth..4\"\nmpres@mpDataBaseVersion = \"MediumRes\"\nmpres@mpOutlineOn = True\nmpres@mpGeophysicalLineThicknessF = 1.5\nmpres@mpFillDrawOrder = \"PostDraw\"\nmpres@mpFillOn = False\nmpres@mpAreaMaskingOn = True\nmpres@mpOutlineBoundarySets = \"GeophysicalAndUSStates\"\nmpres@mpOutlineSpecifiers = \"United States:States\"\nmpres@mpProjection = \"LambertConformal\"\nmpres@mpLambertParallel1F = 33.\nmpres@mpLambertParallel2F = 45.\nmpres@mpLambertMeridianF = -98.\nmpres@mpMinLonF = -120 ;min(lon)+0.2\nmpres@mpMaxLonF = -70 ;max(lon)-0.2\nmpres@mpMinLatF = 25 ;min(lat)+0.05\nmpres@mpMaxLatF = 50 ;max(lat)-0.05\nmpres@pmTickMarkDisplayMode = \"Always\"\nmpres@mpLandFillColor = \"white\"\nmpres@mpInlandWaterFillColor = \"white\"\nmpres@mpOceanFillColor = \"white\"\nmpres@mpGeophysicalLineColor = \"Black\"\n\n;mpres@lbLabelAutoStride = True\nmpres@tiXAxisFont = 25\nmpres@pmTickMarkDisplayMode = \"Always\"\nmpres@tmXBLabelFont = 25\nmpres@tmXBLabelFontHeightF = 0.013\nmpres@tmXBLabelDeltaF = -0.5\nmpres@tmYLLabelFont = 25\nmpres@tmYLLabelFontHeightF = 0.013\nmpres@tmXBLabelDeltaF = -0.5\nmpres@tmXTLabelsOn = False\nmpres@tmXTLabelFont = 25\nmpres@tmXTLabelFontHeightF = 0.013\nmpres@tmYRLabelsOn = False\nmpres@tmYRLabelFont = 25\nmpres@tmYRLabelFontHeightF = 0.013\n\n;;set contour;;\ncnres = res\ncnres@cnFillDrawOrder = \"PreDraw\"\ncnres@cnFillOn = True\ncnres@cnLinesOn = False\ncnres@cnLineLabelsOn = False\ncnres@lbLabelFont = 25\ncnres@lbLabelFontHeightF = 0.013\ncnres@tiXAxisFont = 25\ncnres@pmLabelBarWidthF = 0.5\ncnres@pmLabelBarHeightF = 0.1\n;cnres@pmLabelBarOrthogonalPosF = -0.02\ncnres@lbLabelAutoStride = True\n\n;set vector;;\nres_vc = res\nres_vc@vcGlyphStyle = \"LineArrow\"\nres_vc@vcLineArrowThicknessF = 3\nres_vc@vcMinDistanceF = 0.03\nres_vc@vcRefLengthF = 0.03\nres_vc@vcRefAnnoOn = True\nres_vc@vcRefMagnitudeF = 16\nres_vc@vcRefAnnoString1 = \"16m/s\"\nres_vc@vcRefAnnoSide = \"Top\"\nres_vc@vcRefAnnoString2On = 
False\nres_vc@vcRefAnnoPerimOn = False\nres_vc@vcRefAnnoOrthogonalPosF = -0.02\nres_vc@vcRefAnnoParallelPosF = 0.999\n;res_vc@vcRefAnnoBackgroundColor = \"White\"\nres_vc@vcVectorDrawOrder = \"PostDraw\"\n\ndo it = 0, nt-1\n if (it .lt. 12) then\n pdate=d1\n else\n pdate=d2\n end if\n\n ;print(time(it,0)+\" \"+time(it,1))\n rundate = yyyyddd_to_yyyymmdd( time(it,0) )\n runtime = hr( tointeger(time(it,1)/10000) )\n\n site = readAsciiTable(obs_dir+\"/AQF5X_Hourly_\"+rundate+runtime+\".dat\",1,\"string\",1)\n nrows = dimsizes(site)\n sitename = str_get_field(site,1,\",\")\n sitelat = stringtofloat(str_get_field(site,2,\",\"))\n sitelon = stringtofloat(str_get_field(site,3,\",\"))\n O3_obs = stringtofloat(str_get_field(site,4,\",\"))\n\n obslon = sitelon(:,0)\n obslat = sitelat(:,0)\n obsO3 = O3_obs(:,0)\n\n npts = nrows(0)\n\n obsO3@_FillValue = -999.\n\n;--- levels for dividing\n levels_O3 = ispan(0,80,4)\n\n nlevels = dimsizes(levels_O3)\n\n colors = span_color_rgba(\"WhiteBlueGreenYellowRed\",nlevels+1)\n\n num_distinct_markers = nlevels+1 ; number of distinct markers\n lat_O3 = new((/num_distinct_markers,npts/),float)\n lon_O3 = new((/num_distinct_markers,npts/),float)\n lat_O3 = -999\n lon_O3 = -999\n\n\n;\n; Group the points according to which range they fall in. At the\n; same time, create the label that we will use later in the labelbar\n;\n do i = 0, num_distinct_markers-1\n if (i.eq.0) then\n indexes_O3 = ind(obsO3(:).lt.levels_O3(0))\n end if\n if (i.eq.num_distinct_markers-1) then\n indexes_O3 = ind(obsO3(:).ge.max(levels_O3))\n end if\n if (i.gt.0.and.i.lt.num_distinct_markers-1) then\n indexes_O3 = ind(obsO3(:).ge.levels_O3(i-1).and.obsO3(:).lt.levels_O3(i))\n end if\n\n;\n; Now that we have the set of indexes whose values fall within\n; the given range, take the corresponding lat/lon values and store\n; them, so later we can color this set of markers with the appropriate\n; color.\n;\n if (.not.any(ismissing(indexes_O3))) then\n npts_range_O3 = dimsizes(indexes_O3) ; # of points in this range.\n\n lat_O3(i,0:npts_range_O3-1) = obslat(indexes_O3)\n lon_O3(i,0:npts_range_O3-1) = obslon(indexes_O3)\n ;print(\"O3: \"+npts_range_O3)\n end if\n\n\n delete(indexes_O3) ; Necessary b/c \"indexes\" may be a different\n end do\n\n lat_O3@_FillValue = -999\n lon_O3@_FillValue = -999\n\n gsres = True\n gsres@gsMarkerIndex = 16 ; Use filled dots for markers.\n\n hollowres = True\n hollowres@gsMarkerIndex = 4\n hollowres@gsMarkerColor = \"black\"\n hollowres@gsMarkerSizeF = 0.008\n\n;;;;;;;;; Plot Ozone\n pname=plot_dir+\"/OBS-FORECAST_O3_\"+rundate+runtime\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n pmid_O3 = new(num_distinct_markers,graphic)\n hollow_O3 = new(num_distinct_markers,graphic)\n\n cnres@tiMainString = pdate+\" \"+runtime+\" UTC O~B~3~N~ (ppbV)\"\n cnres@cnLevelSelectionMode = \"ManualLevels\"\n cnres@cnMinLevelValF = 0.\n cnres@cnMaxLevelValF = 80\n cnres@cnLevelSpacingF = 4\n\n ;plot = gsn_csm_contour_map(wks,o3(it,:,:),res)\n map = gsn_csm_map(wks,mpres)\n contour = gsn_csm_contour(wks,o3(it,:,:),cnres)\n vector = gsn_csm_vector(wks,UV10(0,it,:,:),UV10(1,it,:,:),res_vc)\n overlay(map,contour)\n overlay(map,vector)\n\n pmid = new(num_distinct_markers,graphic)\n hollow = new(num_distinct_markers,graphic)\n do i = 0, num_distinct_markers-1\n if (.not.ismissing(lat_O3(i,0)))\n gsres@gsMarkerColor = colors(i,:)\n gsres@gsMarkerSizeF = 0.008\n gsres@gsMarkerThicknessF = 1\n pmid(i) = 
gsn_add_polymarker(wks,vector,lon_O3(i,:),lat_O3(i,:),gsres)\n hollow(i) = gsn_add_polymarker(wks,vector,lon_O3(i,:),lat_O3(i,:),hollowres)\n end if\n end do\n\n draw(map)\n frame(wks)\n delete(wks)\n delete(pmid_O3)\n delete(hollow_O3)\n system(\"composite -geometry 100x70+900+900 /groups/ESS/aalnaim/cmaq/mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\n\n\n delete(pmid)\n delete(hollow)\n delete(site)\n delete(sitename)\n delete(sitelat)\n delete(sitelon)\n delete(O3_obs)\n delete(obslon)\n delete(obslat)\n delete(obsO3)\n delete([/lon_O3,lat_O3/])\n\nend do\ndelete(res)\n\n;/\n\nend\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\n\nconvert -delay 100 /groups/ESS/aalnaim/cmaq/plots/OBS*.png /groups/ESS/aalnaim/cmaq/plots/Airnow_$YYYYMMDD_POST_correct.gif\n\nif [ $? -eq 0 ]; then\n echo \"Generating AirNow images/gif Completed Successfully\"\n\techo \"Removing ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nelse\n echo \"Generating AirNow images/gif Failed!\"\n echo \"Removing ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nfi\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\n\n\nVariable: plot_dir\nType: string\nTotal Size: 8 bytes\n 1 values\nNumber of Dimensions: 1\nDimensions and sizes:\t[1]\nCoordinates: \n(0)\t/groups/ESS/aalnaim/cmaq/plots\n(0)\t318.4477\n(0)\t267.5703\n(0)\t293.6953\n(0)\t24 265 442\n(0)\t86.04666666666668\n(0)\t6.15\n(0)\t31.61014393784576\nGenerating AirNow images/gif Completed Successfully\nRemoving ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\n", + "history_begin_time" : 1656021723030, + "history_end_time" : 1656318425050, + "history_notes" : null, + "history_process" : "is1w3m", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "3ngpslP4stwV", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=20220613 #This needs to be auto date\nexport stdate_post=2022-06-13 #This needs to be auto date\nexport eddate_post=2022-06-14 #This needs to be auto date\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files\"\nexport mcip_dir=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km\"\nexport graph_dir=\"/groups/ESS/aalnaim/cmaq/plots\"\n\nexport obs_dir_NCL=\"/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X\"\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\n\ndate = getenv(\"YYYYMMDD_POST\") \nd1 = getenv(\"stdate_post\") \nd2 = getenv(\"eddate_post\") \n\nobs_dir = getenv(\"obs_dir_NCL\")\nplot_dir = getenv(\"graph_dir\") 
\n\nhr=new(24,\"string\")\nhr=(/\"00\",\"01\",\"02\",\"03\",\"04\",\"05\",\"06\",\"07\",\"08\",\"09\",\"10\",\"11\",\"12\",\"13\",\"14\",\"15\",\"16\",\"17\",\"18\",\"19\",\"20\",\"21\",\"22\",\"23\"/)\n\nprint(plot_dir)\naconc_dir = getenv(\"postdata_dir\") \ngrid_dir = getenv(\"mcip_dir\") \n\ncdf_file1 = addfile(aconc_dir+\"/COMBINE3D_ACONC_v531_gcc_AQF5X_20220612_ML_extracted.nc\",\"r\")\ncdf_file= addfile(grid_dir+\"/GRIDCRO2D_\"+date+\".nc\",\"r\")\ncdf_file2= addfile(grid_dir+\"/METCRO2D_\"+date+\".nc\",\"r\")\n\ntime = cdf_file1->TFLAG(:,0,:)\no3 = cdf_file1->O3(:,:,:) ;ppb\nwspd10=cdf_file2->WSPD10(:,0,:,:)\nwdir10=cdf_file2->WDIR10(:,0,:,:)\n\ntemp = cdf_file2->TEMP2\n\nnt = dimsizes(o3(:,0,0))\nny = dimsizes(o3(0,:,0))\nnx = dimsizes(o3(0,0,:))\n\nprint(max(temp))\nprint(min(temp))\nprint(avg(temp))\n\n\n\nprint(nt+\" \"+ny+\" \"+nx)\nprint(max(o3))\nprint(min(o3))\nprint(avg(o3))\n\nlat = cdf_file->LAT(0,0,:,:)\nlon = cdf_file->LON(0,0,:,:)\n\no3@lat2d = lat\no3@lon2d = lon\no3@unit = \"ppbv\"\n\nUV10=wind_component(wspd10,wdir10,0)\nUV10@lat2d = lat\nUV10@lon2d = lon\n\n\nres = True\nres@gsnMaximize = True ; maximize pot in frame\nres@gsnFrame = False ; dont advance frame\nres@gsnDraw = False\nres@gsnLeftString = \"\"\nres@gsnRightString = \"\"\nres@txFont = \"times-roman\"\nres@tiMainFont = \"times-roman\"\n;res@tiMainFontHeightF = 0.02\n;res@vpWidthF = 0.7\n;res@vpHeightF = 0.7\n\n;;set map;;\nmpres = res\nmpres@mpLimitMode = \"LatLon\"\nmpres@mpDataSetName = \"Earth..4\"\nmpres@mpDataBaseVersion = \"MediumRes\"\nmpres@mpOutlineOn = True\nmpres@mpGeophysicalLineThicknessF = 1.5\nmpres@mpFillDrawOrder = \"PostDraw\"\nmpres@mpFillOn = False\nmpres@mpAreaMaskingOn = True\nmpres@mpOutlineBoundarySets = \"GeophysicalAndUSStates\"\nmpres@mpOutlineSpecifiers = \"United States:States\"\nmpres@mpProjection = \"LambertConformal\"\nmpres@mpLambertParallel1F = 33.\nmpres@mpLambertParallel2F = 45.\nmpres@mpLambertMeridianF = -98.\nmpres@mpMinLonF = -120 ;min(lon)+0.2\nmpres@mpMaxLonF = -70 ;max(lon)-0.2\nmpres@mpMinLatF = 25 ;min(lat)+0.05\nmpres@mpMaxLatF = 50 ;max(lat)-0.05\nmpres@pmTickMarkDisplayMode = \"Always\"\nmpres@mpLandFillColor = \"white\"\nmpres@mpInlandWaterFillColor = \"white\"\nmpres@mpOceanFillColor = \"white\"\nmpres@mpGeophysicalLineColor = \"Black\"\n\n;mpres@lbLabelAutoStride = True\nmpres@tiXAxisFont = 25\nmpres@pmTickMarkDisplayMode = \"Always\"\nmpres@tmXBLabelFont = 25\nmpres@tmXBLabelFontHeightF = 0.013\nmpres@tmXBLabelDeltaF = -0.5\nmpres@tmYLLabelFont = 25\nmpres@tmYLLabelFontHeightF = 0.013\nmpres@tmXBLabelDeltaF = -0.5\nmpres@tmXTLabelsOn = False\nmpres@tmXTLabelFont = 25\nmpres@tmXTLabelFontHeightF = 0.013\nmpres@tmYRLabelsOn = False\nmpres@tmYRLabelFont = 25\nmpres@tmYRLabelFontHeightF = 0.013\n\n;;set contour;;\ncnres = res\ncnres@cnFillDrawOrder = \"PreDraw\"\ncnres@cnFillOn = True\ncnres@cnLinesOn = False\ncnres@cnLineLabelsOn = False\ncnres@lbLabelFont = 25\ncnres@lbLabelFontHeightF = 0.013\ncnres@tiXAxisFont = 25\ncnres@pmLabelBarWidthF = 0.5\ncnres@pmLabelBarHeightF = 0.1\n;cnres@pmLabelBarOrthogonalPosF = -0.02\ncnres@lbLabelAutoStride = True\n\n;set vector;;\nres_vc = res\nres_vc@vcGlyphStyle = \"LineArrow\"\nres_vc@vcLineArrowThicknessF = 3\nres_vc@vcMinDistanceF = 0.03\nres_vc@vcRefLengthF = 0.03\nres_vc@vcRefAnnoOn = True\nres_vc@vcRefMagnitudeF = 16\nres_vc@vcRefAnnoString1 = \"16m/s\"\nres_vc@vcRefAnnoSide = \"Top\"\nres_vc@vcRefAnnoString2On = False\nres_vc@vcRefAnnoPerimOn = False\nres_vc@vcRefAnnoOrthogonalPosF = 
-0.02\nres_vc@vcRefAnnoParallelPosF = 0.999\n;res_vc@vcRefAnnoBackgroundColor = \"White\"\nres_vc@vcVectorDrawOrder = \"PostDraw\"\n\ndo it = 0, nt-1\n if (it .lt. 12) then\n pdate=d1\n else\n pdate=d2\n end if\n\n ;print(time(it,0)+\" \"+time(it,1))\n rundate = yyyyddd_to_yyyymmdd( time(it,0) )\n runtime = hr( tointeger(time(it,1)/10000) )\n\n site = readAsciiTable(obs_dir+\"/AQF5X_Hourly_\"+rundate+runtime+\".dat\",1,\"string\",1)\n nrows = dimsizes(site)\n sitename = str_get_field(site,1,\",\")\n sitelat = stringtofloat(str_get_field(site,2,\",\"))\n sitelon = stringtofloat(str_get_field(site,3,\",\"))\n O3_obs = stringtofloat(str_get_field(site,4,\",\"))\n\n obslon = sitelon(:,0)\n obslat = sitelat(:,0)\n obsO3 = O3_obs(:,0)\n\n npts = nrows(0)\n\n obsO3@_FillValue = -999.\n\n;--- levels for dividing\n levels_O3 = ispan(0,80,4)\n\n nlevels = dimsizes(levels_O3)\n\n colors = span_color_rgba(\"WhiteBlueGreenYellowRed\",nlevels+1)\n\n num_distinct_markers = nlevels+1 ; number of distinct markers\n lat_O3 = new((/num_distinct_markers,npts/),float)\n lon_O3 = new((/num_distinct_markers,npts/),float)\n lat_O3 = -999\n lon_O3 = -999\n\n\n;\n; Group the points according to which range they fall in. At the\n; same time, create the label that we will use later in the labelbar\n;\n do i = 0, num_distinct_markers-1\n if (i.eq.0) then\n indexes_O3 = ind(obsO3(:).lt.levels_O3(0))\n end if\n if (i.eq.num_distinct_markers-1) then\n indexes_O3 = ind(obsO3(:).ge.max(levels_O3))\n end if\n if (i.gt.0.and.i.lt.num_distinct_markers-1) then\n indexes_O3 = ind(obsO3(:).ge.levels_O3(i-1).and.obsO3(:).lt.levels_O3(i))\n end if\n\n;\n; Now that we have the set of indexes whose values fall within\n; the given range, take the corresponding lat/lon values and store\n; them, so later we can color this set of markers with the appropriate\n; color.\n;\n if (.not.any(ismissing(indexes_O3))) then\n npts_range_O3 = dimsizes(indexes_O3) ; # of points in this range.\n\n lat_O3(i,0:npts_range_O3-1) = obslat(indexes_O3)\n lon_O3(i,0:npts_range_O3-1) = obslon(indexes_O3)\n ;print(\"O3: \"+npts_range_O3)\n end if\n\n\n delete(indexes_O3) ; Necessary b/c \"indexes\" may be a different\n end do\n\n lat_O3@_FillValue = -999\n lon_O3@_FillValue = -999\n\n gsres = True\n gsres@gsMarkerIndex = 16 ; Use filled dots for markers.\n\n hollowres = True\n hollowres@gsMarkerIndex = 4\n hollowres@gsMarkerColor = \"black\"\n hollowres@gsMarkerSizeF = 0.008\n\n;;;;;;;;; Plot Ozone\n pname=plot_dir+\"/OBS-FORECAST_O3_\"+rundate+runtime\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n pmid_O3 = new(num_distinct_markers,graphic)\n hollow_O3 = new(num_distinct_markers,graphic)\n\n cnres@tiMainString = pdate+\" \"+runtime+\" UTC O~B~3~N~ (ppbV)\"\n cnres@cnLevelSelectionMode = \"ManualLevels\"\n cnres@cnMinLevelValF = 0.\n cnres@cnMaxLevelValF = 80\n cnres@cnLevelSpacingF = 4\n\n ;plot = gsn_csm_contour_map(wks,o3(it,:,:),res)\n map = gsn_csm_map(wks,mpres)\n contour = gsn_csm_contour(wks,o3(it,:,:),cnres)\n vector = gsn_csm_vector(wks,UV10(0,it,:,:),UV10(1,it,:,:),res_vc)\n overlay(map,contour)\n overlay(map,vector)\n\n pmid = new(num_distinct_markers,graphic)\n hollow = new(num_distinct_markers,graphic)\n do i = 0, num_distinct_markers-1\n if (.not.ismissing(lat_O3(i,0)))\n gsres@gsMarkerColor = colors(i,:)\n gsres@gsMarkerSizeF = 0.008\n gsres@gsMarkerThicknessF = 1\n pmid(i) = gsn_add_polymarker(wks,vector,lon_O3(i,:),lat_O3(i,:),gsres)\n hollow(i) = 
gsn_add_polymarker(wks,vector,lon_O3(i,:),lat_O3(i,:),hollowres)\n end if\n end do\n\n draw(map)\n frame(wks)\n delete(wks)\n delete(pmid_O3)\n delete(hollow_O3)\n system(\"composite -geometry 100x70+900+900 /groups/ESS/aalnaim/cmaq/mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\n\n\n delete(pmid)\n delete(hollow)\n delete(site)\n delete(sitename)\n delete(sitelat)\n delete(sitelon)\n delete(O3_obs)\n delete(obslon)\n delete(obslat)\n delete(obsO3)\n delete([/lon_O3,lat_O3/])\n\nend do\ndelete(res)\n\n;/\n\nend\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\n\nconvert -delay 100 /groups/ESS/aalnaim/cmaq/plots/OBS*.png /groups/ESS/aalnaim/cmaq/plots/Airnow_$YYYYMMDD_POST.gif\n\nif [ $? -eq 0 ]; then\n echo \"Generating AirNow images/gif Completed Successfully\"\n\techo \"Removing ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nelse\n echo \"Generating AirNow images/gif Failed!\"\n echo \"Removing ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nfi\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\n\n\nVariable: plot_dir\nType: string\nTotal Size: 8 bytes\n 1 values\nNumber of Dimensions: 1\nDimensions and sizes:\t[1]\nCoordinates: \n(0)\t/groups/ESS/aalnaim/cmaq/plots\n(0)\t315.22\n(0)\t263.5478\n(0)\t293.0772\n(0)\t24 265 442\n(0)\t86.04666666666668\n(0)\t6.15\n(0)\t31.61014393784576\n\n\nVariable: plot_dir\nType: string\nTotal Size: 8 bytes\n 1 values\nNumber of Dimensions: 1\nDimensions and sizes:\t[1]\nCoordinates: \n(0)\t/groups/ESS/aalnaim/cmaq/plots\n(0)\t315.22\n(0)\t263.5478\n(0)\t293.0772\n(0)\t24 265 442\n(0)\t86.04666666666668\n(0)\t6.15\n(0)\t31.61014393784576\nwarning:txFont is not a valid resource in map at this time\n\nwarning:gsnDraw is not a valid resource in map at this time\n\nwarning:gsnFrame is not a valid resource in map at this time\n\nwarning:gsnMaximize is not a valid resource in map at this time\n\nwarning:gsnLeftString is not a valid resource in map at this time\n\nwarning:gsnRightString is not a valid resource in map at this time\n\nwarning:txFont is not a valid resource in /OBS-FORECAST_O3_2022061212_contour at this time\n\nwarning:gsnLeftString is not a valid resource in /OBS-FORECAST_O3_2022061212_contour at this time\n\nwarning:gsnRightString is not a valid resource in /OBS-FORECAST_O3_2022061212_contour at this time\n\nwarning:txFont is not a valid resource in map at this time\n\nwarning:gsnDraw is not a valid resource in map at this time\n\nwarning:gsnFrame is not a valid resource in map at this time\n\nwarning:gsnMaximize is not a valid resource in map at this time\n\nwarning:gsnLeftString is not a valid resource in map at this time\n\nwarning:gsnRightString is not a valid resource in map at this time\n\nwarning:txFont is not a valid resource in /OBS-FORECAST_O3_2022061213_contour at this time\n\nwarning:gsnLeftString is not a valid resource in /OBS-FORECAST_O3_2022061213_contour at this time\n\nwarning:gsnRightString is not a valid resource in /OBS-FORECAST_O3_2022061213_contour at this time\n\nwarning:txFont is not a valid resource in map at this time\n\nwarning:gsnDraw is not a valid resource in map at this time\n\nwarning:gsnFrame is not a valid resource in map 
at this time\n\nwarning:gsnMaximize is not a valid resource in map at this time\n\nwarning:gsnLeftString is not a valid resource in map at this time\n\nwarning:gsnRightString is not a valid resource in map at this time\n\nwarning:txFont is not a valid resource in /OBS-FORECAST_O3_2022061214_contour at this time\n\nwarning:gsnLeftString is not a valid resource in /OBS-FORECAST_O3_2022061214_contour at this time\n\nwarning:gsnRightString is not a valid resource in /OBS-FORECAST_O3_2022061214_contour at this time\n\nwarning:txFont is not a valid resource in map at this time\n\nwarning:gsnDraw is not a valid resource in map at this time\n\nwarning:gsnFrame is not a valid resource in map at this time\n\nwarning:gsnMaximize is not a valid resource in map at this time\n\nwarning:gsnLeftString is not a valid resource in map at this time\n\nwarning:gsnRightString is not a valid resource in map at this time\n\nwarning:txFont is not a valid resource in /OBS-FORECAST_O3_2022061215_contour at this time\n\nwarning:gsnLeftString is not a valid resource in /OBS-FORECAST_O3_2022061215_contour at this time\n\nwarning:gsnRightString is not a valid resource in /OBS-FORECAST_O3_2022061215_contour at this time\n\nwarning:txFont is not a valid resource in map at this time\n\nwarning:gsnDraw is not a valid resource in map at this time\n\nwarning:gsnFrame is not a valid resource in map at this time\n\nwarning:gsnMaximize is not a valid resource in map at this time\n\nwarning:gsnLeftString is not a valid resource in map at this time\n\nwarning:gsnRightString is not a valid resource in map at this time\n\nwarning:txFont is not a valid resource in /OBS-FORECAST_O3_2022061216_contour at this time\n\nwarning:gsnLeftString is not a valid resource in /OBS-FORECAST_O3_2022061216_contour at this time\n\nwarning:gsnRightString is not a valid resource in /OBS-FORECAST_O3_2022061216_contour at this time\n\nwarning:txFont is not a valid resource in map at this time\n\nwarning:gsnDraw is not a valid resource in map at this time\n\nwarning:gsnFrame is not a valid resource in map at this time\n\nwarning:gsnMaximize is not a valid resource in map at this time\n\nwarning:gsnLeftString is not a valid resource in map at this time\n\nwarning:gsnRightString is not a valid resource in map at this time\n\nwarning:txFont is not a valid resource in /OBS-FORECAST_O3_2022061217_contour at this time\n\nwarning:gsnLeftString is not a valid resource in /OBS-FORECAST_O3_2022061217_contour at this time\n\nwarning:gsnRightString is not a valid resource in /OBS-FORECAST_O3_2022061217_contour at this time\n\nwarning:txFont is not a valid resource in map at this time\n\nwarning:gsnDraw is not a valid resource in map at this time\n\nwarning:gsnFrame is not a valid resource in map at this time\n\nwarning:gsnMaximize is not a valid resource in map at this time\n\nwarning:gsnLeftString is not a valid resource in map at this time\n\nwarning:gsnRightString is not a valid resource in map at this time\n\nwarning:txFont is not a valid resource in /OBS-FORECAST_O3_2022061218_contour at this time\n\nwarning:gsnLeftString is not a valid resource in /OBS-FORECAST_O3_2022061218_contour at this time\n\nwarning:gsnRightString is not a valid resource in /OBS-FORECAST_O3_2022061218_contour at this time\n\nwarning:txFont is not a valid resource in map at this time\n\nwarning:gsnDraw is not a valid resource in map at this time\n\nwarning:gsnFrame is not a valid resource in map at this time\n\nwarning:gsnMaximize is not a valid resource in map at this 
time\n\nwarning:gsnLeftString is not a valid resource in /OBS-FORECAST_O3_2022061310_contour at this time\n\nwarning:gsnRightString is not a valid resource in /OBS-FORECAST_O3_2022061310_contour at this time\n\nwarning:txFont is not a valid resource in map at this time\n\nwarning:gsnDraw is not a valid resource in map at this time\n\nwarning:gsnFrame is not a valid resource in map at this time\n\nwarning:gsnMaximize is not a valid resource in map at this time\n\nwarning:gsnLeftString is not a valid resource in map at this time\n\nwarning:gsnRightString is not a valid resource in map at this time\n\nwarning:txFont is not a valid resource in /OBS-FORECAST_O3_2022061311_contour at this time\n\nwarning:gsnLeftString is not a valid resource in /OBS-FORECAST_O3_2022061311_contour at this time\n\nwarning:gsnRightString is not a valid resource in /OBS-FORECAST_O3_2022061311_contour at this time\n\nGenerating AirNow images/gif Completed Successfully\nRemoving ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\n", + "history_begin_time" : 1655842272330, + "history_end_time" : 1656318424584, + "history_notes" : null, + "history_process" : "is1w3m", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "YNHDn9kOFdkZ", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=20220613 #This needs to be auto date\nexport stdate_post=2022-06-13 #This needs to be auto date\nexport eddate_post=2022-06-14 #This needs to be auto date\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files\"\nexport mcip_dir=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km\"\nexport graph_dir=\"/groups/ESS/aalnaim/cmaq/plots\"\n\nexport obs_dir_NCL=\"/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X\"\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\n\ndate = getenv(\"YYYYMMDD_POST\") \nd1 = getenv(\"stdate_post\") \nd2 = getenv(\"eddate_post\") \n\nobs_dir = getenv(\"obs_dir_NCL\")\nplot_dir = getenv(\"graph_dir\") \n\nhr=new(24,\"string\")\nhr=(/\"00\",\"01\",\"02\",\"03\",\"04\",\"05\",\"06\",\"07\",\"08\",\"09\",\"10\",\"11\",\"12\",\"13\",\"14\",\"15\",\"16\",\"17\",\"18\",\"19\",\"20\",\"21\",\"22\",\"23\"/)\n\nprint(plot_dir)\naconc_dir = getenv(\"postdata_dir\") \ngrid_dir = getenv(\"mcip_dir\") \n\ncdf_file1 = addfile(aconc_dir+\"/COMBINE3D_ACONC_v531_gcc_AQF5X_20220612_ML_extracted.nc\",\"r\")\ncdf_file= addfile(grid_dir+\"/GRIDCRO2D_\"+date+\".nc\",\"r\")\ncdf_file2= addfile(grid_dir+\"/METCRO2D_\"+date+\".nc\",\"r\")\n\ntime = cdf_file1->TFLAG(:,0,:)\no3 = cdf_file1->O3(:,:,:) ;ppb\nwspd10=cdf_file2->WSPD10(:,0,:,:)\nwdir10=cdf_file2->WDIR10(:,0,:,:)\n\ntemp = cdf_file2->TEMP2\n\nnt = dimsizes(o3(:,0,0))\nny = dimsizes(o3(0,:,0))\nnx = dimsizes(o3(0,0,:))\n\nprint(max(temp))\nprint(min(temp))\nprint(avg(temp))\n\n\n\nprint(nt+\" \"+ny+\" \"+nx)\nprint(max(o3))\nprint(min(o3))\nprint(avg(o3))\n\nlat = cdf_file->LAT(0,0,:,:)\nlon = cdf_file->LON(0,0,:,:)\n\no3@lat2d = lat\no3@lon2d = lon\no3@unit = 
\"ppbv\"\n\nUV10=wind_component(wspd10,wdir10,0)\nUV10@lat2d = lat\nUV10@lon2d = lon\n\n\nres = True\nres@gsnMaximize = True ; maximize pot in frame\nres@gsnFrame = False ; dont advance frame\nres@gsnDraw = False\nres@gsnLeftString = \"\"\nres@gsnRightString = \"\"\nres@txFont = \"times-roman\"\nres@tiMainFont = \"times-roman\"\n;res@tiMainFontHeightF = 0.02\n;res@vpWidthF = 0.7\n;res@vpHeightF = 0.7\n\n;;set map;;\nmpres = res\nmpres@mpLimitMode = \"LatLon\"\nmpres@mpDataSetName = \"Earth..4\"\nmpres@mpDataBaseVersion = \"MediumRes\"\nmpres@mpOutlineOn = True\nmpres@mpGeophysicalLineThicknessF = 1.5\nmpres@mpFillDrawOrder = \"PostDraw\"\nmpres@mpFillOn = False\nmpres@mpAreaMaskingOn = True\nmpres@mpOutlineBoundarySets = \"GeophysicalAndUSStates\"\nmpres@mpOutlineSpecifiers = \"United States:States\"\nmpres@mpProjection = \"LambertConformal\"\nmpres@mpLambertParallel1F = 33.\nmpres@mpLambertParallel2F = 45.\nmpres@mpLambertMeridianF = -98.\nmpres@mpMinLonF = -120 ;min(lon)+0.2\nmpres@mpMaxLonF = -70 ;max(lon)-0.2\nmpres@mpMinLatF = 25 ;min(lat)+0.05\nmpres@mpMaxLatF = 50 ;max(lat)-0.05\nmpres@pmTickMarkDisplayMode = \"Always\"\nmpres@mpLandFillColor = \"white\"\nmpres@mpInlandWaterFillColor = \"white\"\nmpres@mpOceanFillColor = \"white\"\nmpres@mpGeophysicalLineColor = \"Black\"\n\n;mpres@lbLabelAutoStride = True\nmpres@tiXAxisFont = 25\nmpres@pmTickMarkDisplayMode = \"Always\"\nmpres@tmXBLabelFont = 25\nmpres@tmXBLabelFontHeightF = 0.013\nmpres@tmXBLabelDeltaF = -0.5\nmpres@tmYLLabelFont = 25\nmpres@tmYLLabelFontHeightF = 0.013\nmpres@tmXBLabelDeltaF = -0.5\nmpres@tmXTLabelsOn = False\nmpres@tmXTLabelFont = 25\nmpres@tmXTLabelFontHeightF = 0.013\nmpres@tmYRLabelsOn = False\nmpres@tmYRLabelFont = 25\nmpres@tmYRLabelFontHeightF = 0.013\n\n;;set contour;;\ncnres = res\ncnres@cnFillDrawOrder = \"PreDraw\"\ncnres@cnFillOn = True\ncnres@cnLinesOn = False\ncnres@cnLineLabelsOn = False\ncnres@lbLabelFont = 25\ncnres@lbLabelFontHeightF = 0.013\ncnres@tiXAxisFont = 25\ncnres@pmLabelBarWidthF = 0.5\ncnres@pmLabelBarHeightF = 0.1\n;cnres@pmLabelBarOrthogonalPosF = -0.02\ncnres@lbLabelAutoStride = True\n\n;set vector;;\nres_vc = res\nres_vc@vcGlyphStyle = \"LineArrow\"\nres_vc@vcLineArrowThicknessF = 3\nres_vc@vcMinDistanceF = 0.03\nres_vc@vcRefLengthF = 0.03\nres_vc@vcRefAnnoOn = True\nres_vc@vcRefMagnitudeF = 16\nres_vc@vcRefAnnoString1 = \"16m/s\"\nres_vc@vcRefAnnoSide = \"Top\"\nres_vc@vcRefAnnoString2On = False\nres_vc@vcRefAnnoPerimOn = False\nres_vc@vcRefAnnoOrthogonalPosF = -0.02\nres_vc@vcRefAnnoParallelPosF = 0.999\n;res_vc@vcRefAnnoBackgroundColor = \"White\"\nres_vc@vcVectorDrawOrder = \"PostDraw\"\n\ndo it = 0, nt-1\n if (it .lt. 
12) then\n pdate=d1\n else\n pdate=d2\n end if\n\n ;print(time(it,0)+\" \"+time(it,1))\n rundate = yyyyddd_to_yyyymmdd( time(it,0) )\n runtime = hr( tointeger(time(it,1)/10000) )\n\n site = readAsciiTable(obs_dir+\"/AQF5X_Hourly_\"+rundate+runtime+\".dat\",1,\"string\",1)\n nrows = dimsizes(site)\n sitename = str_get_field(site,1,\",\")\n sitelat = stringtofloat(str_get_field(site,2,\",\"))\n sitelon = stringtofloat(str_get_field(site,3,\",\"))\n O3_obs = stringtofloat(str_get_field(site,4,\",\"))\n\n obslon = sitelon(:,0)\n obslat = sitelat(:,0)\n obsO3 = O3_obs(:,0)\n\n npts = nrows(0)\n\n obsO3@_FillValue = -999.\n\n;--- levels for dividing\n levels_O3 = ispan(0,80,4)\n\n nlevels = dimsizes(levels_O3)\n\n colors = span_color_rgba(\"WhiteBlueGreenYellowRed\",nlevels+1)\n\n num_distinct_markers = nlevels+1 ; number of distinct markers\n lat_O3 = new((/num_distinct_markers,npts/),float)\n lon_O3 = new((/num_distinct_markers,npts/),float)\n lat_O3 = -999\n lon_O3 = -999\n\n\n;\n; Group the points according to which range they fall in. At the\n; same time, create the label that we will use later in the labelbar\n;\n do i = 0, num_distinct_markers-1\n if (i.eq.0) then\n indexes_O3 = ind(obsO3(:).lt.levels_O3(0))\n end if\n if (i.eq.num_distinct_markers-1) then\n indexes_O3 = ind(obsO3(:).ge.max(levels_O3))\n end if\n if (i.gt.0.and.i.lt.num_distinct_markers-1) then\n indexes_O3 = ind(obsO3(:).ge.levels_O3(i-1).and.obsO3(:).lt.levels_O3(i))\n end if\n\n;\n; Now that we have the set of indexes whose values fall within\n; the given range, take the corresponding lat/lon values and store\n; them, so later we can color this set of markers with the appropriate\n; color.\n;\n if (.not.any(ismissing(indexes_O3))) then\n npts_range_O3 = dimsizes(indexes_O3) ; # of points in this range.\n\n lat_O3(i,0:npts_range_O3-1) = obslat(indexes_O3)\n lon_O3(i,0:npts_range_O3-1) = obslon(indexes_O3)\n ;print(\"O3: \"+npts_range_O3)\n end if\n\n\n delete(indexes_O3) ; Necessary b/c \"indexes\" may be a different\n end do\n\n lat_O3@_FillValue = -999\n lon_O3@_FillValue = -999\n\n gsres = True\n gsres@gsMarkerIndex = 16 ; Use filled dots for markers.\n\n hollowres = True\n hollowres@gsMarkerIndex = 4\n hollowres@gsMarkerColor = \"black\"\n hollowres@gsMarkerSizeF = 0.008\n\n;;;;;;;;; Plot Ozone\n pname=plot_dir+\"/OBS-FORECAST_O3_\"+rundate+runtime\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n pmid_O3 = new(num_distinct_markers,graphic)\n hollow_O3 = new(num_distinct_markers,graphic)\n\n cnres@tiMainString = pdate+\" \"+runtime+\" UTC O~B~3~N~ (ppbV)\"\n cnres@cnLevelSelectionMode = \"ManualLevels\"\n cnres@cnMinLevelValF = 0.\n cnres@cnMaxLevelValF = 80\n cnres@cnLevelSpacingF = 4\n\n ;plot = gsn_csm_contour_map(wks,o3(it,:,:),res)\n map = gsn_csm_map(wks,mpres)\n contour = gsn_csm_contour(wks,o3(it,:,:),cnres)\n vector = gsn_csm_vector(wks,UV10(0,it,:,:),UV10(1,it,:,:),res_vc)\n overlay(map,contour)\n overlay(map,vector)\n\n pmid = new(num_distinct_markers,graphic)\n hollow = new(num_distinct_markers,graphic)\n do i = 0, num_distinct_markers-1\n if (.not.ismissing(lat_O3(i,0)))\n gsres@gsMarkerColor = colors(i,:)\n gsres@gsMarkerSizeF = 0.008\n gsres@gsMarkerThicknessF = 1\n pmid(i) = gsn_add_polymarker(wks,vector,lon_O3(i,:),lat_O3(i,:),gsres)\n hollow(i) = gsn_add_polymarker(wks,vector,lon_O3(i,:),lat_O3(i,:),hollowres)\n end if\n end do\n\n draw(map)\n frame(wks)\n delete(wks)\n delete(pmid_O3)\n delete(hollow_O3)\n system(\"composite -geometry 100x70+900+900 
/groups/ESS/aalnaim/cmaq/mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\n\n\n delete(pmid)\n delete(hollow)\n delete(site)\n delete(sitename)\n delete(sitelat)\n delete(sitelon)\n delete(O3_obs)\n delete(obslon)\n delete(obslat)\n delete(obsO3)\n delete([/lon_O3,lat_O3/])\n\nend do\ndelete(res)\n\n;/\n\nend\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\n\nconvert -delay 100 OBS*.png Airnow_$YYYYMMDD_POST.gif\n\nif [ $? -eq 0 ]; then\n echo \"Generating AirNow images/gif Completed Successfully\"\n\techo \"Removing ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nelse\n echo \"Generating AirNow images/gif Failed!\"\nfi\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\n\n\nVariable: plot_dir\nType: string\nTotal Size: 8 bytes\n 1 values\nNumber of Dimensions: 1\nDimensions and sizes:\t[1]\nCoordinates: \n(0)\t/groups/ESS/aalnaim/cmaq/plots\n(0)\t315.22\n(0)\t263.5478\n(0)\t293.0772\n(0)\t24 265 442\n(0)\t86.04666666666668\n(0)\t6.15\n(0)\t31.61014393784576\nconvert: unable to open image `OBS*.png': No such file or directory @ error/blob.c/OpenBlob/2881.\nconvert: no images defined `Airnow_20220613.gif' @ error/convert.c/ConvertImageCommand/3226.\nGenerating AirNow images/gif Failed!\n", + "history_begin_time" : 1655836870327, + "history_end_time" : 1656318423569, + "history_notes" : null, + "history_process" : "is1w3m", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "RT2Xn52NHxsQ", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=20220612 #This needs to be auto date\nexport stdate_post=2022-06-13 #This needs to be auto date\nexport eddate_post=2022-06-14 #This needs to be auto date\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files\"\nexport mcip_dir=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km\"\nexport graph_dir=\"/groups/ESS/aalnaim/cmaq/plots\"\n\nexport obs_dir_NCL=\"/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X\"\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\n\ndate = getenv(\"YYYYMMDD_POST\") \nd1 = getenv(\"stdate_post\") \nd2 = getenv(\"eddate_post\") \n\nobs_dir = getenv(\"obs_dir_NCL\")\nplot_dir = getenv(\"graph_dir\") \n\nhr=new(24,\"string\")\nhr=(/\"00\",\"01\",\"02\",\"03\",\"04\",\"05\",\"06\",\"07\",\"08\",\"09\",\"10\",\"11\",\"12\",\"13\",\"14\",\"15\",\"16\",\"17\",\"18\",\"19\",\"20\",\"21\",\"22\",\"23\"/)\n\nprint(plot_dir)\naconc_dir = getenv(\"postdata_dir\") \ngrid_dir = getenv(\"mcip_dir\") \n\ncdf_file1 = addfile(aconc_dir+\"/COMBINE3D_ACONC_v531_gcc_AQF5X_\"+date+\"_ML_extracted.nc\",\"r\")\ncdf_file= addfile(grid_dir+\"/GRIDCRO2D_\"+date+\".nc\",\"r\")\ncdf_file2= addfile(grid_dir+\"/METCRO2D_\"+date+\".nc\",\"r\")\n\ntime = 
cdf_file1->TFLAG(:,0,:)\no3 = cdf_file1->O3(:,:,:) ;ppb\nwspd10=cdf_file2->WSPD10(:,0,:,:)\nwdir10=cdf_file2->WDIR10(:,0,:,:)\n\ntemp = cdf_file2->TEMP2\n\nnt = dimsizes(o3(:,0,0))\nny = dimsizes(o3(0,:,0))\nnx = dimsizes(o3(0,0,:))\n\nprint(max(temp))\nprint(min(temp))\nprint(avg(temp))\n\n\n\nprint(nt+\" \"+ny+\" \"+nx)\nprint(max(o3))\nprint(min(o3))\nprint(avg(o3))\n\nlat = cdf_file->LAT(0,0,:,:)\nlon = cdf_file->LON(0,0,:,:)\n\no3@lat2d = lat\no3@lon2d = lon\no3@unit = \"ppbv\"\n\nUV10=wind_component(wspd10,wdir10,0)\nUV10@lat2d = lat\nUV10@lon2d = lon\n\n\nres = True\nres@gsnMaximize = True ; maximize pot in frame\nres@gsnFrame = False ; dont advance frame\nres@gsnDraw = False\nres@gsnLeftString = \"\"\nres@gsnRightString = \"\"\nres@txFont = \"times-roman\"\nres@tiMainFont = \"times-roman\"\n;res@tiMainFontHeightF = 0.02\n;res@vpWidthF = 0.7\n;res@vpHeightF = 0.7\n\n;;set map;;\nmpres = res\nmpres@mpLimitMode = \"LatLon\"\nmpres@mpDataSetName = \"Earth..4\"\nmpres@mpDataBaseVersion = \"MediumRes\"\nmpres@mpOutlineOn = True\nmpres@mpGeophysicalLineThicknessF = 1.5\nmpres@mpFillDrawOrder = \"PostDraw\"\nmpres@mpFillOn = False\nmpres@mpAreaMaskingOn = True\nmpres@mpOutlineBoundarySets = \"GeophysicalAndUSStates\"\nmpres@mpOutlineSpecifiers = \"United States:States\"\nmpres@mpProjection = \"LambertConformal\"\nmpres@mpLambertParallel1F = 33.\nmpres@mpLambertParallel2F = 45.\nmpres@mpLambertMeridianF = -98.\nmpres@mpMinLonF = -120 ;min(lon)+0.2\nmpres@mpMaxLonF = -70 ;max(lon)-0.2\nmpres@mpMinLatF = 25 ;min(lat)+0.05\nmpres@mpMaxLatF = 50 ;max(lat)-0.05\nmpres@pmTickMarkDisplayMode = \"Always\"\nmpres@mpLandFillColor = \"white\"\nmpres@mpInlandWaterFillColor = \"white\"\nmpres@mpOceanFillColor = \"white\"\nmpres@mpGeophysicalLineColor = \"Black\"\n\n;mpres@lbLabelAutoStride = True\nmpres@tiXAxisFont = 25\nmpres@pmTickMarkDisplayMode = \"Always\"\nmpres@tmXBLabelFont = 25\nmpres@tmXBLabelFontHeightF = 0.013\nmpres@tmXBLabelDeltaF = -0.5\nmpres@tmYLLabelFont = 25\nmpres@tmYLLabelFontHeightF = 0.013\nmpres@tmXBLabelDeltaF = -0.5\nmpres@tmXTLabelsOn = False\nmpres@tmXTLabelFont = 25\nmpres@tmXTLabelFontHeightF = 0.013\nmpres@tmYRLabelsOn = False\nmpres@tmYRLabelFont = 25\nmpres@tmYRLabelFontHeightF = 0.013\n\n;;set contour;;\ncnres = res\ncnres@cnFillDrawOrder = \"PreDraw\"\ncnres@cnFillOn = True\ncnres@cnLinesOn = False\ncnres@cnLineLabelsOn = False\ncnres@lbLabelFont = 25\ncnres@lbLabelFontHeightF = 0.013\ncnres@tiXAxisFont = 25\ncnres@pmLabelBarWidthF = 0.5\ncnres@pmLabelBarHeightF = 0.1\n;cnres@pmLabelBarOrthogonalPosF = -0.02\ncnres@lbLabelAutoStride = True\n\n;set vector;;\nres_vc = res\nres_vc@vcGlyphStyle = \"LineArrow\"\nres_vc@vcLineArrowThicknessF = 3\nres_vc@vcMinDistanceF = 0.03\nres_vc@vcRefLengthF = 0.03\nres_vc@vcRefAnnoOn = True\nres_vc@vcRefMagnitudeF = 16\nres_vc@vcRefAnnoString1 = \"16m/s\"\nres_vc@vcRefAnnoSide = \"Top\"\nres_vc@vcRefAnnoString2On = False\nres_vc@vcRefAnnoPerimOn = False\nres_vc@vcRefAnnoOrthogonalPosF = -0.02\nres_vc@vcRefAnnoParallelPosF = 0.999\n;res_vc@vcRefAnnoBackgroundColor = \"White\"\nres_vc@vcVectorDrawOrder = \"PostDraw\"\n\ndo it = 0, nt-1\n if (it .lt. 
12) then\n pdate=d1\n else\n pdate=d2\n end if\n\n ;print(time(it,0)+\" \"+time(it,1))\n rundate = yyyyddd_to_yyyymmdd( time(it,0) )\n runtime = hr( tointeger(time(it,1)/10000) )\n\n site = readAsciiTable(obs_dir+\"/AQF5X_Hourly_\"+rundate+runtime+\".dat\",1,\"string\",1)\n nrows = dimsizes(site)\n sitename = str_get_field(site,1,\",\")\n sitelat = stringtofloat(str_get_field(site,2,\",\"))\n sitelon = stringtofloat(str_get_field(site,3,\",\"))\n O3_obs = stringtofloat(str_get_field(site,4,\",\"))\n\n obslon = sitelon(:,0)\n obslat = sitelat(:,0)\n obsO3 = O3_obs(:,0)\n\n npts = nrows(0)\n\n obsO3@_FillValue = -999.\n\n;--- levels for dividing\n levels_O3 = ispan(0,80,4)\n\n nlevels = dimsizes(levels_O3)\n\n colors = span_color_rgba(\"WhiteBlueGreenYellowRed\",nlevels+1)\n\n num_distinct_markers = nlevels+1 ; number of distinct markers\n lat_O3 = new((/num_distinct_markers,npts/),float)\n lon_O3 = new((/num_distinct_markers,npts/),float)\n lat_O3 = -999\n lon_O3 = -999\n\n\n;\n; Group the points according to which range they fall in. At the\n; same time, create the label that we will use later in the labelbar\n;\n do i = 0, num_distinct_markers-1\n if (i.eq.0) then\n indexes_O3 = ind(obsO3(:).lt.levels_O3(0))\n end if\n if (i.eq.num_distinct_markers-1) then\n indexes_O3 = ind(obsO3(:).ge.max(levels_O3))\n end if\n if (i.gt.0.and.i.lt.num_distinct_markers-1) then\n indexes_O3 = ind(obsO3(:).ge.levels_O3(i-1).and.obsO3(:).lt.levels_O3(i))\n end if\n\n;\n; Now that we have the set of indexes whose values fall within\n; the given range, take the corresponding lat/lon values and store\n; them, so later we can color this set of markers with the appropriate\n; color.\n;\n if (.not.any(ismissing(indexes_O3))) then\n npts_range_O3 = dimsizes(indexes_O3) ; # of points in this range.\n\n lat_O3(i,0:npts_range_O3-1) = obslat(indexes_O3)\n lon_O3(i,0:npts_range_O3-1) = obslon(indexes_O3)\n ;print(\"O3: \"+npts_range_O3)\n end if\n\n\n delete(indexes_O3) ; Necessary b/c \"indexes\" may be a different\n end do\n\n lat_O3@_FillValue = -999\n lon_O3@_FillValue = -999\n\n gsres = True\n gsres@gsMarkerIndex = 16 ; Use filled dots for markers.\n\n hollowres = True\n hollowres@gsMarkerIndex = 4\n hollowres@gsMarkerColor = \"black\"\n hollowres@gsMarkerSizeF = 0.008\n\n;;;;;;;;; Plot Ozone\n pname=plot_dir+\"/OBS-FORECAST_O3_\"+rundate+runtime\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n pmid_O3 = new(num_distinct_markers,graphic)\n hollow_O3 = new(num_distinct_markers,graphic)\n\n cnres@tiMainString = pdate+\" \"+runtime+\" UTC O~B~3~N~ (ppbV)\"\n cnres@cnLevelSelectionMode = \"ManualLevels\"\n cnres@cnMinLevelValF = 0.\n cnres@cnMaxLevelValF = 80\n cnres@cnLevelSpacingF = 4\n\n ;plot = gsn_csm_contour_map(wks,o3(it,:,:),res)\n map = gsn_csm_map(wks,mpres)\n contour = gsn_csm_contour(wks,o3(it,:,:),cnres)\n vector = gsn_csm_vector(wks,UV10(0,it,:,:),UV10(1,it,:,:),res_vc)\n overlay(map,contour)\n overlay(map,vector)\n\n pmid = new(num_distinct_markers,graphic)\n hollow = new(num_distinct_markers,graphic)\n do i = 0, num_distinct_markers-1\n if (.not.ismissing(lat_O3(i,0)))\n gsres@gsMarkerColor = colors(i,:)\n gsres@gsMarkerSizeF = 0.008\n gsres@gsMarkerThicknessF = 1\n pmid(i) = gsn_add_polymarker(wks,vector,lon_O3(i,:),lat_O3(i,:),gsres)\n hollow(i) = gsn_add_polymarker(wks,vector,lon_O3(i,:),lat_O3(i,:),hollowres)\n end if\n end do\n\n draw(map)\n frame(wks)\n delete(wks)\n delete(pmid_O3)\n delete(hollow_O3)\n system(\"composite -geometry 100x70+900+900 
/groups/ESS/aalnaim/cmaq/mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\n\n\n delete(pmid)\n delete(hollow)\n delete(site)\n delete(sitename)\n delete(sitelat)\n delete(sitelon)\n delete(O3_obs)\n delete(obslon)\n delete(obslat)\n delete(obsO3)\n delete([/lon_O3,lat_O3/])\n\nend do\ndelete(res)\n\n;/\n\nend\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\n\n# convert -delay 100 *.png 20220613_20220614.gif\n\nif [ $? -eq 0 ]; then\n echo \"Generating AirNow images/gif Completed Successfully\"\n\techo \"Removing ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nelse\n echo \"Generating AirNow images/gif Failed!\"\nfi\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\n\n\nVariable: plot_dir\nType: string\nTotal Size: 8 bytes\n 1 values\nNumber of Dimensions: 1\nDimensions and sizes:\t[1]\nCoordinates: \n(0)\t/groups/ESS/aalnaim/cmaq/plots\n(0)\t318.4477\n(0)\t267.5703\n(0)\t293.6953\n(0)\t24 265 442\n(0)\t86.04666666666668\n(0)\t6.15\n(0)\t31.61014393784576\nGenerating AirNow images/gif Completed Successfully\nRemoving ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\n", + "history_begin_time" : 1655773051626, + "history_end_time" : 1656318423096, + "history_notes" : null, + "history_process" : "is1w3m", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "uWNqvwKjwhZ4", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=20220612 #This needs to be auto date\nexport stdate_post=2022-06-13 #This needs to be auto date\nexport eddate_post=2022-06-14 #This needs to be auto date\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files\"\nexport mcip_dir=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km\"\nexport graph_dir=\"/groups/ESS/aalnaim/cmaq/plots\"\n\nexport obs_dir_NCL=\"/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X\"\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\n\ndate = getenv(\"YYYYMMDD_POST\") \nd1 = getenv(\"stdate_post\") \nd2 = getenv(\"eddate_post\") \n\nobs_dir = getenv(\"obs_dir_NCL\")\nplot_dir = getenv(\"graph_dir\") \n\nhr=new(24,\"string\")\nhr=(/\"00\",\"01\",\"02\",\"03\",\"04\",\"05\",\"06\",\"07\",\"08\",\"09\",\"10\",\"11\",\"12\",\"13\",\"14\",\"15\",\"16\",\"17\",\"18\",\"19\",\"20\",\"21\",\"22\",\"23\"/)\n\nprint(plot_dir)\naconc_dir = getenv(\"postdata_dir\") \ngrid_dir = getenv(\"mcip_dir\") \n\ncdf_file1 = addfile(aconc_dir+\"/COMBINE3D_ACONC_v531_gcc_AQF5X_\"+date+\"_ML_extracted.nc\",\"r\")\ncdf_file= addfile(grid_dir+\"/GRIDCRO2D_\"+date+\".nc\",\"r\")\ncdf_file2= addfile(grid_dir+\"/METCRO2D_\"+date+\".nc\",\"r\")\n\ntime = cdf_file1->TFLAG(:,0,:)\no3 = cdf_file1->O3(:,:,:) 
;ppb\nwspd10=cdf_file2->WSPD10(:,0,:,:)\nwdir10=cdf_file2->WDIR10(:,0,:,:)\n\ntemp = cdf_file2->TEMP2\n\nnt = dimsizes(o3(:,0,0))\nny = dimsizes(o3(0,:,0))\nnx = dimsizes(o3(0,0,:))\n\nprint(max(temp))\nprint(min(temp))\nprint(avg(temp))\n\n\n\nprint(nt+\" \"+ny+\" \"+nx)\nprint(max(o3))\nprint(min(o3))\nprint(avg(o3))\nprint(max(co))\nprint(min(co))\nprint(avg(co))\n\nlat = cdf_file->LAT(0,0,:,:)\nlon = cdf_file->LON(0,0,:,:)\n\no3@lat2d = lat\no3@lon2d = lon\no3@unit = \"ppbv\"\n\nUV10=wind_component(wspd10,wdir10,0)\nUV10@lat2d = lat\nUV10@lon2d = lon\n\n\nres = True\nres@gsnMaximize = True ; maximize pot in frame\nres@gsnFrame = False ; dont advance frame\nres@gsnDraw = False\nres@gsnLeftString = \"\"\nres@gsnRightString = \"\"\nres@txFont = \"times-roman\"\nres@tiMainFont = \"times-roman\"\n;res@tiMainFontHeightF = 0.02\n;res@vpWidthF = 0.7\n;res@vpHeightF = 0.7\n\n;;set map;;\nmpres = res\nmpres@mpLimitMode = \"LatLon\"\nmpres@mpDataSetName = \"Earth..4\"\nmpres@mpDataBaseVersion = \"MediumRes\"\nmpres@mpOutlineOn = True\nmpres@mpGeophysicalLineThicknessF = 1.5\nmpres@mpFillDrawOrder = \"PostDraw\"\nmpres@mpFillOn = False\nmpres@mpAreaMaskingOn = True\nmpres@mpOutlineBoundarySets = \"GeophysicalAndUSStates\"\nmpres@mpOutlineSpecifiers = \"United States:States\"\nmpres@mpProjection = \"LambertConformal\"\nmpres@mpLambertParallel1F = 33.\nmpres@mpLambertParallel2F = 45.\nmpres@mpLambertMeridianF = -98.\nmpres@mpMinLonF = -120 ;min(lon)+0.2\nmpres@mpMaxLonF = -70 ;max(lon)-0.2\nmpres@mpMinLatF = 25 ;min(lat)+0.05\nmpres@mpMaxLatF = 50 ;max(lat)-0.05\nmpres@pmTickMarkDisplayMode = \"Always\"\nmpres@mpLandFillColor = \"white\"\nmpres@mpInlandWaterFillColor = \"white\"\nmpres@mpOceanFillColor = \"white\"\nmpres@mpGeophysicalLineColor = \"Black\"\n\n;mpres@lbLabelAutoStride = True\nmpres@tiXAxisFont = 25\nmpres@pmTickMarkDisplayMode = \"Always\"\nmpres@tmXBLabelFont = 25\nmpres@tmXBLabelFontHeightF = 0.013\nmpres@tmXBLabelDeltaF = -0.5\nmpres@tmYLLabelFont = 25\nmpres@tmYLLabelFontHeightF = 0.013\nmpres@tmXBLabelDeltaF = -0.5\nmpres@tmXTLabelsOn = False\nmpres@tmXTLabelFont = 25\nmpres@tmXTLabelFontHeightF = 0.013\nmpres@tmYRLabelsOn = False\nmpres@tmYRLabelFont = 25\nmpres@tmYRLabelFontHeightF = 0.013\n\n;;set contour;;\ncnres = res\ncnres@cnFillDrawOrder = \"PreDraw\"\ncnres@cnFillOn = True\ncnres@cnLinesOn = False\ncnres@cnLineLabelsOn = False\ncnres@lbLabelFont = 25\ncnres@lbLabelFontHeightF = 0.013\ncnres@tiXAxisFont = 25\ncnres@pmLabelBarWidthF = 0.5\ncnres@pmLabelBarHeightF = 0.1\n;cnres@pmLabelBarOrthogonalPosF = -0.02\ncnres@lbLabelAutoStride = True\n\n;set vector;;\nres_vc = res\nres_vc@vcGlyphStyle = \"LineArrow\"\nres_vc@vcLineArrowThicknessF = 3\nres_vc@vcMinDistanceF = 0.03\nres_vc@vcRefLengthF = 0.03\nres_vc@vcRefAnnoOn = True\nres_vc@vcRefMagnitudeF = 16\nres_vc@vcRefAnnoString1 = \"16m/s\"\nres_vc@vcRefAnnoSide = \"Top\"\nres_vc@vcRefAnnoString2On = False\nres_vc@vcRefAnnoPerimOn = False\nres_vc@vcRefAnnoOrthogonalPosF = -0.02\nres_vc@vcRefAnnoParallelPosF = 0.999\n;res_vc@vcRefAnnoBackgroundColor = \"White\"\nres_vc@vcVectorDrawOrder = \"PostDraw\"\n\ndo it = 0, nt-1\n if (it .lt. 
12) then\n pdate=d1\n else\n pdate=d2\n end if\n\n ;print(time(it,0)+\" \"+time(it,1))\n rundate = yyyyddd_to_yyyymmdd( time(it,0) )\n runtime = hr( tointeger(time(it,1)/10000) )\n\n site = readAsciiTable(obs_dir+\"/AQF5X_Hourly_\"+rundate+runtime+\".dat\",1,\"string\",1)\n nrows = dimsizes(site)\n sitename = str_get_field(site,1,\",\")\n sitelat = stringtofloat(str_get_field(site,2,\",\"))\n sitelon = stringtofloat(str_get_field(site,3,\",\"))\n O3_obs = stringtofloat(str_get_field(site,4,\",\"))\n\n obslon = sitelon(:,0)\n obslat = sitelat(:,0)\n obsO3 = O3_obs(:,0)\n\n npts = nrows(0)\n\n obsO3@_FillValue = -999.\n\n;--- levels for dividing\n levels_O3 = ispan(0,80,4)\n\n nlevels = dimsizes(levels_O3)\n\n colors = span_color_rgba(\"WhiteBlueGreenYellowRed\",nlevels+1)\n\n num_distinct_markers = nlevels+1 ; number of distinct markers\n lat_O3 = new((/num_distinct_markers,npts/),float)\n lon_O3 = new((/num_distinct_markers,npts/),float)\n lat_O3 = -999\n lon_O3 = -999\n\n\n;\n; Group the points according to which range they fall in. At the\n; same time, create the label that we will use later in the labelbar\n;\n do i = 0, num_distinct_markers-1\n if (i.eq.0) then\n indexes_O3 = ind(obsO3(:).lt.levels_O3(0))\n end if\n if (i.eq.num_distinct_markers-1) then\n indexes_O3 = ind(obsO3(:).ge.max(levels_O3))\n end if\n if (i.gt.0.and.i.lt.num_distinct_markers-1) then\n indexes_O3 = ind(obsO3(:).ge.levels_O3(i-1).and.obsO3(:).lt.levels_O3(i))\n end if\n\n;\n; Now that we have the set of indexes whose values fall within\n; the given range, take the corresponding lat/lon values and store\n; them, so later we can color this set of markers with the appropriate\n; color.\n;\n if (.not.any(ismissing(indexes_O3))) then\n npts_range_O3 = dimsizes(indexes_O3) ; # of points in this range.\n\n lat_O3(i,0:npts_range_O3-1) = obslat(indexes_O3)\n lon_O3(i,0:npts_range_O3-1) = obslon(indexes_O3)\n ;print(\"O3: \"+npts_range_O3)\n end if\n\n\n delete(indexes_O3) ; Necessary b/c \"indexes\" may be a different\n end do\n\n lat_O3@_FillValue = -999\n lon_O3@_FillValue = -999\n\n gsres = True\n gsres@gsMarkerIndex = 16 ; Use filled dots for markers.\n\n hollowres = True\n hollowres@gsMarkerIndex = 4\n hollowres@gsMarkerColor = \"black\"\n hollowres@gsMarkerSizeF = 0.008\n\n;;;;;;;;; Plot Ozone\n pname=plot_dir+\"/OBS-FORECAST_O3_\"+rundate+runtime\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n pmid_O3 = new(num_distinct_markers,graphic)\n hollow_O3 = new(num_distinct_markers,graphic)\n\n cnres@tiMainString = pdate+\" \"+runtime+\" UTC O~B~3~N~ (ppbV)\"\n cnres@cnLevelSelectionMode = \"ManualLevels\"\n cnres@cnMinLevelValF = 0.\n cnres@cnMaxLevelValF = 80\n cnres@cnLevelSpacingF = 4\n\n ;plot = gsn_csm_contour_map(wks,o3(it,:,:),res)\n map = gsn_csm_map(wks,mpres)\n contour = gsn_csm_contour(wks,o3(it,:,:),cnres)\n vector = gsn_csm_vector(wks,UV10(0,it,:,:),UV10(1,it,:,:),res_vc)\n overlay(map,contour)\n overlay(map,vector)\n\n pmid = new(num_distinct_markers,graphic)\n hollow = new(num_distinct_markers,graphic)\n do i = 0, num_distinct_markers-1\n if (.not.ismissing(lat_O3(i,0)))\n gsres@gsMarkerColor = colors(i,:)\n gsres@gsMarkerSizeF = 0.008\n gsres@gsMarkerThicknessF = 1\n pmid(i) = gsn_add_polymarker(wks,vector,lon_O3(i,:),lat_O3(i,:),gsres)\n hollow(i) = gsn_add_polymarker(wks,vector,lon_O3(i,:),lat_O3(i,:),hollowres)\n end if\n end do\n\n draw(map)\n frame(wks)\n delete(wks)\n delete(pmid_O3)\n delete(hollow_O3)\n system(\"composite -geometry 100x70+900+900 
/groups/ESS/aalnaim/cmaq/mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\n\n\n delete(pmid)\n delete(hollow)\n delete(site)\n delete(sitename)\n delete(sitelat)\n delete(sitelon)\n delete(O3_obs)\n delete(obslon)\n delete(obslat)\n delete(obsO3)\n delete([/lon_O3,lat_O3/])\n\nend do\ndelete(res)\n\n;/\n\nend\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\n\n# convert -delay 100 *.png 20220613_20220614.gif\n\nif [ $? -eq 0 ]; then\n echo \"Generating AirNow images/gif Completed Successfully\"\n\techo \"Removing ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nelse\n echo \"Generating AirNow images/gif Failed!\"\nfi\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\n\n\nVariable: plot_dir\nType: string\nTotal Size: 8 bytes\n 1 values\nNumber of Dimensions: 1\nDimensions and sizes:\t[1]\nCoordinates: \n(0)\t/groups/ESS/aalnaim/cmaq/plots\n(0)\t318.4477\n(0)\t267.5703\n(0)\t293.6953\n(0)\t24 265 442\n(0)\t86.04666666666668\n(0)\t6.15\n(0)\t31.61014393784576\nfatal:Variable (co) is undefined\n\nfatal:[\"Execute.c\":8637]:Execute: Error occurred at or near line 50 in file /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\n\n\nGenerating AirNow images/gif Completed Successfully\nRemoving ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\n", + "history_begin_time" : 1655772983266, + "history_end_time" : 1656318422524, + "history_notes" : null, + "history_process" : "is1w3m", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "CQQfesZ58ol8", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=20220612 #This needs to be auto date\nexport stdate_post=2022-06-13 #This needs to be auto date\nexport eddate_post=2022-06-14 #This needs to be auto date\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files\"\nexport mcip_dir=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km\"\nexport graph_dir=\"/groups/ESS/aalnaim/cmaq/plots\"\n\nexport obs_dir_NCL=\"/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X\"\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\n\ndate = getenv(\"YYYYMMDD_POST\") \nd1 = getenv(\"stdate_post\") \nd2 = getenv(\"eddate_post\") \n\nobs_dir = getenv(\"obs_dir_NCL\")\nplot_dir = getenv(\"graph_dir\") \n\nhr=new(24,\"string\")\nhr=(/\"00\",\"01\",\"02\",\"03\",\"04\",\"05\",\"06\",\"07\",\"08\",\"09\",\"10\",\"11\",\"12\",\"13\",\"14\",\"15\",\"16\",\"17\",\"18\",\"19\",\"20\",\"21\",\"22\",\"23\"/)\n\nprint(plot_dir)\naconc_dir = getenv(\"postdata_dir\") \ngrid_dir = getenv(\"mcip_dir\") \n\ncdf_file1 = addfile(aconc_dir+\"/COMBINE3D_ACONC_v531_gcc_AQF5X_\"+date+\"_ML_extracted.nc\",\"r\")\ncdf_file= addfile(grid_dir+\"/GRIDCRO2D_\"+date+\".nc\",\"r\")\ncdf_file2= 
addfile(grid_dir+\"/METCRO2D_\"+date+\".nc\",\"r\")\n\ntime = cdf_file1->TFLAG(:,0,:)\no3 = cdf_file1->O3(:,:,:) ;ppb\n;no2 = cdf_file1->NO2(:,0,:,:)\n;co = cdf_file1->CO(:,0,:,:)\n;pm25 = cdf_file1->PM25_TOT(:,0,:,:)\nwspd10=cdf_file2->WSPD10(:,0,:,:)\nwdir10=cdf_file2->WDIR10(:,0,:,:)\n\ntemp = cdf_file2->TEMP2\n\nnt = dimsizes(o3(:,0,0))\nny = dimsizes(o3(0,:,0))\nnx = dimsizes(o3(0,0,:))\n\nprint(max(temp))\nprint(min(temp))\nprint(avg(temp))\n\n\n\nprint(nt+\" \"+ny+\" \"+nx)\nprint(max(o3))\nprint(min(o3))\nprint(avg(o3))\nprint(max(co))\nprint(min(co))\nprint(avg(co))\n\nlat = cdf_file->LAT(0,0,:,:)\nlon = cdf_file->LON(0,0,:,:)\n\no3@lat2d = lat\no3@lon2d = lon\no3@unit = \"ppbv\"\n\nUV10=wind_component(wspd10,wdir10,0)\nUV10@lat2d = lat\nUV10@lon2d = lon\n\n\nres = True\nres@gsnMaximize = True ; maximize pot in frame\nres@gsnFrame = False ; dont advance frame\nres@gsnDraw = False\nres@gsnLeftString = \"\"\nres@gsnRightString = \"\"\nres@txFont = \"times-roman\"\nres@tiMainFont = \"times-roman\"\n;res@tiMainFontHeightF = 0.02\n;res@vpWidthF = 0.7\n;res@vpHeightF = 0.7\n\n;;set map;;\nmpres = res\nmpres@mpLimitMode = \"LatLon\"\nmpres@mpDataSetName = \"Earth..4\"\nmpres@mpDataBaseVersion = \"MediumRes\"\nmpres@mpOutlineOn = True\nmpres@mpGeophysicalLineThicknessF = 1.5\nmpres@mpFillDrawOrder = \"PostDraw\"\nmpres@mpFillOn = False\nmpres@mpAreaMaskingOn = True\nmpres@mpOutlineBoundarySets = \"GeophysicalAndUSStates\"\nmpres@mpOutlineSpecifiers = \"United States:States\"\nmpres@mpProjection = \"LambertConformal\"\nmpres@mpLambertParallel1F = 33.\nmpres@mpLambertParallel2F = 45.\nmpres@mpLambertMeridianF = -98.\nmpres@mpMinLonF = -120 ;min(lon)+0.2\nmpres@mpMaxLonF = -70 ;max(lon)-0.2\nmpres@mpMinLatF = 25 ;min(lat)+0.05\nmpres@mpMaxLatF = 50 ;max(lat)-0.05\nmpres@pmTickMarkDisplayMode = \"Always\"\nmpres@mpLandFillColor = \"white\"\nmpres@mpInlandWaterFillColor = \"white\"\nmpres@mpOceanFillColor = \"white\"\nmpres@mpGeophysicalLineColor = \"Black\"\n\n;mpres@lbLabelAutoStride = True\nmpres@tiXAxisFont = 25\nmpres@pmTickMarkDisplayMode = \"Always\"\nmpres@tmXBLabelFont = 25\nmpres@tmXBLabelFontHeightF = 0.013\nmpres@tmXBLabelDeltaF = -0.5\nmpres@tmYLLabelFont = 25\nmpres@tmYLLabelFontHeightF = 0.013\nmpres@tmXBLabelDeltaF = -0.5\nmpres@tmXTLabelsOn = False\nmpres@tmXTLabelFont = 25\nmpres@tmXTLabelFontHeightF = 0.013\nmpres@tmYRLabelsOn = False\nmpres@tmYRLabelFont = 25\nmpres@tmYRLabelFontHeightF = 0.013\n\n;;set contour;;\ncnres = res\ncnres@cnFillDrawOrder = \"PreDraw\"\ncnres@cnFillOn = True\ncnres@cnLinesOn = False\ncnres@cnLineLabelsOn = False\ncnres@lbLabelFont = 25\ncnres@lbLabelFontHeightF = 0.013\ncnres@tiXAxisFont = 25\ncnres@pmLabelBarWidthF = 0.5\ncnres@pmLabelBarHeightF = 0.1\n;cnres@pmLabelBarOrthogonalPosF = -0.02\ncnres@lbLabelAutoStride = True\n\n;set vector;;\nres_vc = res\nres_vc@vcGlyphStyle = \"LineArrow\"\nres_vc@vcLineArrowThicknessF = 3\nres_vc@vcMinDistanceF = 0.03\nres_vc@vcRefLengthF = 0.03\nres_vc@vcRefAnnoOn = True\nres_vc@vcRefMagnitudeF = 16\nres_vc@vcRefAnnoString1 = \"16m/s\"\nres_vc@vcRefAnnoSide = \"Top\"\nres_vc@vcRefAnnoString2On = False\nres_vc@vcRefAnnoPerimOn = False\nres_vc@vcRefAnnoOrthogonalPosF = -0.02\nres_vc@vcRefAnnoParallelPosF = 0.999\n;res_vc@vcRefAnnoBackgroundColor = \"White\"\nres_vc@vcVectorDrawOrder = \"PostDraw\"\n\ndo it = 0, nt-1\n if (it .lt. 
12) then\n pdate=d1\n else\n pdate=d2\n end if\n\n ;print(time(it,0)+\" \"+time(it,1))\n rundate = yyyyddd_to_yyyymmdd( time(it,0) )\n runtime = hr( tointeger(time(it,1)/10000) )\n\n site = readAsciiTable(obs_dir+\"/AQF5X_Hourly_\"+rundate+runtime+\".dat\",1,\"string\",1)\n nrows = dimsizes(site)\n sitename = str_get_field(site,1,\",\")\n sitelat = stringtofloat(str_get_field(site,2,\",\"))\n sitelon = stringtofloat(str_get_field(site,3,\",\"))\n O3_obs = stringtofloat(str_get_field(site,4,\",\"))\n\n obslon = sitelon(:,0)\n obslat = sitelat(:,0)\n obsO3 = O3_obs(:,0)\n\n npts = nrows(0)\n\n obsO3@_FillValue = -999.\n\n;--- levels for dividing\n levels_O3 = ispan(0,80,4)\n\n nlevels = dimsizes(levels_O3)\n\n colors = span_color_rgba(\"WhiteBlueGreenYellowRed\",nlevels+1)\n\n num_distinct_markers = nlevels+1 ; number of distinct markers\n lat_O3 = new((/num_distinct_markers,npts/),float)\n lon_O3 = new((/num_distinct_markers,npts/),float)\n lat_O3 = -999\n lon_O3 = -999\n\n\n;\n; Group the points according to which range they fall in. At the\n; same time, create the label that we will use later in the labelbar\n;\n do i = 0, num_distinct_markers-1\n if (i.eq.0) then\n indexes_O3 = ind(obsO3(:).lt.levels_O3(0))\n end if\n if (i.eq.num_distinct_markers-1) then\n indexes_O3 = ind(obsO3(:).ge.max(levels_O3))\n end if\n if (i.gt.0.and.i.lt.num_distinct_markers-1) then\n indexes_O3 = ind(obsO3(:).ge.levels_O3(i-1).and.obsO3(:).lt.levels_O3(i))\n end if\n\n;\n; Now that we have the set of indexes whose values fall within\n; the given range, take the corresponding lat/lon values and store\n; them, so later we can color this set of markers with the appropriate\n; color.\n;\n if (.not.any(ismissing(indexes_O3))) then\n npts_range_O3 = dimsizes(indexes_O3) ; # of points in this range.\n\n lat_O3(i,0:npts_range_O3-1) = obslat(indexes_O3)\n lon_O3(i,0:npts_range_O3-1) = obslon(indexes_O3)\n ;print(\"O3: \"+npts_range_O3)\n end if\n\n\n delete(indexes_O3) ; Necessary b/c \"indexes\" may be a different\n end do\n\n lat_O3@_FillValue = -999\n lon_O3@_FillValue = -999\n\n gsres = True\n gsres@gsMarkerIndex = 16 ; Use filled dots for markers.\n\n hollowres = True\n hollowres@gsMarkerIndex = 4\n hollowres@gsMarkerColor = \"black\"\n hollowres@gsMarkerSizeF = 0.008\n\n;;;;;;;;; Plot Ozone\n pname=plot_dir+\"/OBS-FORECAST_O3_\"+rundate+runtime\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n pmid_O3 = new(num_distinct_markers,graphic)\n hollow_O3 = new(num_distinct_markers,graphic)\n\n cnres@tiMainString = pdate+\" \"+runtime+\" UTC O~B~3~N~ (ppbV)\"\n cnres@cnLevelSelectionMode = \"ManualLevels\"\n cnres@cnMinLevelValF = 0.\n cnres@cnMaxLevelValF = 80\n cnres@cnLevelSpacingF = 4\n\n ;plot = gsn_csm_contour_map(wks,o3(it,:,:),res)\n map = gsn_csm_map(wks,mpres)\n contour = gsn_csm_contour(wks,o3(it,:,:),cnres)\n vector = gsn_csm_vector(wks,UV10(0,it,:,:),UV10(1,it,:,:),res_vc)\n overlay(map,contour)\n overlay(map,vector)\n\n pmid = new(num_distinct_markers,graphic)\n hollow = new(num_distinct_markers,graphic)\n do i = 0, num_distinct_markers-1\n if (.not.ismissing(lat_O3(i,0)))\n gsres@gsMarkerColor = colors(i,:)\n gsres@gsMarkerSizeF = 0.008\n gsres@gsMarkerThicknessF = 1\n pmid(i) = gsn_add_polymarker(wks,vector,lon_O3(i,:),lat_O3(i,:),gsres)\n hollow(i) = gsn_add_polymarker(wks,vector,lon_O3(i,:),lat_O3(i,:),hollowres)\n end if\n end do\n\n draw(map)\n frame(wks)\n delete(wks)\n delete(pmid_O3)\n delete(hollow_O3)\n system(\"composite -geometry 100x70+900+900 
/groups/ESS/aalnaim/cmaq/mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\n\n\n delete(pmid)\n delete(hollow)\n delete(site)\n delete(sitename)\n delete(sitelat)\n delete(sitelon)\n delete(O3_obs)\n delete(obslon)\n delete(obslat)\n delete(obsO3)\n delete([/lon_O3,lat_O3/])\n\nend do\ndelete(res)\n\n;/\n\nend\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\n\n# convert -delay 100 *.png 20220613_20220614.gif\n\nif [ $? -eq 0 ]; then\n echo \"Generating AirNow images/gif Completed Successfully\"\n\techo \"Removing ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nelse\n echo \"Generating AirNow images/gif Failed!\"\nfi\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\n\n\nVariable: plot_dir\nType: string\nTotal Size: 8 bytes\n 1 values\nNumber of Dimensions: 1\nDimensions and sizes:\t[1]\nCoordinates: \n(0)\t/groups/ESS/aalnaim/cmaq/plots\n(0)\t318.4477\n(0)\t267.5703\n(0)\t293.6953\n(0)\t24 265 442\n(0)\t86.04666666666668\n(0)\t6.15\n(0)\t31.61014393784576\nfatal:Variable (co) is undefined\n\nfatal:[\"Execute.c\":8637]:Execute: Error occurred at or near line 53 in file /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\n\n\nGenerating AirNow images/gif Completed Successfully\nRemoving ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\n", + "history_begin_time" : 1655772942906, + "history_end_time" : 1656318422071, + "history_notes" : null, + "history_process" : "is1w3m", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "tAaCsxUHI1xm", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=20220612 #This needs to be auto date\nexport stdate_post=2022-06-13 #This needs to be auto date\nexport eddate_post=2022-06-14 #This needs to be auto date\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files\"\nexport mcip_dir=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km\"\nexport graph_dir=\"/groups/ESS/aalnaim/cmaq/plots\"\n\nexport obs_dir_NCL=\"/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X\"\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\n\ndate = getenv(\"YYYYMMDD_POST\") \nd1 = getenv(\"stdate_post\") \nd2 = getenv(\"eddate_post\") \n\nobs_dir = getenv(\"obs_dir_NCL\")\nplot_dir = getenv(\"graph_dir\") \n\nhr=new(24,\"string\")\nhr=(/\"00\",\"01\",\"02\",\"03\",\"04\",\"05\",\"06\",\"07\",\"08\",\"09\",\"10\",\"11\",\"12\",\"13\",\"14\",\"15\",\"16\",\"17\",\"18\",\"19\",\"20\",\"21\",\"22\",\"23\"/)\n\nprint(plot_dir)\naconc_dir = getenv(\"postdata_dir\") \ngrid_dir = getenv(\"mcip_dir\") \n\ncdf_file1 = addfile(aconc_dir+\"/COMBINE3D_ACONC_v531_gcc_AQF5X_\"+date+\"_ML_extracted.nc\",\"r\")\ncdf_file= addfile(grid_dir+\"/GRIDCRO2D_\"+date+\".nc\",\"r\")\ncdf_file2= 
addfile(grid_dir+\"/METCRO2D_\"+date+\".nc\",\"r\")\n\ntime = cdf_file1->TFLAG(:,0,:)\no3 = cdf_file1->O3(:,:,:) ;ppb\n;no2 = cdf_file1->NO2(:,0,:,:)\n;co = cdf_file1->CO(:,0,:,:)\n;pm25 = cdf_file1->PM25_TOT(:,0,:,:)\nwspd10=cdf_file2->WSPD10(:,0,:,:)\nwdir10=cdf_file2->WDIR10(:,0,:,:)\n\ntemp = cdf_file2->TEMP2\n\nnt = dimsizes(o3(:,0,0))\nny = dimsizes(o3(0,:,0))\nnx = dimsizes(o3(0,0,:))\n\nprint(max(temp))\nprint(min(temp))\nprint(avg(temp))\n\n\n\nprint(nt+\" \"+ny+\" \"+nx)\nprint(max(o3))\nprint(min(o3))\nprint(avg(o3))\nprint(max(co))\nprint(min(co))\nprint(avg(co))\n\nlat = cdf_file->LAT(0,0,:,:)\nlon = cdf_file->LON(0,0,:,:)\n\no3@lat2d = lat\no3@lon2d = lon\no3@unit = \"ppbv\"\n\nno2@lat2d = lat\nno2@lon2d = lon\nno2@unit = \"ppbv\"\nco@lat2d = lat\nco@lon2d = lon\nco@unit = \"ppbv\"\npm25@lat2d = lat\npm25@lon2d = lon\n\nUV10=wind_component(wspd10,wdir10,0)\nUV10@lat2d = lat\nUV10@lon2d = lon\n\n\nres = True\nres@gsnMaximize = True ; maximize pot in frame\nres@gsnFrame = False ; dont advance frame\nres@gsnDraw = False\nres@gsnLeftString = \"\"\nres@gsnRightString = \"\"\nres@txFont = \"times-roman\"\nres@tiMainFont = \"times-roman\"\n;res@tiMainFontHeightF = 0.02\n;res@vpWidthF = 0.7\n;res@vpHeightF = 0.7\n\n;;set map;;\nmpres = res\nmpres@mpLimitMode = \"LatLon\"\nmpres@mpDataSetName = \"Earth..4\"\nmpres@mpDataBaseVersion = \"MediumRes\"\nmpres@mpOutlineOn = True\nmpres@mpGeophysicalLineThicknessF = 1.5\nmpres@mpFillDrawOrder = \"PostDraw\"\nmpres@mpFillOn = False\nmpres@mpAreaMaskingOn = True\nmpres@mpOutlineBoundarySets = \"GeophysicalAndUSStates\"\nmpres@mpOutlineSpecifiers = \"United States:States\"\nmpres@mpProjection = \"LambertConformal\"\nmpres@mpLambertParallel1F = 33.\nmpres@mpLambertParallel2F = 45.\nmpres@mpLambertMeridianF = -98.\nmpres@mpMinLonF = -120 ;min(lon)+0.2\nmpres@mpMaxLonF = -70 ;max(lon)-0.2\nmpres@mpMinLatF = 25 ;min(lat)+0.05\nmpres@mpMaxLatF = 50 ;max(lat)-0.05\nmpres@pmTickMarkDisplayMode = \"Always\"\nmpres@mpLandFillColor = \"white\"\nmpres@mpInlandWaterFillColor = \"white\"\nmpres@mpOceanFillColor = \"white\"\nmpres@mpGeophysicalLineColor = \"Black\"\n\n;mpres@lbLabelAutoStride = True\nmpres@tiXAxisFont = 25\nmpres@pmTickMarkDisplayMode = \"Always\"\nmpres@tmXBLabelFont = 25\nmpres@tmXBLabelFontHeightF = 0.013\nmpres@tmXBLabelDeltaF = -0.5\nmpres@tmYLLabelFont = 25\nmpres@tmYLLabelFontHeightF = 0.013\nmpres@tmXBLabelDeltaF = -0.5\nmpres@tmXTLabelsOn = False\nmpres@tmXTLabelFont = 25\nmpres@tmXTLabelFontHeightF = 0.013\nmpres@tmYRLabelsOn = False\nmpres@tmYRLabelFont = 25\nmpres@tmYRLabelFontHeightF = 0.013\n\n;;set contour;;\ncnres = res\ncnres@cnFillDrawOrder = \"PreDraw\"\ncnres@cnFillOn = True\ncnres@cnLinesOn = False\ncnres@cnLineLabelsOn = False\ncnres@lbLabelFont = 25\ncnres@lbLabelFontHeightF = 0.013\ncnres@tiXAxisFont = 25\ncnres@pmLabelBarWidthF = 0.5\ncnres@pmLabelBarHeightF = 0.1\n;cnres@pmLabelBarOrthogonalPosF = -0.02\ncnres@lbLabelAutoStride = True\n\n;set vector;;\nres_vc = res\nres_vc@vcGlyphStyle = \"LineArrow\"\nres_vc@vcLineArrowThicknessF = 3\nres_vc@vcMinDistanceF = 0.03\nres_vc@vcRefLengthF = 0.03\nres_vc@vcRefAnnoOn = True\nres_vc@vcRefMagnitudeF = 16\nres_vc@vcRefAnnoString1 = \"16m/s\"\nres_vc@vcRefAnnoSide = \"Top\"\nres_vc@vcRefAnnoString2On = False\nres_vc@vcRefAnnoPerimOn = False\nres_vc@vcRefAnnoOrthogonalPosF = -0.02\nres_vc@vcRefAnnoParallelPosF = 0.999\n;res_vc@vcRefAnnoBackgroundColor = \"White\"\nres_vc@vcVectorDrawOrder = \"PostDraw\"\n\ndo it = 0, nt-1\n if (it .lt. 
12) then\n pdate=d1\n else\n pdate=d2\n end if\n\n ;print(time(it,0)+\" \"+time(it,1))\n rundate = yyyyddd_to_yyyymmdd( time(it,0) )\n runtime = hr( tointeger(time(it,1)/10000) )\n\n site = readAsciiTable(obs_dir+\"/AQF5X_Hourly_\"+rundate+runtime+\".dat\",1,\"string\",1)\n nrows = dimsizes(site)\n sitename = str_get_field(site,1,\",\")\n sitelat = stringtofloat(str_get_field(site,2,\",\"))\n sitelon = stringtofloat(str_get_field(site,3,\",\"))\n O3_obs = stringtofloat(str_get_field(site,4,\",\"))\n NO2_obs = stringtofloat(str_get_field(site,5,\",\"))\n CO_obs = stringtofloat(str_get_field(site,6,\",\"))\n PM25_obs = stringtofloat(str_get_field(site,7,\",\"))\n\n obslon = sitelon(:,0)\n obslat = sitelat(:,0)\n obsO3 = O3_obs(:,0)\n obsNO2 = NO2_obs(:,0)\n obsCO = CO_obs(:,0)*1000 ;ppb\n obsPM25 = PM25_obs(:,0)\n\n\n npts = nrows(0)\n\n do ii = 0, npts-1\n if(obsCO(ii) .le. 0) then\n obsCO(ii) = -999.0\n end if\n end do\n\n obsO3@_FillValue = -999.\n obsNO2@_FillValue = -999.\n obsCO@_FillValue = -999.\n obsPM25@_FillValue = -999.\n\n;--- levels for dividing\n levels_O3 = ispan(0,80,4)\n levels_NO2 = ispan(0,40,2)\n levels_CO = ispan(0,1000,50)\n levels_PM25 = (/0,2,4,6,8,10,12,14,16,18,20,25,30,35,40,50,60,70,80,90,100/) ;ispan(0,100,5)\n\n nlevels = dimsizes(levels_O3)\n\n colors = span_color_rgba(\"WhiteBlueGreenYellowRed\",nlevels+1)\n\n num_distinct_markers = nlevels+1 ; number of distinct markers\n lat_O3 = new((/num_distinct_markers,npts/),float)\n lon_O3 = new((/num_distinct_markers,npts/),float)\n lat_O3 = -999\n lon_O3 = -999\n\n lat_NO2 = new((/num_distinct_markers,npts/),float)\n lon_NO2 = new((/num_distinct_markers,npts/),float)\n lat_NO2 = -999\n lon_NO2 = -999\n\n lat_CO = new((/num_distinct_markers,npts/),float)\n lon_CO = new((/num_distinct_markers,npts/),float)\n lat_CO = -999\n lon_CO = -999\n\n lat_PM25 = new((/num_distinct_markers,npts/),float)\n lon_PM25 = new((/num_distinct_markers,npts/),float)\n lat_PM25 = -999\n lon_PM25 = -999\n\n\n;\n; Group the points according to which range they fall in. 
At the\n; same time, create the label that we will use later in the labelbar\n;\n do i = 0, num_distinct_markers-1\n if (i.eq.0) then\n indexes_O3 = ind(obsO3(:).lt.levels_O3(0))\n indexes_NO2 = ind(obsNO2(:).lt.levels_NO2(0))\n indexes_CO = ind(obsCO(:).lt.levels_CO(0))\n indexes_PM25 = ind(obsPM25(:).lt.levels_PM25(0))\n end if\n if (i.eq.num_distinct_markers-1) then\n indexes_O3 = ind(obsO3(:).ge.max(levels_O3))\n indexes_NO2 = ind(obsNO2(:).ge.max(levels_NO2))\n indexes_CO = ind(obsCO(:).ge.max(levels_CO))\n indexes_PM25 = ind(obsPM25(:).ge.max(levels_PM25))\n end if\n if (i.gt.0.and.i.lt.num_distinct_markers-1) then\n indexes_O3 = ind(obsO3(:).ge.levels_O3(i-1).and.obsO3(:).lt.levels_O3(i))\n indexes_NO2 = ind(obsNO2(:).ge.levels_NO2(i-1).and.obsNO2(:).lt.levels_NO2(i))\n indexes_CO = ind(obsCO(:).ge.levels_CO(i-1).and.obsCO(:).lt.levels_CO(i))\n indexes_PM25 = ind(obsPM25(:).ge.levels_PM25(i-1).and.obsPM25(:).lt.levels_PM25(i))\n end if\n\n;\n; Now that we have the set of indexes whose values fall within\n; the given range, take the corresponding lat/lon values and store\n; them, so later we can color this set of markers with the appropriate\n; color.\n;\n ;print(indexes_O3+\" \"+indexes_CO+\" \"+indexes_NO2)\n\n if (.not.any(ismissing(indexes_O3))) then\n npts_range_O3 = dimsizes(indexes_O3) ; # of points in this range.\n\n lat_O3(i,0:npts_range_O3-1) = obslat(indexes_O3)\n lon_O3(i,0:npts_range_O3-1) = obslon(indexes_O3)\n ;print(\"O3: \"+npts_range_O3)\n end if\n\n if (.not.any(ismissing(indexes_NO2))) then\n npts_range_NO2 = dimsizes(indexes_NO2) ; # of points in this range.\n lat_NO2(i,0:npts_range_NO2-1) = obslat(indexes_NO2)\n lon_NO2(i,0:npts_range_NO2-1) = obslon(indexes_NO2)\n ;print(\"NO2: \"+npts_range_NO2)\n end if\n\n if (.not.any(ismissing(indexes_CO))) then\n npts_range_CO = dimsizes(indexes_CO) ; # of points in this range.\n lat_CO(i,0:npts_range_CO-1) = obslat(indexes_CO)\n lon_CO(i,0:npts_range_CO-1) = obslon(indexes_CO)\n ;print(\"CO: \"+npts_range_CO)\n end if\n\n if (.not.any(ismissing(indexes_PM25))) then\n npts_range_PM25 = dimsizes(indexes_PM25) ; # of points in this range.\n lat_PM25(i,0:npts_range_PM25-1) = obslat(indexes_PM25)\n lon_PM25(i,0:npts_range_PM25-1) = obslon(indexes_PM25)\n ;print(\"PM25: \"+npts_range_PM25)\n end if\n\n\n delete(indexes_O3) ; Necessary b/c \"indexes\" may be a different\n delete(indexes_NO2) ; size next time.\n delete(indexes_CO)\n delete(indexes_PM25)\n end do\n\n lat_O3@_FillValue = -999\n lon_O3@_FillValue = -999\n lat_NO2@_FillValue = -999\n lon_NO2@_FillValue = -999\n lat_CO@_FillValue = -999\n lon_CO@_FillValue = -999\n lat_PM25@_FillValue = -999\n lon_PM25@_FillValue = -999\n\n gsres = True\n gsres@gsMarkerIndex = 16 ; Use filled dots for markers.\n\n hollowres = True\n hollowres@gsMarkerIndex = 4\n hollowres@gsMarkerColor = \"black\"\n hollowres@gsMarkerSizeF = 0.008\n\n;;;;;;;;; Plot Ozone\n pname=plot_dir+\"/OBS-FORECAST_O3_\"+rundate+runtime\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n pmid_O3 = new(num_distinct_markers,graphic)\n hollow_O3 = new(num_distinct_markers,graphic)\n\n cnres@tiMainString = pdate+\" \"+runtime+\" UTC O~B~3~N~ (ppbV)\"\n cnres@cnLevelSelectionMode = \"ManualLevels\"\n cnres@cnMinLevelValF = 0.\n cnres@cnMaxLevelValF = 80\n cnres@cnLevelSpacingF = 4\n\n ;plot = gsn_csm_contour_map(wks,o3(it,:,:),res)\n map = gsn_csm_map(wks,mpres)\n contour = gsn_csm_contour(wks,o3(it,:,:),cnres)\n vector = 
gsn_csm_vector(wks,UV10(0,it,:,:),UV10(1,it,:,:),res_vc)\n overlay(map,contour)\n overlay(map,vector)\n\n pmid = new(num_distinct_markers,graphic)\n hollow = new(num_distinct_markers,graphic)\n do i = 0, num_distinct_markers-1\n if (.not.ismissing(lat_O3(i,0)))\n gsres@gsMarkerColor = colors(i,:)\n gsres@gsMarkerSizeF = 0.008\n gsres@gsMarkerThicknessF = 1\n pmid(i) = gsn_add_polymarker(wks,vector,lon_O3(i,:),lat_O3(i,:),gsres)\n hollow(i) = gsn_add_polymarker(wks,vector,lon_O3(i,:),lat_O3(i,:),hollowres)\n end if\n end do\n\n draw(map)\n frame(wks)\n delete(wks)\n delete(pmid_O3)\n delete(hollow_O3)\n system(\"composite -geometry 100x70+900+900 /groups/ESS/aalnaim/cmaq/mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\n\n;;;;;;;;; Plot NO2\n\n pmid_NO2 = new(num_distinct_markers,graphic)\n hollow_NO2 = new(num_distinct_markers,graphic)\n pname=plot_dir+\"/OBS-FORECAST_NO2_\"+rundate+runtime\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n cnres@tiMainString = pdate+\" \"+runtime+\" UTC NO~B~2~N~ (ppbV)\"\n cnres@cnLevelSelectionMode = \"ManualLevels\"\n cnres@cnMinLevelValF = 0.\n cnres@cnMaxLevelValF = 40\n cnres@cnLevelSpacingF = 2\n\n map = gsn_csm_map(wks,mpres)\n contour = gsn_csm_contour(wks,no2(it,:,:),cnres)\n vector = gsn_csm_vector(wks,UV10(0,it,:,:),UV10(1,it,:,:),res_vc)\n overlay(map,contour)\n overlay(map,vector)\n\n\n do i = 0, num_distinct_markers-1\n if (.not.ismissing(lat_NO2(i,0)))\n gsres@gsMarkerColor = colors(i,:)\n gsres@gsMarkerSizeF = 0.008\n gsres@gsMarkerThicknessF = 1\n pmid(i) = gsn_add_polymarker(wks,vector,lon_NO2(i,:),lat_NO2(i,:),gsres)\n hollow(i) = gsn_add_polymarker(wks,vector,lon_NO2(i,:),lat_NO2(i,:),hollowres)\n end if\n end do\n\n draw(map)\n frame(wks)\n delete(wks)\n delete(pmid_NO2)\n delete(hollow_NO2)\n system(\"composite -geometry 100x70+900+900 /groups/ESS/aalnaim/cmaq/mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\n\n;;;;;;;;; Plot CO\n\n pmid_CO = new(num_distinct_markers,graphic)\n hollow_CO = new(num_distinct_markers,graphic)\n pname=plot_dir+\"/OBS-FORECAST_CO_\"+rundate+runtime\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n cnres@tiMainString = pdate+\" \"+runtime+\" CO (ppbV)\"\n cnres@cnLevelSelectionMode = \"ManualLevels\"\n cnres@cnMinLevelValF = 0.\n cnres@cnMaxLevelValF = 1000\n cnres@cnLevelSpacingF = 50\n\n map = gsn_csm_map(wks,mpres)\n contour = gsn_csm_contour(wks,co(it,:,:),cnres)\n vector = gsn_csm_vector(wks,UV10(0,it,:,:),UV10(1,it,:,:),res_vc)\n overlay(map,contour)\n overlay(map,vector)\n\n\n do i = 0, num_distinct_markers-1\n if (.not.ismissing(lat_CO(i,0)))\n gsres@gsMarkerColor = colors(i,:)\n gsres@gsMarkerSizeF = 0.008\n gsres@gsMarkerThicknessF = 1\n pmid(i) = gsn_add_polymarker(wks,vector,lon_CO(i,:),lat_CO(i,:),gsres)\n hollow(i) = gsn_add_polymarker(wks,vector,lon_CO(i,:),lat_CO(i,:),hollowres)\n end if\n end do\n\n draw(map)\n frame(wks)\n delete(wks)\n delete(pmid_CO)\n delete(hollow_CO)\n system(\"composite -geometry 100x70+900+900 /groups/ESS/aalnaim/cmaq/mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\n\n;;;;;;;;; Plot PM2.5\n pname=plot_dir+\"/OBS-FORECAST_PM25_\"+rundate+runtime\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n pmid = new(num_distinct_markers,graphic)\n hollow = new(num_distinct_markers,graphic)\n\n cnres@tiMainString = pdate+\" \"+runtime+\" UTC PM~B~2.5~N~ (ug/m~S~3~N~)\"; ~C~ CMAQ with GBBEPx Fire Emissions\n; 
cnres@cnLevelSelectionMode = \"ManualLevels\"\n; cnres@cnMinLevelValF = 0.\n; cnres@cnMaxLevelValF = 100\n; cnres@cnLevelSpacingF = 5\n cnres@cnLevelSelectionMode = \"ExplicitLevels\"\n cnres@cnLevels = (/0,2,4,6,8,\\\n 10,12,14,16,18,\\\n 20,25,30,35,40,\\\n 50,60,70,80,90,\\\n 100/)\n\n ;plot = gsn_csm_contour_map(wks,pm25(it,:,:),res)\n\n map = gsn_csm_map(wks,mpres)\n contour = gsn_csm_contour(wks,pm25(it,:,:),cnres)\n vector = gsn_csm_vector(wks,UV10(0,it,:,:),UV10(1,it,:,:),res_vc)\n overlay(map,contour)\n overlay(map,vector)\n\n do i = 0, num_distinct_markers-1\n if (.not.ismissing(lat_PM25(i,0)))\n gsres@gsMarkerColor = colors(i,:)\n gsres@gsMarkerSizeF = 0.008\n gsres@gsMarkerThicknessF = 1\n pmid(i) = gsn_add_polymarker(wks,vector,lon_PM25(i,:),lat_PM25(i,:),gsres)\n hollow(i) = gsn_add_polymarker(wks,vector,lon_PM25(i,:),lat_PM25(i,:),hollowres)\n end if\n end do\n\n draw(map)\n frame(wks)\n delete(wks)\n system(\"composite -geometry 100x70+900+900 /groups/ESS/aalnaim/cmaq/mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\n\n delete(pmid)\n delete(hollow)\n delete(site)\n delete(sitename)\n delete(sitelat)\n delete(sitelon)\n delete(O3_obs)\n delete(NO2_obs)\n delete(CO_obs)\n delete(PM25_obs)\n delete(obslon)\n delete(obslat)\n delete(obsO3)\n delete(obsNO2)\n delete(obsCO)\n delete(obsPM25)\n delete([/lon_O3,lon_NO2,lon_CO,lon_PM25,lat_O3,lat_NO2,lat_CO,lat_PM25/])\n\nend do\ndelete(res)\n\n;/\n\nend\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\n\n# convert -delay 100 *.png 20220613_20220614.gif\n\nif [ $? -eq 0 ]; then\n echo \"Generating AirNow images/gif Completed Successfully\"\n\techo \"Removing ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nelse\n echo \"Generating AirNow images/gif Failed!\"\nfi\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\n\n\nVariable: plot_dir\nType: string\nTotal Size: 8 bytes\n 1 values\nNumber of Dimensions: 1\nDimensions and sizes:\t[1]\nCoordinates: \n(0)\t/groups/ESS/aalnaim/cmaq/plots\n(0)\t318.4477\n(0)\t267.5703\n(0)\t293.6953\n(0)\t24 265 442\n(0)\t86.04666666666668\n(0)\t6.15\n(0)\t31.61014393784576\nfatal:Variable (co) is undefined\n\nfatal:[\"Execute.c\":8637]:Execute: Error occurred at or near line 53 in file /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\n\n\nGenerating AirNow images/gif Completed Successfully\nRemoving ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\n", + "history_begin_time" : 1655772617245, + "history_end_time" : 1656318421502, + "history_notes" : null, + "history_process" : "is1w3m", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "0dBhrqkuw3fA", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=20220613 #This needs to be auto date\nexport stdate_post=2022-06-13 #This needs to be auto date\nexport eddate_post=2022-06-14 #This needs to be auto date\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files\"\nexport mcip_dir=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km\"\nexport graph_dir=\"/groups/ESS/aalnaim/cmaq/plots\"\n\nexport obs_dir_NCL=\"/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X\"\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nload 
\"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\n\ndate = getenv(\"YYYYMMDD_POST\") \nd1 = getenv(\"stdate_post\") \nd2 = getenv(\"eddate_post\") \n\nobs_dir = getenv(\"obs_dir_NCL\")\nplot_dir = getenv(\"graph_dir\") \n\nhr=new(24,\"string\")\nhr=(/\"00\",\"01\",\"02\",\"03\",\"04\",\"05\",\"06\",\"07\",\"08\",\"09\",\"10\",\"11\",\"12\",\"13\",\"14\",\"15\",\"16\",\"17\",\"18\",\"19\",\"20\",\"21\",\"22\",\"23\"/)\n\nprint(plot_dir)\naconc_dir = getenv(\"postdata_dir\") \ngrid_dir = getenv(\"mcip_dir\") \n\ncdf_file1 = addfile(aconc_dir+\"/COMBINE3D_ACONC_v531_gcc_AQF5X_\"+date+\"_ML_extracted.nc\",\"r\")\ncdf_file= addfile(grid_dir+\"/GRIDCRO2D_\"+date+\".nc\",\"r\")\ncdf_file2= addfile(grid_dir+\"/METCRO2D_\"+date+\".nc\",\"r\")\n\ntime = cdf_file1->TFLAG(:,0,:)\no3 = cdf_file1->O3(:,:,:) ;ppb\n;no2 = cdf_file1->NO2(:,0,:,:)\n;co = cdf_file1->CO(:,0,:,:)\n;pm25 = cdf_file1->PM25_TOT(:,0,:,:)\nwspd10=cdf_file2->WSPD10(:,0,:,:)\nwdir10=cdf_file2->WDIR10(:,0,:,:)\n\ntemp = cdf_file2->TEMP2\n\nnt = dimsizes(o3(:,0,0))\nny = dimsizes(o3(0,:,0))\nnx = dimsizes(o3(0,0,:))\n\nprint(max(temp))\nprint(min(temp))\nprint(avg(temp))\n\n\n\nprint(nt+\" \"+ny+\" \"+nx)\nprint(max(o3))\nprint(min(o3))\nprint(avg(o3))\nprint(max(co))\nprint(min(co))\nprint(avg(co))\n\nlat = cdf_file->LAT(0,0,:,:)\nlon = cdf_file->LON(0,0,:,:)\n\no3@lat2d = lat\no3@lon2d = lon\no3@unit = \"ppbv\"\n\nno2@lat2d = lat\nno2@lon2d = lon\nno2@unit = \"ppbv\"\nco@lat2d = lat\nco@lon2d = lon\nco@unit = \"ppbv\"\npm25@lat2d = lat\npm25@lon2d = lon\n\nUV10=wind_component(wspd10,wdir10,0)\nUV10@lat2d = lat\nUV10@lon2d = lon\n\n\nres = True\nres@gsnMaximize = True ; maximize pot in frame\nres@gsnFrame = False ; dont advance frame\nres@gsnDraw = False\nres@gsnLeftString = \"\"\nres@gsnRightString = \"\"\nres@txFont = \"times-roman\"\nres@tiMainFont = \"times-roman\"\n;res@tiMainFontHeightF = 0.02\n;res@vpWidthF = 0.7\n;res@vpHeightF = 0.7\n\n;;set map;;\nmpres = res\nmpres@mpLimitMode = \"LatLon\"\nmpres@mpDataSetName = \"Earth..4\"\nmpres@mpDataBaseVersion = \"MediumRes\"\nmpres@mpOutlineOn = True\nmpres@mpGeophysicalLineThicknessF = 1.5\nmpres@mpFillDrawOrder = \"PostDraw\"\nmpres@mpFillOn = False\nmpres@mpAreaMaskingOn = True\nmpres@mpOutlineBoundarySets = \"GeophysicalAndUSStates\"\nmpres@mpOutlineSpecifiers = \"United States:States\"\nmpres@mpProjection = \"LambertConformal\"\nmpres@mpLambertParallel1F = 33.\nmpres@mpLambertParallel2F = 45.\nmpres@mpLambertMeridianF = -98.\nmpres@mpMinLonF = -120 ;min(lon)+0.2\nmpres@mpMaxLonF = -70 ;max(lon)-0.2\nmpres@mpMinLatF = 25 ;min(lat)+0.05\nmpres@mpMaxLatF = 50 ;max(lat)-0.05\nmpres@pmTickMarkDisplayMode = \"Always\"\nmpres@mpLandFillColor = \"white\"\nmpres@mpInlandWaterFillColor = \"white\"\nmpres@mpOceanFillColor = \"white\"\nmpres@mpGeophysicalLineColor = \"Black\"\n\n;mpres@lbLabelAutoStride = True\nmpres@tiXAxisFont = 25\nmpres@pmTickMarkDisplayMode = \"Always\"\nmpres@tmXBLabelFont = 25\nmpres@tmXBLabelFontHeightF = 0.013\nmpres@tmXBLabelDeltaF = -0.5\nmpres@tmYLLabelFont = 25\nmpres@tmYLLabelFontHeightF = 0.013\nmpres@tmXBLabelDeltaF = 
-0.5\nmpres@tmXTLabelsOn = False\nmpres@tmXTLabelFont = 25\nmpres@tmXTLabelFontHeightF = 0.013\nmpres@tmYRLabelsOn = False\nmpres@tmYRLabelFont = 25\nmpres@tmYRLabelFontHeightF = 0.013\n\n;;set contour;;\ncnres = res\ncnres@cnFillDrawOrder = \"PreDraw\"\ncnres@cnFillOn = True\ncnres@cnLinesOn = False\ncnres@cnLineLabelsOn = False\ncnres@lbLabelFont = 25\ncnres@lbLabelFontHeightF = 0.013\ncnres@tiXAxisFont = 25\ncnres@pmLabelBarWidthF = 0.5\ncnres@pmLabelBarHeightF = 0.1\n;cnres@pmLabelBarOrthogonalPosF = -0.02\ncnres@lbLabelAutoStride = True\n\n;set vector;;\nres_vc = res\nres_vc@vcGlyphStyle = \"LineArrow\"\nres_vc@vcLineArrowThicknessF = 3\nres_vc@vcMinDistanceF = 0.03\nres_vc@vcRefLengthF = 0.03\nres_vc@vcRefAnnoOn = True\nres_vc@vcRefMagnitudeF = 16\nres_vc@vcRefAnnoString1 = \"16m/s\"\nres_vc@vcRefAnnoSide = \"Top\"\nres_vc@vcRefAnnoString2On = False\nres_vc@vcRefAnnoPerimOn = False\nres_vc@vcRefAnnoOrthogonalPosF = -0.02\nres_vc@vcRefAnnoParallelPosF = 0.999\n;res_vc@vcRefAnnoBackgroundColor = \"White\"\nres_vc@vcVectorDrawOrder = \"PostDraw\"\n\ndo it = 0, nt-1\n if (it .lt. 12) then\n pdate=d1\n else\n pdate=d2\n end if\n\n ;print(time(it,0)+\" \"+time(it,1))\n rundate = yyyyddd_to_yyyymmdd( time(it,0) )\n runtime = hr( tointeger(time(it,1)/10000) )\n\n site = readAsciiTable(obs_dir+\"/AQF5X_Hourly_\"+rundate+runtime+\".dat\",1,\"string\",1)\n nrows = dimsizes(site)\n sitename = str_get_field(site,1,\",\")\n sitelat = stringtofloat(str_get_field(site,2,\",\"))\n sitelon = stringtofloat(str_get_field(site,3,\",\"))\n O3_obs = stringtofloat(str_get_field(site,4,\",\"))\n NO2_obs = stringtofloat(str_get_field(site,5,\",\"))\n CO_obs = stringtofloat(str_get_field(site,6,\",\"))\n PM25_obs = stringtofloat(str_get_field(site,7,\",\"))\n\n obslon = sitelon(:,0)\n obslat = sitelat(:,0)\n obsO3 = O3_obs(:,0)\n obsNO2 = NO2_obs(:,0)\n obsCO = CO_obs(:,0)*1000 ;ppb\n obsPM25 = PM25_obs(:,0)\n\n\n npts = nrows(0)\n\n do ii = 0, npts-1\n if(obsCO(ii) .le. 0) then\n obsCO(ii) = -999.0\n end if\n end do\n\n obsO3@_FillValue = -999.\n obsNO2@_FillValue = -999.\n obsCO@_FillValue = -999.\n obsPM25@_FillValue = -999.\n\n;--- levels for dividing\n levels_O3 = ispan(0,80,4)\n levels_NO2 = ispan(0,40,2)\n levels_CO = ispan(0,1000,50)\n levels_PM25 = (/0,2,4,6,8,10,12,14,16,18,20,25,30,35,40,50,60,70,80,90,100/) ;ispan(0,100,5)\n\n nlevels = dimsizes(levels_O3)\n\n colors = span_color_rgba(\"WhiteBlueGreenYellowRed\",nlevels+1)\n\n num_distinct_markers = nlevels+1 ; number of distinct markers\n lat_O3 = new((/num_distinct_markers,npts/),float)\n lon_O3 = new((/num_distinct_markers,npts/),float)\n lat_O3 = -999\n lon_O3 = -999\n\n lat_NO2 = new((/num_distinct_markers,npts/),float)\n lon_NO2 = new((/num_distinct_markers,npts/),float)\n lat_NO2 = -999\n lon_NO2 = -999\n\n lat_CO = new((/num_distinct_markers,npts/),float)\n lon_CO = new((/num_distinct_markers,npts/),float)\n lat_CO = -999\n lon_CO = -999\n\n lat_PM25 = new((/num_distinct_markers,npts/),float)\n lon_PM25 = new((/num_distinct_markers,npts/),float)\n lat_PM25 = -999\n lon_PM25 = -999\n\n\n;\n; Group the points according to which range they fall in. 
At the\n; same time, create the label that we will use later in the labelbar\n;\n do i = 0, num_distinct_markers-1\n if (i.eq.0) then\n indexes_O3 = ind(obsO3(:).lt.levels_O3(0))\n indexes_NO2 = ind(obsNO2(:).lt.levels_NO2(0))\n indexes_CO = ind(obsCO(:).lt.levels_CO(0))\n indexes_PM25 = ind(obsPM25(:).lt.levels_PM25(0))\n end if\n if (i.eq.num_distinct_markers-1) then\n indexes_O3 = ind(obsO3(:).ge.max(levels_O3))\n indexes_NO2 = ind(obsNO2(:).ge.max(levels_NO2))\n indexes_CO = ind(obsCO(:).ge.max(levels_CO))\n indexes_PM25 = ind(obsPM25(:).ge.max(levels_PM25))\n end if\n if (i.gt.0.and.i.lt.num_distinct_markers-1) then\n indexes_O3 = ind(obsO3(:).ge.levels_O3(i-1).and.obsO3(:).lt.levels_O3(i))\n indexes_NO2 = ind(obsNO2(:).ge.levels_NO2(i-1).and.obsNO2(:).lt.levels_NO2(i))\n indexes_CO = ind(obsCO(:).ge.levels_CO(i-1).and.obsCO(:).lt.levels_CO(i))\n indexes_PM25 = ind(obsPM25(:).ge.levels_PM25(i-1).and.obsPM25(:).lt.levels_PM25(i))\n end if\n\n;\n; Now that we have the set of indexes whose values fall within\n; the given range, take the corresponding lat/lon values and store\n; them, so later we can color this set of markers with the appropriate\n; color.\n;\n ;print(indexes_O3+\" \"+indexes_CO+\" \"+indexes_NO2)\n\n if (.not.any(ismissing(indexes_O3))) then\n npts_range_O3 = dimsizes(indexes_O3) ; # of points in this range.\n\n lat_O3(i,0:npts_range_O3-1) = obslat(indexes_O3)\n lon_O3(i,0:npts_range_O3-1) = obslon(indexes_O3)\n ;print(\"O3: \"+npts_range_O3)\n end if\n\n if (.not.any(ismissing(indexes_NO2))) then\n npts_range_NO2 = dimsizes(indexes_NO2) ; # of points in this range.\n lat_NO2(i,0:npts_range_NO2-1) = obslat(indexes_NO2)\n lon_NO2(i,0:npts_range_NO2-1) = obslon(indexes_NO2)\n ;print(\"NO2: \"+npts_range_NO2)\n end if\n\n if (.not.any(ismissing(indexes_CO))) then\n npts_range_CO = dimsizes(indexes_CO) ; # of points in this range.\n lat_CO(i,0:npts_range_CO-1) = obslat(indexes_CO)\n lon_CO(i,0:npts_range_CO-1) = obslon(indexes_CO)\n ;print(\"CO: \"+npts_range_CO)\n end if\n\n if (.not.any(ismissing(indexes_PM25))) then\n npts_range_PM25 = dimsizes(indexes_PM25) ; # of points in this range.\n lat_PM25(i,0:npts_range_PM25-1) = obslat(indexes_PM25)\n lon_PM25(i,0:npts_range_PM25-1) = obslon(indexes_PM25)\n ;print(\"PM25: \"+npts_range_PM25)\n end if\n\n\n delete(indexes_O3) ; Necessary b/c \"indexes\" may be a different\n delete(indexes_NO2) ; size next time.\n delete(indexes_CO)\n delete(indexes_PM25)\n end do\n\n lat_O3@_FillValue = -999\n lon_O3@_FillValue = -999\n lat_NO2@_FillValue = -999\n lon_NO2@_FillValue = -999\n lat_CO@_FillValue = -999\n lon_CO@_FillValue = -999\n lat_PM25@_FillValue = -999\n lon_PM25@_FillValue = -999\n\n gsres = True\n gsres@gsMarkerIndex = 16 ; Use filled dots for markers.\n\n hollowres = True\n hollowres@gsMarkerIndex = 4\n hollowres@gsMarkerColor = \"black\"\n hollowres@gsMarkerSizeF = 0.008\n\n;;;;;;;;; Plot Ozone\n pname=plot_dir+\"/OBS-FORECAST_O3_\"+rundate+runtime\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n pmid_O3 = new(num_distinct_markers,graphic)\n hollow_O3 = new(num_distinct_markers,graphic)\n\n cnres@tiMainString = pdate+\" \"+runtime+\" UTC O~B~3~N~ (ppbV)\"\n cnres@cnLevelSelectionMode = \"ManualLevels\"\n cnres@cnMinLevelValF = 0.\n cnres@cnMaxLevelValF = 80\n cnres@cnLevelSpacingF = 4\n\n ;plot = gsn_csm_contour_map(wks,o3(it,:,:),res)\n map = gsn_csm_map(wks,mpres)\n contour = gsn_csm_contour(wks,o3(it,:,:),cnres)\n vector = 
gsn_csm_vector(wks,UV10(0,it,:,:),UV10(1,it,:,:),res_vc)\n overlay(map,contour)\n overlay(map,vector)\n\n pmid = new(num_distinct_markers,graphic)\n hollow = new(num_distinct_markers,graphic)\n do i = 0, num_distinct_markers-1\n if (.not.ismissing(lat_O3(i,0)))\n gsres@gsMarkerColor = colors(i,:)\n gsres@gsMarkerSizeF = 0.008\n gsres@gsMarkerThicknessF = 1\n pmid(i) = gsn_add_polymarker(wks,vector,lon_O3(i,:),lat_O3(i,:),gsres)\n hollow(i) = gsn_add_polymarker(wks,vector,lon_O3(i,:),lat_O3(i,:),hollowres)\n end if\n end do\n\n draw(map)\n frame(wks)\n delete(wks)\n delete(pmid_O3)\n delete(hollow_O3)\n system(\"composite -geometry 100x70+900+900 /groups/ESS/aalnaim/cmaq/mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\n\n;;;;;;;;; Plot NO2\n\n pmid_NO2 = new(num_distinct_markers,graphic)\n hollow_NO2 = new(num_distinct_markers,graphic)\n pname=plot_dir+\"/OBS-FORECAST_NO2_\"+rundate+runtime\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n cnres@tiMainString = pdate+\" \"+runtime+\" UTC NO~B~2~N~ (ppbV)\"\n cnres@cnLevelSelectionMode = \"ManualLevels\"\n cnres@cnMinLevelValF = 0.\n cnres@cnMaxLevelValF = 40\n cnres@cnLevelSpacingF = 2\n\n map = gsn_csm_map(wks,mpres)\n contour = gsn_csm_contour(wks,no2(it,:,:),cnres)\n vector = gsn_csm_vector(wks,UV10(0,it,:,:),UV10(1,it,:,:),res_vc)\n overlay(map,contour)\n overlay(map,vector)\n\n\n do i = 0, num_distinct_markers-1\n if (.not.ismissing(lat_NO2(i,0)))\n gsres@gsMarkerColor = colors(i,:)\n gsres@gsMarkerSizeF = 0.008\n gsres@gsMarkerThicknessF = 1\n pmid(i) = gsn_add_polymarker(wks,vector,lon_NO2(i,:),lat_NO2(i,:),gsres)\n hollow(i) = gsn_add_polymarker(wks,vector,lon_NO2(i,:),lat_NO2(i,:),hollowres)\n end if\n end do\n\n draw(map)\n frame(wks)\n delete(wks)\n delete(pmid_NO2)\n delete(hollow_NO2)\n system(\"composite -geometry 100x70+900+900 /groups/ESS/aalnaim/cmaq/mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\n\n;;;;;;;;; Plot CO\n\n pmid_CO = new(num_distinct_markers,graphic)\n hollow_CO = new(num_distinct_markers,graphic)\n pname=plot_dir+\"/OBS-FORECAST_CO_\"+rundate+runtime\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n cnres@tiMainString = pdate+\" \"+runtime+\" CO (ppbV)\"\n cnres@cnLevelSelectionMode = \"ManualLevels\"\n cnres@cnMinLevelValF = 0.\n cnres@cnMaxLevelValF = 1000\n cnres@cnLevelSpacingF = 50\n\n map = gsn_csm_map(wks,mpres)\n contour = gsn_csm_contour(wks,co(it,:,:),cnres)\n vector = gsn_csm_vector(wks,UV10(0,it,:,:),UV10(1,it,:,:),res_vc)\n overlay(map,contour)\n overlay(map,vector)\n\n\n do i = 0, num_distinct_markers-1\n if (.not.ismissing(lat_CO(i,0)))\n gsres@gsMarkerColor = colors(i,:)\n gsres@gsMarkerSizeF = 0.008\n gsres@gsMarkerThicknessF = 1\n pmid(i) = gsn_add_polymarker(wks,vector,lon_CO(i,:),lat_CO(i,:),gsres)\n hollow(i) = gsn_add_polymarker(wks,vector,lon_CO(i,:),lat_CO(i,:),hollowres)\n end if\n end do\n\n draw(map)\n frame(wks)\n delete(wks)\n delete(pmid_CO)\n delete(hollow_CO)\n system(\"composite -geometry 100x70+900+900 /groups/ESS/aalnaim/cmaq/mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\n\n;;;;;;;;; Plot PM2.5\n pname=plot_dir+\"/OBS-FORECAST_PM25_\"+rundate+runtime\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n pmid = new(num_distinct_markers,graphic)\n hollow = new(num_distinct_markers,graphic)\n\n cnres@tiMainString = pdate+\" \"+runtime+\" UTC PM~B~2.5~N~ (ug/m~S~3~N~)\"; ~C~ CMAQ with GBBEPx Fire Emissions\n; 
cnres@cnLevelSelectionMode = \"ManualLevels\"\n; cnres@cnMinLevelValF = 0.\n; cnres@cnMaxLevelValF = 100\n; cnres@cnLevelSpacingF = 5\n cnres@cnLevelSelectionMode = \"ExplicitLevels\"\n cnres@cnLevels = (/0,2,4,6,8,\\\n 10,12,14,16,18,\\\n 20,25,30,35,40,\\\n 50,60,70,80,90,\\\n 100/)\n\n ;plot = gsn_csm_contour_map(wks,pm25(it,:,:),res)\n\n map = gsn_csm_map(wks,mpres)\n contour = gsn_csm_contour(wks,pm25(it,:,:),cnres)\n vector = gsn_csm_vector(wks,UV10(0,it,:,:),UV10(1,it,:,:),res_vc)\n overlay(map,contour)\n overlay(map,vector)\n\n do i = 0, num_distinct_markers-1\n if (.not.ismissing(lat_PM25(i,0)))\n gsres@gsMarkerColor = colors(i,:)\n gsres@gsMarkerSizeF = 0.008\n gsres@gsMarkerThicknessF = 1\n pmid(i) = gsn_add_polymarker(wks,vector,lon_PM25(i,:),lat_PM25(i,:),gsres)\n hollow(i) = gsn_add_polymarker(wks,vector,lon_PM25(i,:),lat_PM25(i,:),hollowres)\n end if\n end do\n\n draw(map)\n frame(wks)\n delete(wks)\n system(\"composite -geometry 100x70+900+900 /groups/ESS/aalnaim/cmaq/mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\n\n delete(pmid)\n delete(hollow)\n delete(site)\n delete(sitename)\n delete(sitelat)\n delete(sitelon)\n delete(O3_obs)\n delete(NO2_obs)\n delete(CO_obs)\n delete(PM25_obs)\n delete(obslon)\n delete(obslat)\n delete(obsO3)\n delete(obsNO2)\n delete(obsCO)\n delete(obsPM25)\n delete([/lon_O3,lon_NO2,lon_CO,lon_PM25,lat_O3,lat_NO2,lat_CO,lat_PM25/])\n\nend do\ndelete(res)\n\n;/\n\nend\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\n\n# convert -delay 100 *.png 20220613_20220614.gif\n\nif [ $? -eq 0 ]; then\n echo \"Generating AirNow images/gif Completed Successfully\"\n\techo \"Removing ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nelse\n echo \"Generating AirNow images/gif Failed!\"\nfi\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\n\n\nVariable: plot_dir\nType: string\nTotal Size: 8 bytes\n 1 values\nNumber of Dimensions: 1\nDimensions and sizes:\t[1]\nCoordinates: \n(0)\t/groups/ESS/aalnaim/cmaq/plots\nwarning:_NclOpenFile: cannot open file ; No such file or directory\n\n\nfatal:file (cdf_file1) isn't defined\n\nfatal:[\"Execute.c\":8637]:Execute: Error occurred at or near line 29 in file /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\n\n\nGenerating AirNow images/gif Completed Successfully\nRemoving ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\n", + "history_begin_time" : 1655772560298, + "history_end_time" : 1656318421037, + "history_notes" : null, + "history_process" : "is1w3m", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "DVoDXtSqoPog", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=20220613 #This needs to be auto date\nexport stdate_post=2022-06-13 #This needs to be auto date\nexport eddate_post=2022-06-14 #This needs to be auto date\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files\"\nexport mcip_dir=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km\"\nexport graph_dir=\"/groups/ESS/aalnaim/cmaq/plots\"\n\nexport obs_dir_NCL=\"/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X\"\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nload 
\"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\n\ndate = getenv(\"YYYYMMDD_POST\") #SET! (20220613)\nd1 = getenv(\"stdate_post\") #SET! (2022-06-13)\nd2 = getenv(\"eddate_post\") #SET! (2022-06-14)\n\nobs_dir = getenv(\"obs_dir_NCL\") #SET! (/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X)\nplot_dir = getenv(\"graph_dir\") #SET! (/groups/ESS/aalnaim/cmaq/plots)\n\nhr=new(24,\"string\")\nhr=(/\"00\",\"01\",\"02\",\"03\",\"04\",\"05\",\"06\",\"07\",\"08\",\"09\",\"10\",\"11\",\"12\",\"13\",\"14\",\"15\",\"16\",\"17\",\"18\",\"19\",\"20\",\"21\",\"22\",\"23\"/)\n\nprint(plot_dir)\naconc_dir = getenv(\"postdata_dir\") #SET! (/groups/ESS/aalnaim/cmaq/prediction_nc_files/)\ngrid_dir = getenv(\"mcip_dir\") #SET! (/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km)\n\ncdf_file1 = addfile(aconc_dir+\"/COMBINE3D_ACONC_v531_gcc_AQF5X_\"+date+\"_ML_extracted.nc\",\"r\")\ncdf_file= addfile(grid_dir+\"/GRIDCRO2D_\"+date+\".nc\",\"r\")\ncdf_file2= addfile(grid_dir+\"/METCRO2D_\"+date+\".nc\",\"r\")\n\ntime = cdf_file1->TFLAG(:,0,:)\no3 = cdf_file1->O3(:,:,:) ;ppb\n;no2 = cdf_file1->NO2(:,0,:,:)\n;co = cdf_file1->CO(:,0,:,:)\n;pm25 = cdf_file1->PM25_TOT(:,0,:,:)\nwspd10=cdf_file2->WSPD10(:,0,:,:)\nwdir10=cdf_file2->WDIR10(:,0,:,:)\n\ntemp = cdf_file2->TEMP2\n\nnt = dimsizes(o3(:,0,0))\nny = dimsizes(o3(0,:,0))\nnx = dimsizes(o3(0,0,:))\n\nprint(max(temp))\nprint(min(temp))\nprint(avg(temp))\n\n\n\nprint(nt+\" \"+ny+\" \"+nx)\nprint(max(o3))\nprint(min(o3))\nprint(avg(o3))\nprint(max(co))\nprint(min(co))\nprint(avg(co))\n\nlat = cdf_file->LAT(0,0,:,:)\nlon = cdf_file->LON(0,0,:,:)\n\no3@lat2d = lat\no3@lon2d = lon\no3@unit = \"ppbv\"\n\nno2@lat2d = lat\nno2@lon2d = lon\nno2@unit = \"ppbv\"\nco@lat2d = lat\nco@lon2d = lon\nco@unit = \"ppbv\"\npm25@lat2d = lat\npm25@lon2d = lon\n\nUV10=wind_component(wspd10,wdir10,0)\nUV10@lat2d = lat\nUV10@lon2d = lon\n\n\nres = True\nres@gsnMaximize = True ; maximize pot in frame\nres@gsnFrame = False ; dont advance frame\nres@gsnDraw = False\nres@gsnLeftString = \"\"\nres@gsnRightString = \"\"\nres@txFont = \"times-roman\"\nres@tiMainFont = \"times-roman\"\n;res@tiMainFontHeightF = 0.02\n;res@vpWidthF = 0.7\n;res@vpHeightF = 0.7\n\n;;set map;;\nmpres = res\nmpres@mpLimitMode = \"LatLon\"\nmpres@mpDataSetName = \"Earth..4\"\nmpres@mpDataBaseVersion = \"MediumRes\"\nmpres@mpOutlineOn = True\nmpres@mpGeophysicalLineThicknessF = 1.5\nmpres@mpFillDrawOrder = \"PostDraw\"\nmpres@mpFillOn = False\nmpres@mpAreaMaskingOn = True\nmpres@mpOutlineBoundarySets = \"GeophysicalAndUSStates\"\nmpres@mpOutlineSpecifiers = \"United States:States\"\nmpres@mpProjection = \"LambertConformal\"\nmpres@mpLambertParallel1F = 33.\nmpres@mpLambertParallel2F = 45.\nmpres@mpLambertMeridianF = -98.\nmpres@mpMinLonF = -120 ;min(lon)+0.2\nmpres@mpMaxLonF = -70 ;max(lon)-0.2\nmpres@mpMinLatF = 25 ;min(lat)+0.05\nmpres@mpMaxLatF = 50 ;max(lat)-0.05\nmpres@pmTickMarkDisplayMode = \"Always\"\nmpres@mpLandFillColor = \"white\"\nmpres@mpInlandWaterFillColor = \"white\"\nmpres@mpOceanFillColor = \"white\"\nmpres@mpGeophysicalLineColor = 
\"Black\"\n\n;mpres@lbLabelAutoStride = True\nmpres@tiXAxisFont = 25\nmpres@pmTickMarkDisplayMode = \"Always\"\nmpres@tmXBLabelFont = 25\nmpres@tmXBLabelFontHeightF = 0.013\nmpres@tmXBLabelDeltaF = -0.5\nmpres@tmYLLabelFont = 25\nmpres@tmYLLabelFontHeightF = 0.013\nmpres@tmXBLabelDeltaF = -0.5\nmpres@tmXTLabelsOn = False\nmpres@tmXTLabelFont = 25\nmpres@tmXTLabelFontHeightF = 0.013\nmpres@tmYRLabelsOn = False\nmpres@tmYRLabelFont = 25\nmpres@tmYRLabelFontHeightF = 0.013\n\n;;set contour;;\ncnres = res\ncnres@cnFillDrawOrder = \"PreDraw\"\ncnres@cnFillOn = True\ncnres@cnLinesOn = False\ncnres@cnLineLabelsOn = False\ncnres@lbLabelFont = 25\ncnres@lbLabelFontHeightF = 0.013\ncnres@tiXAxisFont = 25\ncnres@pmLabelBarWidthF = 0.5\ncnres@pmLabelBarHeightF = 0.1\n;cnres@pmLabelBarOrthogonalPosF = -0.02\ncnres@lbLabelAutoStride = True\n\n;set vector;;\nres_vc = res\nres_vc@vcGlyphStyle = \"LineArrow\"\nres_vc@vcLineArrowThicknessF = 3\nres_vc@vcMinDistanceF = 0.03\nres_vc@vcRefLengthF = 0.03\nres_vc@vcRefAnnoOn = True\nres_vc@vcRefMagnitudeF = 16\nres_vc@vcRefAnnoString1 = \"16m/s\"\nres_vc@vcRefAnnoSide = \"Top\"\nres_vc@vcRefAnnoString2On = False\nres_vc@vcRefAnnoPerimOn = False\nres_vc@vcRefAnnoOrthogonalPosF = -0.02\nres_vc@vcRefAnnoParallelPosF = 0.999\n;res_vc@vcRefAnnoBackgroundColor = \"White\"\nres_vc@vcVectorDrawOrder = \"PostDraw\"\n\ndo it = 0, nt-1\n if (it .lt. 12) then\n pdate=d1\n else\n pdate=d2\n end if\n\n ;print(time(it,0)+\" \"+time(it,1))\n rundate = yyyyddd_to_yyyymmdd( time(it,0) )\n runtime = hr( tointeger(time(it,1)/10000) )\n\n site = readAsciiTable(obs_dir+\"/AQF5X_Hourly_\"+rundate+runtime+\".dat\",1,\"string\",1)\n nrows = dimsizes(site)\n sitename = str_get_field(site,1,\",\")\n sitelat = stringtofloat(str_get_field(site,2,\",\"))\n sitelon = stringtofloat(str_get_field(site,3,\",\"))\n O3_obs = stringtofloat(str_get_field(site,4,\",\"))\n NO2_obs = stringtofloat(str_get_field(site,5,\",\"))\n CO_obs = stringtofloat(str_get_field(site,6,\",\"))\n PM25_obs = stringtofloat(str_get_field(site,7,\",\"))\n\n obslon = sitelon(:,0)\n obslat = sitelat(:,0)\n obsO3 = O3_obs(:,0)\n obsNO2 = NO2_obs(:,0)\n obsCO = CO_obs(:,0)*1000 ;ppb\n obsPM25 = PM25_obs(:,0)\n\n\n npts = nrows(0)\n\n do ii = 0, npts-1\n if(obsCO(ii) .le. 0) then\n obsCO(ii) = -999.0\n end if\n end do\n\n obsO3@_FillValue = -999.\n obsNO2@_FillValue = -999.\n obsCO@_FillValue = -999.\n obsPM25@_FillValue = -999.\n\n;--- levels for dividing\n levels_O3 = ispan(0,80,4)\n levels_NO2 = ispan(0,40,2)\n levels_CO = ispan(0,1000,50)\n levels_PM25 = (/0,2,4,6,8,10,12,14,16,18,20,25,30,35,40,50,60,70,80,90,100/) ;ispan(0,100,5)\n\n nlevels = dimsizes(levels_O3)\n\n colors = span_color_rgba(\"WhiteBlueGreenYellowRed\",nlevels+1)\n\n num_distinct_markers = nlevels+1 ; number of distinct markers\n lat_O3 = new((/num_distinct_markers,npts/),float)\n lon_O3 = new((/num_distinct_markers,npts/),float)\n lat_O3 = -999\n lon_O3 = -999\n\n lat_NO2 = new((/num_distinct_markers,npts/),float)\n lon_NO2 = new((/num_distinct_markers,npts/),float)\n lat_NO2 = -999\n lon_NO2 = -999\n\n lat_CO = new((/num_distinct_markers,npts/),float)\n lon_CO = new((/num_distinct_markers,npts/),float)\n lat_CO = -999\n lon_CO = -999\n\n lat_PM25 = new((/num_distinct_markers,npts/),float)\n lon_PM25 = new((/num_distinct_markers,npts/),float)\n lat_PM25 = -999\n lon_PM25 = -999\n\n\n;\n; Group the points according to which range they fall in. 
At the\n; same time, create the label that we will use later in the labelbar\n;\n do i = 0, num_distinct_markers-1\n if (i.eq.0) then\n indexes_O3 = ind(obsO3(:).lt.levels_O3(0))\n indexes_NO2 = ind(obsNO2(:).lt.levels_NO2(0))\n indexes_CO = ind(obsCO(:).lt.levels_CO(0))\n indexes_PM25 = ind(obsPM25(:).lt.levels_PM25(0))\n end if\n if (i.eq.num_distinct_markers-1) then\n indexes_O3 = ind(obsO3(:).ge.max(levels_O3))\n indexes_NO2 = ind(obsNO2(:).ge.max(levels_NO2))\n indexes_CO = ind(obsCO(:).ge.max(levels_CO))\n indexes_PM25 = ind(obsPM25(:).ge.max(levels_PM25))\n end if\n if (i.gt.0.and.i.lt.num_distinct_markers-1) then\n indexes_O3 = ind(obsO3(:).ge.levels_O3(i-1).and.obsO3(:).lt.levels_O3(i))\n indexes_NO2 = ind(obsNO2(:).ge.levels_NO2(i-1).and.obsNO2(:).lt.levels_NO2(i))\n indexes_CO = ind(obsCO(:).ge.levels_CO(i-1).and.obsCO(:).lt.levels_CO(i))\n indexes_PM25 = ind(obsPM25(:).ge.levels_PM25(i-1).and.obsPM25(:).lt.levels_PM25(i))\n end if\n\n;\n; Now that we have the set of indexes whose values fall within\n; the given range, take the corresponding lat/lon values and store\n; them, so later we can color this set of markers with the appropriate\n; color.\n;\n ;print(indexes_O3+\" \"+indexes_CO+\" \"+indexes_NO2)\n\n if (.not.any(ismissing(indexes_O3))) then\n npts_range_O3 = dimsizes(indexes_O3) ; # of points in this range.\n\n lat_O3(i,0:npts_range_O3-1) = obslat(indexes_O3)\n lon_O3(i,0:npts_range_O3-1) = obslon(indexes_O3)\n ;print(\"O3: \"+npts_range_O3)\n end if\n\n if (.not.any(ismissing(indexes_NO2))) then\n npts_range_NO2 = dimsizes(indexes_NO2) ; # of points in this range.\n lat_NO2(i,0:npts_range_NO2-1) = obslat(indexes_NO2)\n lon_NO2(i,0:npts_range_NO2-1) = obslon(indexes_NO2)\n ;print(\"NO2: \"+npts_range_NO2)\n end if\n\n if (.not.any(ismissing(indexes_CO))) then\n npts_range_CO = dimsizes(indexes_CO) ; # of points in this range.\n lat_CO(i,0:npts_range_CO-1) = obslat(indexes_CO)\n lon_CO(i,0:npts_range_CO-1) = obslon(indexes_CO)\n ;print(\"CO: \"+npts_range_CO)\n end if\n\n if (.not.any(ismissing(indexes_PM25))) then\n npts_range_PM25 = dimsizes(indexes_PM25) ; # of points in this range.\n lat_PM25(i,0:npts_range_PM25-1) = obslat(indexes_PM25)\n lon_PM25(i,0:npts_range_PM25-1) = obslon(indexes_PM25)\n ;print(\"PM25: \"+npts_range_PM25)\n end if\n\n\n delete(indexes_O3) ; Necessary b/c \"indexes\" may be a different\n delete(indexes_NO2) ; size next time.\n delete(indexes_CO)\n delete(indexes_PM25)\n end do\n\n lat_O3@_FillValue = -999\n lon_O3@_FillValue = -999\n lat_NO2@_FillValue = -999\n lon_NO2@_FillValue = -999\n lat_CO@_FillValue = -999\n lon_CO@_FillValue = -999\n lat_PM25@_FillValue = -999\n lon_PM25@_FillValue = -999\n\n gsres = True\n gsres@gsMarkerIndex = 16 ; Use filled dots for markers.\n\n hollowres = True\n hollowres@gsMarkerIndex = 4\n hollowres@gsMarkerColor = \"black\"\n hollowres@gsMarkerSizeF = 0.008\n\n;;;;;;;;; Plot Ozone\n pname=plot_dir+\"/OBS-FORECAST_O3_\"+rundate+runtime\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n pmid_O3 = new(num_distinct_markers,graphic)\n hollow_O3 = new(num_distinct_markers,graphic)\n\n cnres@tiMainString = pdate+\" \"+runtime+\" UTC O~B~3~N~ (ppbV)\"\n cnres@cnLevelSelectionMode = \"ManualLevels\"\n cnres@cnMinLevelValF = 0.\n cnres@cnMaxLevelValF = 80\n cnres@cnLevelSpacingF = 4\n\n ;plot = gsn_csm_contour_map(wks,o3(it,:,:),res)\n map = gsn_csm_map(wks,mpres)\n contour = gsn_csm_contour(wks,o3(it,:,:),cnres)\n vector = 
gsn_csm_vector(wks,UV10(0,it,:,:),UV10(1,it,:,:),res_vc)\n overlay(map,contour)\n overlay(map,vector)\n\n pmid = new(num_distinct_markers,graphic)\n hollow = new(num_distinct_markers,graphic)\n do i = 0, num_distinct_markers-1\n if (.not.ismissing(lat_O3(i,0)))\n gsres@gsMarkerColor = colors(i,:)\n gsres@gsMarkerSizeF = 0.008\n gsres@gsMarkerThicknessF = 1\n pmid(i) = gsn_add_polymarker(wks,vector,lon_O3(i,:),lat_O3(i,:),gsres)\n hollow(i) = gsn_add_polymarker(wks,vector,lon_O3(i,:),lat_O3(i,:),hollowres)\n end if\n end do\n\n draw(map)\n frame(wks)\n delete(wks)\n delete(pmid_O3)\n delete(hollow_O3)\n system(\"composite -geometry 100x70+900+900 /groups/ESS/aalnaim/cmaq/mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\n\n;;;;;;;;; Plot NO2\n\n pmid_NO2 = new(num_distinct_markers,graphic)\n hollow_NO2 = new(num_distinct_markers,graphic)\n pname=plot_dir+\"/OBS-FORECAST_NO2_\"+rundate+runtime\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n cnres@tiMainString = pdate+\" \"+runtime+\" UTC NO~B~2~N~ (ppbV)\"\n cnres@cnLevelSelectionMode = \"ManualLevels\"\n cnres@cnMinLevelValF = 0.\n cnres@cnMaxLevelValF = 40\n cnres@cnLevelSpacingF = 2\n\n map = gsn_csm_map(wks,mpres)\n contour = gsn_csm_contour(wks,no2(it,:,:),cnres)\n vector = gsn_csm_vector(wks,UV10(0,it,:,:),UV10(1,it,:,:),res_vc)\n overlay(map,contour)\n overlay(map,vector)\n\n\n do i = 0, num_distinct_markers-1\n if (.not.ismissing(lat_NO2(i,0)))\n gsres@gsMarkerColor = colors(i,:)\n gsres@gsMarkerSizeF = 0.008\n gsres@gsMarkerThicknessF = 1\n pmid(i) = gsn_add_polymarker(wks,vector,lon_NO2(i,:),lat_NO2(i,:),gsres)\n hollow(i) = gsn_add_polymarker(wks,vector,lon_NO2(i,:),lat_NO2(i,:),hollowres)\n end if\n end do\n\n draw(map)\n frame(wks)\n delete(wks)\n delete(pmid_NO2)\n delete(hollow_NO2)\n system(\"composite -geometry 100x70+900+900 /groups/ESS/aalnaim/cmaq/mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\n\n;;;;;;;;; Plot CO\n\n pmid_CO = new(num_distinct_markers,graphic)\n hollow_CO = new(num_distinct_markers,graphic)\n pname=plot_dir+\"/OBS-FORECAST_CO_\"+rundate+runtime\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n cnres@tiMainString = pdate+\" \"+runtime+\" CO (ppbV)\"\n cnres@cnLevelSelectionMode = \"ManualLevels\"\n cnres@cnMinLevelValF = 0.\n cnres@cnMaxLevelValF = 1000\n cnres@cnLevelSpacingF = 50\n\n map = gsn_csm_map(wks,mpres)\n contour = gsn_csm_contour(wks,co(it,:,:),cnres)\n vector = gsn_csm_vector(wks,UV10(0,it,:,:),UV10(1,it,:,:),res_vc)\n overlay(map,contour)\n overlay(map,vector)\n\n\n do i = 0, num_distinct_markers-1\n if (.not.ismissing(lat_CO(i,0)))\n gsres@gsMarkerColor = colors(i,:)\n gsres@gsMarkerSizeF = 0.008\n gsres@gsMarkerThicknessF = 1\n pmid(i) = gsn_add_polymarker(wks,vector,lon_CO(i,:),lat_CO(i,:),gsres)\n hollow(i) = gsn_add_polymarker(wks,vector,lon_CO(i,:),lat_CO(i,:),hollowres)\n end if\n end do\n\n draw(map)\n frame(wks)\n delete(wks)\n delete(pmid_CO)\n delete(hollow_CO)\n system(\"composite -geometry 100x70+900+900 /groups/ESS/aalnaim/cmaq/mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\n\n;;;;;;;;; Plot PM2.5\n pname=plot_dir+\"/OBS-FORECAST_PM25_\"+rundate+runtime\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n pmid = new(num_distinct_markers,graphic)\n hollow = new(num_distinct_markers,graphic)\n\n cnres@tiMainString = pdate+\" \"+runtime+\" UTC PM~B~2.5~N~ (ug/m~S~3~N~)\"; ~C~ CMAQ with GBBEPx Fire Emissions\n; 
cnres@cnLevelSelectionMode = \"ManualLevels\"\n; cnres@cnMinLevelValF = 0.\n; cnres@cnMaxLevelValF = 100\n; cnres@cnLevelSpacingF = 5\n cnres@cnLevelSelectionMode = \"ExplicitLevels\"\n cnres@cnLevels = (/0,2,4,6,8,\\\n 10,12,14,16,18,\\\n 20,25,30,35,40,\\\n 50,60,70,80,90,\\\n 100/)\n\n ;plot = gsn_csm_contour_map(wks,pm25(it,:,:),res)\n\n map = gsn_csm_map(wks,mpres)\n contour = gsn_csm_contour(wks,pm25(it,:,:),cnres)\n vector = gsn_csm_vector(wks,UV10(0,it,:,:),UV10(1,it,:,:),res_vc)\n overlay(map,contour)\n overlay(map,vector)\n\n do i = 0, num_distinct_markers-1\n if (.not.ismissing(lat_PM25(i,0)))\n gsres@gsMarkerColor = colors(i,:)\n gsres@gsMarkerSizeF = 0.008\n gsres@gsMarkerThicknessF = 1\n pmid(i) = gsn_add_polymarker(wks,vector,lon_PM25(i,:),lat_PM25(i,:),gsres)\n hollow(i) = gsn_add_polymarker(wks,vector,lon_PM25(i,:),lat_PM25(i,:),hollowres)\n end if\n end do\n\n draw(map)\n frame(wks)\n delete(wks)\n system(\"composite -geometry 100x70+900+900 /groups/ESS/aalnaim/cmaq/mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\n\n delete(pmid)\n delete(hollow)\n delete(site)\n delete(sitename)\n delete(sitelat)\n delete(sitelon)\n delete(O3_obs)\n delete(NO2_obs)\n delete(CO_obs)\n delete(PM25_obs)\n delete(obslon)\n delete(obslat)\n delete(obsO3)\n delete(obsNO2)\n delete(obsCO)\n delete(obsPM25)\n delete([/lon_O3,lon_NO2,lon_CO,lon_PM25,lat_O3,lat_NO2,lat_CO,lat_PM25/])\n\nend do\ndelete(res)\n\n;/\n\nend\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\n\n# convert -delay 100 *.png 20220613_20220614.gif\n\nif [ $? -eq 0 ]; then\n echo \"Generating AirNow images/gif Completed Successfully\"\n\techo \"Removing ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nelse\n echo \"Generating AirNow images/gif Failed!\"\nfi\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\nfatal:syntax error: line 15 in file /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl before or near ) \nobs_dir = getenv(\"obs_dir_NCL\") #SET! (/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X)\n-----------------------------------------------------------------------------------------------^\n\n\nfatal:syntax error: line 16 in file /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl before or near ) \nplot_dir = getenv(\"graph_dir\") #SET! (/groups/ESS/aalnaim/cmaq/plots)\n--------------------------------------------------------------------^\n\n\nfatal:syntax error: line 23 in file /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl before or near km \ngrid_dir = getenv(\"mcip_dir\") #SET! 
(/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km\n----------------------------------------------------------------------------------------------^\n\n\nfatal:Syntax Error in block, block not executed\n\nfatal:error at line 500 in file /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\n\n\nGenerating AirNow images/gif Completed Successfully\nRemoving ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\n", + "history_begin_time" : 1655772499290, + "history_end_time" : 1656318420478, + "history_notes" : null, + "history_process" : "is1w3m", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "80uk9m9po5w", + "history_input" : null, + "history_output" : null, + "history_begin_time" : null, + "history_end_time" : 1656428429367, + "history_notes" : null, + "history_process" : "is1w3m", + "host_id" : "p6wvf2", + "indicator" : "Stopped" +},{ + "history_id" : "vf16mbs2yqi", + "history_input" : null, + "history_output" : null, + "history_begin_time" : null, + "history_end_time" : 1656428429374, + "history_notes" : null, + "history_process" : "is1w3m", + "host_id" : "p6wvf2", + "indicator" : "Stopped" +},{ + "history_id" : "71qdytphclv", + "history_input" : null, + "history_output" : null, + "history_begin_time" : null, + "history_end_time" : 1656450968055, + "history_notes" : null, + "history_process" : "is1w3m", + "host_id" : "p6wvf2", + "indicator" : "Stopped" +},{ + "history_id" : "qc0f1msqrxb", + "history_input" : null, + "history_output" : null, + "history_begin_time" : null, + "history_end_time" : 1656450968059, + "history_notes" : null, + "history_process" : "is1w3m", + "host_id" : "p6wvf2", + "indicator" : "Stopped" +},{ + "history_id" : "7sp4kvy7u51", + "history_input" : null, + "history_output" : null, + "history_begin_time" : null, + "history_end_time" : 1656452485898, + "history_notes" : null, + "history_process" : "is1w3m", + "host_id" : "p6wvf2", + "indicator" : "Stopped" +},{ + "history_id" : "mmqmr29btkk", + "history_input" : null, + "history_output" : null, + "history_begin_time" : null, + "history_end_time" : 1656452485903, + "history_notes" : null, + "history_process" : "is1w3m", + "host_id" : "p6wvf2", + "indicator" : "Stopped" +},] diff --git a/history/process_l8vlic.json b/history/process_l8vlic.json new file mode 100644 index 0000000..1b4aba3 --- /dev/null +++ b/history/process_l8vlic.json @@ -0,0 +1,381 @@ +[{ + "history_id" : "5QrVV4w4VsKQ", + "history_input" : "\n# Importing necessary libraries\nimport pandas as pd\nimport pickle\nfrom pathlib import Path\nfrom time import sleep\n\n# home directory\nhome = str(Path.home())\n# importing data\nfinal=pd.read_csv(\"/groups/ESS/aalnaim/cmaq/testing.csv\")\nprint(final.head())\nX = final.drop(['YYYYMMDDHH','Latitude','Longitude',],axis=1)\n# defining testing variables\n# processing test data\n\n# load the model from disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rfOLD_Jun13.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\nloaded_model = pickle.load(open(filename, 'rb'))\n\n# making prediction\npred = loaded_model.predict(X)\n\n# adding prediction values to test dataset\nfinal['prediction'] = pred.tolist()\n\nfinal = final[['Latitude', 'Longitude','YYYYMMDDHH','prediction']]\n# saving the dataset into local drive\nfinal.to_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv',index=False)", + "history_output" : " Latitude Longitude YYYYMMDDHH ... month day hours\n0 21.829086 -120.620790 2022062612 ... 6 26 12\n1 21.855751 -120.512500 2022062612 ... 
6 26 12\n2 21.882309 -120.404144 2022062612 ... 6 26 12\n3 21.908745 -120.295715 2022062612 ... 6 26 12\n4 21.935051 -120.187225 2022062612 ... 6 26 12\n\n[5 rows x 18 columns]\n", + "history_begin_time" : 1656452542800, + "history_end_time" : 1656454945624, + "history_notes" : null, + "history_process" : "l8vlic", + "host_id" : null, + "indicator" : "Failed" +},{ + "history_id" : "gxs47n3fu63", + "history_input" : "\n# Importing necessary libraries\nimport pandas as pd\nimport pickle\nfrom pathlib import Path\nfrom time import sleep\n\n# home directory\nhome = str(Path.home())\n# importing data\nfinal=pd.read_csv(\"/groups/ESS/aalnaim/cmaq/testing.csv\")\nprint(final.head())\nX = final.drop(['YYYYMMDDHH','Latitude','Longitude',],axis=1)\n# defining testing variables\n# processing test data\n\n# load the model from disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rfOLD_Jun13.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\nloaded_model = pickle.load(open(filename, 'rb'))\n\n# making prediction\npred = loaded_model.predict(X)\n\n# adding prediction values to test dataset\nfinal['prediction'] = pred.tolist()\n\nfinal = final[['Latitude', 'Longitude','YYYYMMDDHH','prediction']]\n# saving the dataset into local drive\nfinal.to_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv',index=False)", + "history_output" : " Latitude Longitude YYYYMMDDHH ... month day hours\n0 21.829086 -120.620790 2022062612 ... 6 26 12\n1 21.855751 -120.512500 2022062612 ... 6 26 12\n2 21.882309 -120.404144 2022062612 ... 6 26 12\n3 21.908745 -120.295715 2022062612 ... 6 26 12\n4 21.935051 -120.187225 2022062612 ... 6 26 12\n\n[5 rows x 18 columns]\n", + "history_begin_time" : 1656451082067, + "history_end_time" : 1656452485876, + "history_notes" : null, + "history_process" : "l8vlic", + "host_id" : "p6wvf2", + "indicator" : "Stopped" +},{ + "history_id" : "qaas9oll99a", + "history_input" : "\n# Importing necessary libraries\nimport pandas as pd\nimport pickle\nfrom pathlib import Path\nfrom time import sleep\n\n# home directory\nhome = str(Path.home())\n# importing data\nfinal=pd.read_csv(\"/groups/ESS/aalnaim/cmaq/testing.csv\")\nprint(final.head())\nX = final.drop(['YYYYMMDDHH','Latitude','Longitude',],axis=1)\n# defining testing variables\n# processing test data\n\n# load the model from disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rfOLD_Jun13.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\nloaded_model = pickle.load(open(filename, 'rb'))\n\n# making prediction\npred = loaded_model.predict(X)\n\n# adding prediction values to test dataset\nfinal['prediction'] = pred.tolist()\n\nfinal = final[['Latitude', 'Longitude','YYYYMMDDHH','prediction']]\n# saving the dataset into local drive\nfinal.to_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv',index=False)", + "history_output" : " Latitude Longitude YYYYMMDDHH ... month day hours\n0 21.829086 -120.620790 2022062612 ... 6 26 12\n1 21.855751 -120.512500 2022062612 ... 6 26 12\n2 21.882309 -120.404144 2022062612 ... 6 26 12\n3 21.908745 -120.295715 2022062612 ... 6 26 12\n4 21.935051 -120.187225 2022062612 ... 
6 26 12\n\n[5 rows x 18 columns]\n", + "history_begin_time" : 1656428855032, + "history_end_time" : 1656450968023, + "history_notes" : null, + "history_process" : "l8vlic", + "host_id" : "p6wvf2", + "indicator" : "Stopped" +},{ + "history_id" : "n6lv58mihra", + "history_input" : "\n# Importing necessary libraries\nimport pandas as pd\nimport pickle\nfrom pathlib import Path\nfrom time import sleep\n\n# home directory\nhome = str(Path.home())\n# importing data\nfinal=pd.read_csv(\"/groups/ESS/aalnaim/cmaq/testing.csv\")\nprint(final.head())\nX = final.drop(['YYYYMMDDHH','Latitude','Longitude',],axis=1)\n# defining testing variables\n# processing test data\n\n# load the model from disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rfOLD_Jun13.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\nloaded_model = pickle.load(open(filename, 'rb'))\n\n# making prediction\npred = loaded_model.predict(X)\n\n# adding prediction values to test dataset\nfinal['prediction'] = pred.tolist()\n\nfinal = final[['Latitude', 'Longitude','YYYYMMDDHH','prediction']]\n# saving the dataset into local drive\nfinal.to_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv',index=False)", + "history_output" : "bash: python: command not found\n", + "history_begin_time" : 1656428421513, + "history_end_time" : 1656428429351, + "history_notes" : null, + "history_process" : "l8vlic", + "host_id" : "p6wvf2", + "indicator" : "Stopped" +},{ + "history_id" : "oqammci4uiB0", + "history_input" : "\n# Importing necessary libraries\nimport pandas as pd\nimport pickle\nfrom pathlib import Path\nfrom time import sleep\n\n# home directory\nhome = str(Path.home())\n# importing data\nfinal=pd.read_csv(\"/groups/ESS/aalnaim/cmaq/testing.csv\")\nprint(final.head())\nX = final.drop(['YYYYMMDDHH','Latitude','Longitude',],axis=1)\n# defining testing variables\n# processing test data\n\n# load the model from disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rfOLD_Jun13.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\nloaded_model = pickle.load(open(filename, 'rb'))\n\n# making prediction\npred = loaded_model.predict(X)\n\n# adding prediction values to test dataset\nfinal['prediction'] = pred.tolist()\n\nfinal = final[['Latitude', 'Longitude','YYYYMMDDHH','prediction']]\n# saving the dataset into local drive\nfinal.to_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv',index=False)", + "history_output" : " Latitude Longitude YYYYMMDDHH ... month day hours\n0 21.829086 -120.620790 2022061512 ... 6 15 12\n1 21.855751 -120.512500 2022061512 ... 6 15 12\n2 21.882309 -120.404144 2022061512 ... 6 15 12\n3 21.908745 -120.295715 2022061512 ... 6 15 12\n4 21.935051 -120.187225 2022061512 ... 
6 15 12\n\n[5 rows x 18 columns]\n", + "history_begin_time" : 1655491234180, + "history_end_time" : 1655493655877, + "history_notes" : null, + "history_process" : "l8vlic", + "host_id" : null, + "indicator" : "Failed" +},{ + "history_id" : "6r2vEWyuxllb", + "history_input" : "\n# Importing necessary libraries\nimport pandas as pd\nimport pickle\nfrom pathlib import Path\nfrom time import sleep\n\n# home directory\nhome = str(Path.home())\n# importing data\nfinal=pd.read_csv(\"/groups/ESS/aalnaim/cmaq/testing.csv\")\nprint(final.head())\nX = final.drop(['YYYYMMDDHH','Latitude','Longitude',],axis=1)\n# defining testing variables\n# processing test data\n\n# load the model from disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rfOLD_Jun13.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\nloaded_model = pickle.load(open(filename, 'rb'))\n\n# making prediction\npred = loaded_model.predict(X)\n\n# adding prediction values to test dataset\nfinal['prediction'] = pred.tolist()\n\nfinal = final[['Latitude', 'Longitude','YYYYMMDDHH','prediction']]\n# saving the dataset into local drive\nfinal.to_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf_Jun13.csv',index=False)", + "history_output" : " Latitude Longitude YYYYMMDDHH ... month day hours\n0 21.829086 -120.620790 2022061312 ... 6 13 12\n1 21.855751 -120.512500 2022061312 ... 6 13 12\n2 21.882309 -120.404144 2022061312 ... 6 13 12\n3 21.908745 -120.295715 2022061312 ... 6 13 12\n4 21.935051 -120.187225 2022061312 ... 6 13 12\n\n[5 rows x 18 columns]\n", + "history_begin_time" : 1655311137931, + "history_end_time" : 1655492565567, + "history_notes" : null, + "history_process" : "l8vlic", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "MPLxqZyo112P", + "history_input" : "\n# Importing necessary libraries\nimport pandas as pd\nimport pickle\nfrom pathlib import Path\nfrom time import sleep\n\n# home directory\nhome = str(Path.home())\n# importing data\nfinal=pd.read_csv(\"/groups/ESS/aalnaim/cmaq/testing.csv\")\nprint(final.head())\nX = final.drop(['YYYYMMDDHH'],axis=1)\n# defining testing variables\n# processing test data\n\n# load the model from disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rfOLD_Jun13.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\nloaded_model = pickle.load(open(filename, 'rb'))\n\n# making prediction\npred = loaded_model.predict(X)\n\n# adding prediction values to test dataset\nfinal['prediction'] = pred.tolist()\n\nfinal = final[['Latitude', 'Longitude','YYYYMMDDHH','prediction']]\n# saving the dataset into local drive\nfinal.to_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf_Jun13.csv',index=False)", + "history_output" : " Latitude Longitude YYYYMMDDHH ... month day hours\n0 21.829086 -120.620790 2022061312 ... 6 13 12\n1 21.855751 -120.512500 2022061312 ... 6 13 12\n2 21.882309 -120.404144 2022061312 ... 6 13 12\n3 21.908745 -120.295715 2022061312 ... 6 13 12\n4 21.935051 -120.187225 2022061312 ... 6 13 12\n\n[5 rows x 18 columns]\n/home/aalnaim/CMAQAI/lib/python3.8/site-packages/sklearn/base.py:493: FutureWarning: The feature names should match those that were passed during fit. 
Starting version 1.2, an error will be raised.\nFeature names unseen at fit time:\n- Latitude\n- Longitude\nFeature names must be in the same order as they were in fit.\n\n warnings.warn(message, FutureWarning)\nTraceback (most recent call last):\n File \"rf_prediction.py\", line 23, in \n pred = loaded_model.predict(X)\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/sklearn/ensemble/_forest.py\", line 971, in predict\n X = self._validate_X_predict(X)\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/sklearn/ensemble/_forest.py\", line 579, in _validate_X_predict\n X = self._validate_data(X, dtype=DTYPE, accept_sparse=\"csr\", reset=False)\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/sklearn/base.py\", line 585, in _validate_data\n self._check_n_features(X, reset=reset)\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/sklearn/base.py\", line 400, in _check_n_features\n raise ValueError(\nValueError: X has 17 features, but RandomForestRegressor is expecting 15 features as input.\n", + "history_begin_time" : 1655310092117, + "history_end_time" : 1655311068120, + "history_notes" : null, + "history_process" : "l8vlic", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "5QklzkhXD2lo", + "history_input" : "\n# Importing necessary libraries\nimport pandas as pd\nimport pickle\nfrom pathlib import Path\nfrom time import sleep\n\n# home directory\nhome = str(Path.home())\n# importing data\nfinal=pd.read_csv(\"/groups/ESS/aalnaim/cmaq/testing.csv\")\nprint(final.head())\nX = final.drop(['YYYYMMDDHH'],axis=1)\n# defining testing variables\n# processing test data\n\n# load the model from disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rfOLD_Jun13.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\nloaded_model = pickle.load(open(filename, 'rb'))\n\n# making prediction\npred = loaded_model.predict(X)\n\n# adding prediction values to test dataset\nfinal['prediction'] = pred.tolist()\n\nfinal = final[['Latitude', 'Longitude','YYYYMMDDHH','prediction']]\n# saving the dataset into local drive\nfinal.to_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf_Jun13.csv',index=False)", + "history_output" : " Latitude Longitude YYYYMMDDHH ... month day hours\n0 21.829086 -120.620790 2022061312 ... 6 13 12\n1 21.855751 -120.512500 2022061312 ... 6 13 12\n2 21.882309 -120.404144 2022061312 ... 6 13 12\n3 21.908745 -120.295715 2022061312 ... 6 13 12\n4 21.935051 -120.187225 2022061312 ... 
6 13 12\n\n[5 rows x 18 columns]\n", + "history_begin_time" : 1655309378321, + "history_end_time" : 1655310039319, + "history_notes" : null, + "history_process" : "l8vlic", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "jKfnh1bzYdvK", + "history_input" : "\n# Importing necessary libraries\nimport pandas as pd\nimport pickle\nfrom pathlib import Path\nfrom time import sleep\n\n# home directory\nhome = str(Path.home())\n# importing data\nfinal=pd.read_csv(\"/groups/ESS/aalnaim/cmaq/testing.csv\")\nprint(final.head())\nX = final.drop(['YYYYMMDDHH'],axis=1)\n# defining testing variables\n# processing test data\n\n# load the model from disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\nloaded_model = pickle.load(open(filename, 'rb'))\n\n# making prediction\npred = loaded_model.predict(X)\n\n# adding prediction values to test dataset\nfinal['prediction'] = pred.tolist()\n\nfinal = final[['Latitude', 'Longitude','YYYYMMDDHH','prediction']]\n# saving the dataset into local drive\nfinal.to_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv',index=False)", + "history_output" : " Latitude Longitude YYYYMMDDHH ... month day hours\n0 21.829086 -120.620790 2022061312 ... 6 13 12\n1 21.855751 -120.512500 2022061312 ... 6 13 12\n2 21.882309 -120.404144 2022061312 ... 6 13 12\n3 21.908745 -120.295715 2022061312 ... 6 13 12\n4 21.935051 -120.187225 2022061312 ... 6 13 12\n\n[5 rows x 18 columns]\n/home/aalnaim/CMAQAI/lib/python3.8/site-packages/sklearn/base.py:493: FutureWarning: The feature names should match those that were passed during fit. Starting version 1.2, an error will be raised.\nFeature names unseen at fit time:\n- Latitude\n- Longitude\nFeature names seen at fit time, yet now missing:\n- Latitude_x\n- Longitude_x\n\n warnings.warn(message, FutureWarning)\n", + "history_begin_time" : 1655279905004, + "history_end_time" : 1655309895304, + "history_notes" : null, + "history_process" : "l8vlic", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "BgX94mH3kPcU", + "history_input" : "\n# Importing necessary libraries\nimport pandas as pd\nimport pickle\nfrom pathlib import Path\nfrom time import sleep\n\n# home directory\nhome = str(Path.home())\n# importing data\nfinal=pd.read_csv(\"/groups/ESS/aalnaim/cmaq/testing.csv\")\nprint(final.head())\nX = final.drop(['YYYYMMDDHH','Latitude','Longitude'],axis=1)\n# defining testing variables\n# processing test data\n\n# load the model from disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\nloaded_model = pickle.load(open(filename, 'rb'))\n\n# making prediction\npred = loaded_model.predict(X)\n\n# adding prediction values to test dataset\nfinal['prediction'] = pred.tolist()\n\nfinal = final[['Latitude', 'Longitude','YYYYMMDDHH','prediction']]\n# saving the dataset into local drive\nfinal.to_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv',index=False)", + "history_output" : " Latitude Longitude YYYYMMDDHH ... month day hours\n0 21.829086 -120.620790 2022061312 ... 6 13 12\n1 21.855751 -120.512500 2022061312 ... 6 13 12\n2 21.882309 -120.404144 2022061312 ... 6 13 12\n3 21.908745 -120.295715 2022061312 ... 6 13 12\n4 21.935051 -120.187225 2022061312 ... 6 13 12\n\n[5 rows x 18 columns]\n/home/aalnaim/CMAQAI/lib/python3.8/site-packages/sklearn/base.py:493: FutureWarning: The feature names should match those that were passed during fit. 
Starting version 1.2, an error will be raised.\nFeature names seen at fit time, yet now missing:\n- Latitude_x\n- Longitude_x\n\n warnings.warn(message, FutureWarning)\nTraceback (most recent call last):\n File \"rf_prediction.py\", line 23, in \n pred = loaded_model.predict(X)\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/sklearn/ensemble/_forest.py\", line 971, in predict\n X = self._validate_X_predict(X)\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/sklearn/ensemble/_forest.py\", line 579, in _validate_X_predict\n X = self._validate_data(X, dtype=DTYPE, accept_sparse=\"csr\", reset=False)\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/sklearn/base.py\", line 585, in _validate_data\n self._check_n_features(X, reset=reset)\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/sklearn/base.py\", line 400, in _check_n_features\n raise ValueError(\nValueError: X has 15 features, but RandomForestRegressor is expecting 17 features as input.\n", + "history_begin_time" : 1655279024676, + "history_end_time" : 1655279909948, + "history_notes" : null, + "history_process" : "l8vlic", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "YI8iHBCppmyK", + "history_input" : "\n# Importing necessary libraries\nimport pandas as pd\nimport pickle\nfrom pathlib import Path\nfrom time import sleep\n\n# home directory\nhome = str(Path.home())\n# importing data\nfinal=pd.read_csv(\"/groups/ESS/aalnaim/cmaq/testing.csv\")\nprint(final.head())\nX = final.drop(['YYYYMMDDHH','Latitude','Longitude',],axis=1)\n# defining testing variables\n# processing test data\n\n# load the model from disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\nloaded_model = pickle.load(open(filename, 'rb'))\n\n# making prediction\npred = loaded_model.predict(X)\n\n# adding prediction values to test dataset\nfinal['prediction'] = pred.tolist()\n\nfinal = final[['Latitude', 'Longitude','YYYYMMDDHH','prediction']]\n# saving the dataset into local drive\nfinal.to_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv',index=False)", + "history_output" : " Latitude Longitude YYYYMMDDHH ... month day hours\n0 21.829086 -120.620790 2022061312 ... 6 13 12\n1 21.855751 -120.512500 2022061312 ... 6 13 12\n2 21.882309 -120.404144 2022061312 ... 6 13 12\n3 21.908745 -120.295715 2022061312 ... 6 13 12\n4 21.935051 -120.187225 2022061312 ... 
6 13 12\n\n[5 rows x 18 columns]\n", + "history_begin_time" : 1655277249119, + "history_end_time" : 1655279024700, + "history_notes" : null, + "history_process" : "l8vlic", + "host_id" : null, + "indicator" : "Failed" +},{ + "history_id" : "i95jG9JFwa8J", + "history_input" : "\n# Importing necessary libraries\nimport pandas as pd\nimport pickle\nfrom pathlib import Path\nfrom time import sleep\n\n# home directory\nhome = str(Path.home())\n# importing data\nfinal=pd.read_csv(home+'/cmaq/testing.csv')\nprint(final.head())\nX = final.drop(['YYYYMMDDHH','Latitude','Longitude',],axis=1)\n# defining testing variables\n# processing test data\n\n# load the model from disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\nloaded_model = pickle.load(open(filename, 'rb'))\n\n# making prediction\npred = loaded_model.predict(X)\n\n# adding prediction values to test dataset\nfinal['prediction'] = pred.tolist()\n\nfinal = final[['Latitude', 'Longitude','YYYYMMDDHH','prediction']]\n# saving the dataset into local drive\nfinal.to_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv',index=False)", + "history_output" : " Latitude Longitude YYYYMMDDHH ... month day hours\n0 21.829086 -120.620790 2022061012 ... 6 10 12\n1 21.855751 -120.512500 2022061012 ... 6 10 12\n2 21.882309 -120.404144 2022061012 ... 6 10 12\n3 21.908745 -120.295715 2022061012 ... 6 10 12\n4 21.935051 -120.187225 2022061012 ... 6 10 12\n\n[5 rows x 18 columns]\n", + "history_begin_time" : 1655251805538, + "history_end_time" : 1655255449037, + "history_notes" : null, + "history_process" : "l8vlic", + "host_id" : null, + "indicator" : "Failed" +},{ + "history_id" : "Kn0xLdWWqyIq", + "history_input" : "\n# Importing necessary libraries\nimport pandas as pd\nimport pickle\nfrom pathlib import Path\nfrom time import sleep\n\n# home directory\nhome = str(Path.home())\n# importing data\nfinal=pd.read_csv(home+'/cmaq/testing.csv')\nprint(final.head())\nX = final.drop(['YYYYMMDDHH','Latitude','Longitude',],axis=1)\n# defining testing variables\n# processing test data\n\n# load the model from disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\nloaded_model = pickle.load(open(filename, 'rb'))\n\n# making prediction\npred = loaded_model.predict(X)\n\n# adding prediction values to test dataset\nfinal['prediction'] = pred.tolist()\n\nfinal = final[['Latitude', 'Longitude','YYYYMMDDHH','prediction']]\n# saving the dataset into local drive\nfinal.to_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv',index=False)", + "history_output" : " Latitude Longitude YYYYMMDDHH ... month day hours\n0 21.829086 -120.620790 2022061012 ... 6 10 12\n1 21.855751 -120.512500 2022061012 ... 6 10 12\n2 21.882309 -120.404144 2022061012 ... 6 10 12\n3 21.908745 -120.295715 2022061012 ... 6 10 12\n4 21.935051 -120.187225 2022061012 ... 
6 10 12\n\n[5 rows x 18 columns]\n", + "history_begin_time" : 1655078496366, + "history_end_time" : 1655255392546, + "history_notes" : null, + "history_process" : "l8vlic", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "kFsbWTsT979v", + "history_input" : "\n# Importing necessary libraries\nimport pandas as pd\nimport pickle\nfrom pathlib import Path\nfrom time import sleep\n\n# home directory\nhome = str(Path.home())\n# importing data\nfinal=pd.read_csv(home+'/cmaq/testing.csv')\nprint(final.head())\nX = final.drop(['YYYYMMDDHH','Latitude','Longitude',],axis=1)\n# defining testing variables\n# processing test data\n\n# load the model from disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\nloaded_model = pickle.load(open(filename, 'rb'))\n\n# making prediction\npred = loaded_model.predict(X)\n\n# adding prediction values to test dataset\nfinal['prediction'] = pred.tolist()\n\nfinal = final[['Latitude', 'Longitude','YYYYMMDDHH','prediction']]\n# saving the dataset into local drive\nfinal.to_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv',index=False)", + "history_output" : " Latitude Longitude YYYYMMDDHH month day hours\n0 21.829086 -120.620790 2022061012 6 10 12\n1 21.855751 -120.512500 2022061012 6 10 12\n2 21.882309 -120.404144 2022061012 6 10 12\n3 21.908745 -120.295715 2022061012 6 10 12\n4 21.935051 -120.187225 2022061012 6 10 12\n/home/aalnaim/CMAQAI/lib/python3.8/site-packages/sklearn/base.py:493: FutureWarning: The feature names should match those that were passed during fit. Starting version 1.2, an error will be raised.\nFeature names seen at fit time, yet now missing:\n- CFRAC\n- CMAQ12KM_CO(ppm)\n- CMAQ12KM_NO2(ppb)\n- CMAQ12KM_O3(ppb)\n- CMAQ_OC(ug/m3)\n- ...\n\n warnings.warn(message, FutureWarning)\nTraceback (most recent call last):\n File \"rf_prediction.py\", line 23, in \n pred = loaded_model.predict(X)\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/sklearn/ensemble/_forest.py\", line 971, in predict\n X = self._validate_X_predict(X)\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/sklearn/ensemble/_forest.py\", line 579, in _validate_X_predict\n X = self._validate_data(X, dtype=DTYPE, accept_sparse=\"csr\", reset=False)\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/sklearn/base.py\", line 585, in _validate_data\n self._check_n_features(X, reset=reset)\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/sklearn/base.py\", line 400, in _check_n_features\n raise ValueError(\nValueError: X has 3 features, but RandomForestRegressor is expecting 15 features as input.\n", + "history_begin_time" : 1655076496157, + "history_end_time" : 1655255392079, + "history_notes" : null, + "history_process" : "l8vlic", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "uBFhExbZKHrs", + "history_input" : "\n# Importing necessary libraries\nimport pandas as pd\nimport pickle\nfrom pathlib import Path\nfrom time import sleep\n\n# home directory\nhome = str(Path.home())\n# importing data\nfinal=pd.read_csv(home+'/cmaq/testing.csv')\nprint(final.head())\nX = final.drop(['YYYYMMDDHH','Latitude','Longitude',],axis=1)\n# defining testing variables\n# processing test data\n\n# load the model from disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\nloaded_model = pickle.load(open(filename, 'rb'))\n\n# making prediction\npred = loaded_model.predict(X)\n\n# adding prediction values to 
test dataset\nfinal['prediction'] = pred.tolist()\n\nfinal = final[['Latitude', 'Longitude','YYYYMMDDHH','prediction']]\n# saving the dataset into local drive\nfinal.to_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv',index=False)", + "history_output" : " Latitude Longitude YYYYMMDDHH month day hours\n0 21.829086 -120.620790 2022061012 6 10 12\n1 21.855751 -120.512500 2022061012 6 10 12\n2 21.882309 -120.404144 2022061012 6 10 12\n3 21.908745 -120.295715 2022061012 6 10 12\n4 21.935051 -120.187225 2022061012 6 10 12\n/home/aalnaim/CMAQAI/lib/python3.8/site-packages/sklearn/base.py:493: FutureWarning: The feature names should match those that were passed during fit. Starting version 1.2, an error will be raised.\nFeature names seen at fit time, yet now missing:\n- CFRAC\n- CMAQ12KM_CO(ppm)\n- CMAQ12KM_NO2(ppb)\n- CMAQ12KM_O3(ppb)\n- CMAQ_OC(ug/m3)\n- ...\n\n warnings.warn(message, FutureWarning)\nTraceback (most recent call last):\n File \"rf_prediction.py\", line 23, in \n pred = loaded_model.predict(X)\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/sklearn/ensemble/_forest.py\", line 971, in predict\n X = self._validate_X_predict(X)\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/sklearn/ensemble/_forest.py\", line 579, in _validate_X_predict\n X = self._validate_data(X, dtype=DTYPE, accept_sparse=\"csr\", reset=False)\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/sklearn/base.py\", line 585, in _validate_data\n self._check_n_features(X, reset=reset)\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/sklearn/base.py\", line 400, in _check_n_features\n raise ValueError(\nValueError: X has 3 features, but RandomForestRegressor is expecting 15 features as input.\n", + "history_begin_time" : 1655075418727, + "history_end_time" : 1655255391243, + "history_notes" : null, + "history_process" : "l8vlic", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "35ipqGFNDEhy", + "history_input" : "\n# Importing necessary libraries\nimport pandas as pd\nimport pickle\nfrom pathlib import Path\nfrom time import sleep\n\n# home directory\nhome = str(Path.home())\n# importing data\nfinal=pd.read_csv(home+'/cmaq/testing.csv')\nprint(final.head())\nX = final.drop(['YYYYMMDDHH','Latitude','Longitude',],axis=1)\n# defining testing variables\n# processing test data\n\n# load the model from disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\nloaded_model = pickle.load(open(filename, 'rb'))\n\n# making prediction\npred = loaded_model.predict(X)\n\n# adding prediction values to test dataset\nfinal['prediction'] = pred.tolist()\n\nfinal = final[['Latitude', 'Longitude','YYYYMMDDHH','prediction']]\n# saving the dataset into local drive\nfinal.to_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv',index=False)", + "history_output" : " Latitude Longitude YYYYMMDDHH month day hours\n0 21.829086 -120.620790 2022061012 6 10 12\n1 21.855751 -120.512500 2022061012 6 10 12\n2 21.882309 -120.404144 2022061012 6 10 12\n3 21.908745 -120.295715 2022061012 6 10 12\n4 21.935051 -120.187225 2022061012 6 10 12\n/home/aalnaim/CMAQAI/lib/python3.8/site-packages/sklearn/base.py:493: FutureWarning: The feature names should match those that were passed during fit. 
Starting version 1.2, an error will be raised.\nFeature names seen at fit time, yet now missing:\n- CFRAC\n- CMAQ12KM_CO(ppm)\n- CMAQ12KM_NO2(ppb)\n- CMAQ12KM_O3(ppb)\n- CMAQ_OC(ug/m3)\n- ...\n\n warnings.warn(message, FutureWarning)\nTraceback (most recent call last):\n File \"rf_prediction.py\", line 23, in \n pred = loaded_model.predict(X)\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/sklearn/ensemble/_forest.py\", line 971, in predict\n X = self._validate_X_predict(X)\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/sklearn/ensemble/_forest.py\", line 579, in _validate_X_predict\n X = self._validate_data(X, dtype=DTYPE, accept_sparse=\"csr\", reset=False)\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/sklearn/base.py\", line 585, in _validate_data\n self._check_n_features(X, reset=reset)\n File \"/home/aalnaim/CMAQAI/lib/python3.8/site-packages/sklearn/base.py\", line 400, in _check_n_features\n raise ValueError(\nValueError: X has 3 features, but RandomForestRegressor is expecting 15 features as input.\n", + "history_begin_time" : 1655074997689, + "history_end_time" : 1655255390780, + "history_notes" : null, + "history_process" : "l8vlic", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "6wenhijl5qb", + "history_input" : "\n# Importing necessary libraries\nimport pandas as pd\nimport pickle\nfrom pathlib import Path\nfrom time import sleep\n\n# home directory\nhome = str(Path.home())\n# importing data\nfinal=pd.read_csv(home+'/cmaq/testing.csv')\nprint(final.head())\nX = final.drop(['YYYYMMDDHH','Latitude','Longitude',],axis=1)\n# defining testing variables\n# processing test data\n\n# load the model from disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\nloaded_model = pickle.load(open(filename, 'rb'))\n\n# making prediction\npred = loaded_model.predict(X)\n\n# adding prediction values to test dataset\nfinal['prediction'] = pred.tolist()\n\nfinal = final[['Latitude', 'Longitude','YYYYMMDDHH','prediction']]\n# saving the dataset into local drive\nfinal.to_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv',index=False)", + "history_output" : " Latitude Longitude YYYYMMDDHH month day hours\n0 21.829086 -120.620790 2022061012 6 10 12\n1 21.855751 -120.512500 2022061012 6 10 12\n2 21.882309 -120.404144 2022061012 6 10 12\n3 21.908745 -120.295715 2022061012 6 10 12\n4 21.935051 -120.187225 2022061012 6 10 12\n", + "history_begin_time" : 1655073628036, + "history_end_time" : 1655255390007, + "history_notes" : null, + "history_process" : "l8vlic", + "host_id" : "p6wvf2", + "indicator" : "Stopped" +},{ + "history_id" : "py4wx8nmbmr", + "history_input" : "\n# Importing necessary libraries\nimport pandas as pd\nimport pickle\nfrom pathlib import Path\nfrom time import sleep\n\n# home directory\nhome = str(Path.home())\n# importing data\nfinal=pd.read_csv(home+'/cmaq/testing.csv')\nprint(final.head())\nX = final.drop(['YYYYMMDDHH','Latitude','Longitude',],axis=1)\n# defining testing variables\n# processing test data\n\n# load the model from disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\nloaded_model = pickle.load(open(filename, 'rb'))\n\n# making prediction\npred = loaded_model.predict(X)\n\n# adding prediction values to test dataset\nfinal['prediction'] = pred.tolist()\n\nfinal = final[['Latitude', 'Longitude','YYYYMMDDHH','prediction']]\n# saving the dataset into local 
drive\nfinal.to_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv',index=False)", + "history_output" : " Latitude Longitude YYYYMMDDHH ... month day hours\n0 21.829086 -120.620790 2022052712 ... 5 27 12\n1 21.855751 -120.512500 2022052712 ... 5 27 12\n2 21.882309 -120.404144 2022052712 ... 5 27 12\n3 21.908745 -120.295715 2022052712 ... 5 27 12\n4 21.935051 -120.187225 2022052712 ... 5 27 12\n\n[5 rows x 18 columns]\n", + "history_begin_time" : 1654535473068, + "history_end_time" : 1655255389515, + "history_notes" : null, + "history_process" : "l8vlic", + "host_id" : "p6wvf2", + "indicator" : "Stopped" +},{ + "history_id" : "5thy8s0bg8x", + "history_input" : "\n# Importing necessary libraries\nimport pandas as pd\nimport pickle\nfrom pathlib import Path\nfrom time import sleep\n\n# home directory\nhome = str(Path.home())\n# importing data\nfinal=pd.read_csv(home+'/cmaq/testing.csv')\nprint(final.head())\nX = final.drop(['YYYYMMDDHH','Latitude','Longitude',],axis=1)\n# defining testing variables\n# processing test data\n\n# load the model from disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\nloaded_model = pickle.load(open(filename, 'rb'))\n\n# making prediction\npred = loaded_model.predict(X)\n\n# adding prediction values to test dataset\nfinal['prediction'] = pred.tolist()\n\nfinal = final[['Latitude', 'Longitude','YYYYMMDDHH','prediction']]\n# saving the dataset into local drive\nfinal.to_csv('/groups/ESS/aalnaim/cmaq/prediction_files/prediction_rf.csv',index=False)", + "history_output" : " Latitude Longitude YYYYMMDDHH ... month day hours\n0 21.829086 -120.620790 2022052712 ... 5 27 12\n1 21.855751 -120.512500 2022052712 ... 5 27 12\n2 21.882309 -120.404144 2022052712 ... 5 27 12\n3 21.908745 -120.295715 2022052712 ... 5 27 12\n4 21.935051 -120.187225 2022052712 ... 
5 27 12\n\n[5 rows x 18 columns]\n", + "history_begin_time" : 1654493107576, + "history_end_time" : 1654500507415, + "history_notes" : null, + "history_process" : "l8vlic", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "ng9hugawhlr", + "history_input" : "\n# Importing necessary libraries\nimport pandas as pd\nimport pickle\nfrom pathlib import Path\nfrom time import sleep\n\n# home directory\nhome = str(Path.home())\n# importing data\nfinal=pd.read_csv(home+'/cmaq/testing.csv')\nprint(final.head())\nX = final.drop(['YYYYMMDDHH','Latitude','Longitude',],axis=1)\n# defining testing variables\n# processing test data\n\n# load the model from disk\nfilename = home+'/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\nloaded_model = pickle.load(open(filename, 'rb'))\n\n# making prediction\npred = loaded_model.predict(X)\n\n# adding prediction values to test dataset\nfinal['prediction'] = pred.tolist()\n\nfinal = final[['Latitude', 'Longitude','YYYYMMDDHH','prediction']]\n# saving the dataset into local drive\nfinal.to_csv(home+'/cmaq/prediction_files/prediction_rf.csv',index=False)", + "history_output" : "Traceback (most recent call last):\n File \"rf_prediction.py\", line 11, in \n final=pd.read_csv(home+'/cmaq/testing.csv')\n File \"/home/zsun/anaconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 686, in read_csv\n return _read(filepath_or_buffer, kwds)\n File \"/home/zsun/anaconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 452, in _read\n parser = TextFileReader(fp_or_buf, **kwds)\n File \"/home/zsun/anaconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 946, in __init__\n self._make_engine(self.engine)\n File \"/home/zsun/anaconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 1178, in _make_engine\n self._engine = CParserWrapper(self.f, **self.options)\n File \"/home/zsun/anaconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 2008, in __init__\n self._reader = parsers.TextReader(src, **kwds)\n File \"pandas/_libs/parsers.pyx\", line 382, in pandas._libs.parsers.TextReader.__cinit__\n File \"pandas/_libs/parsers.pyx\", line 674, in pandas._libs.parsers.TextReader._setup_parser_source\nFileNotFoundError: [Errno 2] No such file or directory: '/home/zsun/cmaq/testing.csv'\n", + "history_begin_time" : 1654477537808, + "history_end_time" : 1654477565919, + "history_notes" : null, + "history_process" : "l8vlic", + "host_id" : "roeaa3", + "indicator" : "Failed" +},{ + "history_id" : "6wqzknkreqq", + "history_input" : "\n# Importing necessary libraries\nimport pandas as pd\nimport pickle\nfrom pathlib import Path\nfrom time import sleep\n\n# home directory\nhome = str(Path.home())\n# importing data\nfinal=pd.read_csv(home+'/cmaq/testing.csv')\nprint(final.head())\nX = final.drop(['YYYYMMDDHH','Latitude','Longitude',],axis=1)\n# defining testing variables\n# processing test data\n\n# load the model from disk\nfilename = home+'/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\nloaded_model = pickle.load(open(filename, 'rb'))\n\n# making prediction\npred = loaded_model.predict(X)\n\n# adding prediction values to test dataset\nfinal['prediction'] = pred.tolist()\n\nfinal = final[['Latitude', 'Longitude','YYYYMMDDHH','prediction']]\n# saving the dataset into local drive\nfinal.to_csv(home+'/cmaq/prediction_files/prediction_rf.csv',index=False)", + "history_output" : "Traceback (most recent call last):\n File \"rf_prediction.py\", line 11, in \n 
final=pd.read_csv(home+'/cmaq/testing.csv')\n File \"C:\\Python37\\lib\\site-packages\\pandas\\io\\parsers.py\", line 676, in parser_f\n return _read(filepath_or_buffer, kwds)\n File \"C:\\Python37\\lib\\site-packages\\pandas\\io\\parsers.py\", line 448, in _read\n parser = TextFileReader(fp_or_buf, **kwds)\n File \"C:\\Python37\\lib\\site-packages\\pandas\\io\\parsers.py\", line 880, in __init__\n self._make_engine(self.engine)\n File \"C:\\Python37\\lib\\site-packages\\pandas\\io\\parsers.py\", line 1114, in _make_engine\n self._engine = CParserWrapper(self.f, **self.options)\n File \"C:\\Python37\\lib\\site-packages\\pandas\\io\\parsers.py\", line 1891, in __init__\n self._reader = parsers.TextReader(src, **kwds)\n File \"pandas\\_libs\\parsers.pyx\", line 374, in pandas._libs.parsers.TextReader.__cinit__\n File \"pandas\\_libs\\parsers.pyx\", line 674, in pandas._libs.parsers.TextReader._setup_parser_source\nFileNotFoundError: [Errno 2] File C:\\Users\\JensenSun/cmaq/testing.csv does not exist: 'C:\\\\Users\\\\JensenSun/cmaq/testing.csv'\n", + "history_begin_time" : 1654466003063, + "history_end_time" : 1654466003607, + "history_notes" : null, + "history_process" : "l8vlic", + "host_id" : "100001", + "indicator" : "Failed" +},{ + "history_id" : "2vbqqwuk9ih", + "history_input" : "\n# Importing necessary libraries\nimport pandas as pd\nimport pickle\nfrom pathlib import Path\nfrom time import sleep\n\n# home directory\nhome = str(Path.home())\n# importing data\nfinal=pd.read_csv(home+'/cmaq/testing.csv')\nprint(final.head())\nX = final.drop(['YYYYMMDDHH','Latitude','Longitude',],axis=1)\n# defining testing variables\n# processing test data\n\n# load the model from disk\nfilename = home+'/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\nloaded_model = pickle.load(open(filename, 'rb'))\n\n# making prediction\npred = loaded_model.predict(X)\n\n# adding prediction values to test dataset\nfinal['prediction'] = pred.tolist()\n\nfinal = final[['Latitude', 'Longitude','YYYYMMDDHH','prediction']]\n# saving the dataset into local drive\nfinal.to_csv(home+'/cmaq/prediction_files/prediction_rf.csv',index=False)", + "history_output" : "Traceback (most recent call last):\n File \"rf_prediction.py\", line 11, in \n final=pd.read_csv(home+'/cmaq/testing.csv')\n File \"/home/zsun/anaconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 686, in read_csv\n return _read(filepath_or_buffer, kwds)\n File \"/home/zsun/anaconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 452, in _read\n parser = TextFileReader(fp_or_buf, **kwds)\n File \"/home/zsun/anaconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 946, in __init__\n self._make_engine(self.engine)\n File \"/home/zsun/anaconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 1178, in _make_engine\n self._engine = CParserWrapper(self.f, **self.options)\n File \"/home/zsun/anaconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 2008, in __init__\n self._reader = parsers.TextReader(src, **kwds)\n File \"pandas/_libs/parsers.pyx\", line 382, in pandas._libs.parsers.TextReader.__cinit__\n File \"pandas/_libs/parsers.pyx\", line 674, in pandas._libs.parsers.TextReader._setup_parser_source\nFileNotFoundError: [Errno 2] No such file or directory: '/home/zsun/cmaq/testing.csv'\n", + "history_begin_time" : 1654456150757, + "history_end_time" : 1654456176005, + "history_notes" : null, + "history_process" : "l8vlic", + "host_id" : "roeaa3", + "indicator" : "Failed" +},{ + "history_id" : 
"9rubj8cv3oi", + "history_input" : "\n# Importing necessary libraries\nimport pandas as pd\nimport pickle\nfrom pathlib import Path\nfrom time import sleep\n\n# home directory\nhome = str(Path.home())\n# importing data\nfinal=pd.read_csv(home+'/cmaq/testing.csv')\nprint(final.head())\nX = final.drop(['YYYYMMDDHH','Latitude','Longitude',],axis=1)\n# defining testing variables\n# processing test data\n\n# load the model from disk\nfilename = home+'/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\nloaded_model = pickle.load(open(filename, 'rb'))\n\n# making prediction\npred = loaded_model.predict(X)\n\n# adding prediction values to test dataset\nfinal['prediction'] = pred.tolist()\n\nfinal = final[['Latitude', 'Longitude','YYYYMMDDHH','prediction']]\n# saving the dataset into local drive\nfinal.to_csv(home+'/cmaq/prediction_files/prediction_rf.csv',index=False)", + "history_output" : " Latitude Longitude YYYYMMDDHH ... month day hours\n0 21.829086 -120.620790 2022052712 ... 5 27 12\n1 21.855751 -120.512500 2022052712 ... 5 27 12\n2 21.882309 -120.404144 2022052712 ... 5 27 12\n3 21.908745 -120.295715 2022052712 ... 5 27 12\n4 21.935051 -120.187225 2022052712 ... 5 27 12\n\n[5 rows x 18 columns]\n", + "history_begin_time" : 1654319900667, + "history_end_time" : 1654319971009, + "history_notes" : null, + "history_process" : "l8vlic", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "vmerw3ts4jr", + "history_input" : "\n# Importing necessary libraries\nimport pandas as pd\nimport pickle\nfrom pathlib import Path\nfrom time import sleep\n\n# home directory\nhome = str(Path.home())\n# importing data\nfinal=pd.read_csv(home+'/cmaq/testing.csv')\nprint(final.head())\nX = final.drop(['YYYYMMDDHH','Latitude','Longitude',],axis=1)\n# defining testing variables\n# processing test data\n\n# load the model from disk\nfilename = home+'/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\nloaded_model = pickle.load(open(filename, 'rb'))\n\n# making prediction\npred = loaded_model.predict(X)\n\n# adding prediction values to test dataset\nfinal['prediction'] = pred.tolist()\n\nfinal = final[['Latitude', 'Longitude','YYYYMMDDHH','prediction']]\n# saving the dataset into local drive\nfinal.to_csv(home+'/cmaq/prediction_files/prediction_rf.csv',index=False)", + "history_output" : "Traceback (most recent call last):\n File \"rf_prediction.py\", line 11, in \n final=pd.read_csv(home+'/cmaq/testing.csv')\n File \"/home/zsun/anaconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 686, in read_csv\n return _read(filepath_or_buffer, kwds)\n File \"/home/zsun/anaconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 452, in _read\n parser = TextFileReader(fp_or_buf, **kwds)\n File \"/home/zsun/anaconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 946, in __init__\n self._make_engine(self.engine)\n File \"/home/zsun/anaconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 1178, in _make_engine\n self._engine = CParserWrapper(self.f, **self.options)\n File \"/home/zsun/anaconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 2008, in __init__\n self._reader = parsers.TextReader(src, **kwds)\n File \"pandas/_libs/parsers.pyx\", line 382, in pandas._libs.parsers.TextReader.__cinit__\n File \"pandas/_libs/parsers.pyx\", line 674, in pandas._libs.parsers.TextReader._setup_parser_source\nFileNotFoundError: [Errno 2] No such file or directory: '/home/zsun/cmaq/testing.csv'\n", + "history_begin_time" : 
1654317987994, + "history_end_time" : 1654318013515, + "history_notes" : null, + "history_process" : "l8vlic", + "host_id" : "roeaa3", + "indicator" : "Done" +},{ + "history_id" : "gygji4nxsq6", + "history_input" : "\n# Importing necessary libraries\nimport pandas as pd\nimport pickle\nfrom pathlib import Path\nfrom time import sleep\n\n# home directory\nhome = str(Path.home())\n# importing data\nfinal=pd.read_csv(home+'/cmaq/testing.csv')\nprint(final.head())\nX = final.drop(['YYYYMMDDHH','Latitude','Longitude',],axis=1)\n# defining testing variables\n# processing test data\n\n# load the model from disk\nfilename = home+'/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\nloaded_model = pickle.load(open(filename, 'rb'))\n\n# making prediction\npred = loaded_model.predict(X)\n\n# adding prediction values to test dataset\nfinal['prediction'] = pred.tolist()\n\nfinal = final[['Latitude', 'Longitude','YYYYMMDDHH','prediction']]\n# saving the dataset into local drive\nfinal.to_csv(home+'/cmaq/prediction_files/prediction_rf.csv',index=False)", + "history_output" : "Traceback (most recent call last):\n File \"rf_prediction.py\", line 11, in \n final=pd.read_csv(home+'/cmaq/testing.csv')\n File \"/home/zsun/anaconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 686, in read_csv\n return _read(filepath_or_buffer, kwds)\n File \"/home/zsun/anaconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 452, in _read\n parser = TextFileReader(fp_or_buf, **kwds)\n File \"/home/zsun/anaconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 946, in __init__\n self._make_engine(self.engine)\n File \"/home/zsun/anaconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 1178, in _make_engine\n self._engine = CParserWrapper(self.f, **self.options)\n File \"/home/zsun/anaconda3/lib/python3.8/site-packages/pandas/io/parsers.py\", line 2008, in __init__\n self._reader = parsers.TextReader(src, **kwds)\n File \"pandas/_libs/parsers.pyx\", line 382, in pandas._libs.parsers.TextReader.__cinit__\n File \"pandas/_libs/parsers.pyx\", line 674, in pandas._libs.parsers.TextReader._setup_parser_source\nFileNotFoundError: [Errno 2] No such file or directory: '/home/zsun/cmaq/testing.csv'\n", + "history_begin_time" : 1654314771215, + "history_end_time" : 1654314797695, + "history_notes" : null, + "history_process" : "l8vlic", + "host_id" : "roeaa3", + "indicator" : "Done" +},{ + "history_id" : "xtsxvek50qi", + "history_input" : "\n# Importing necessary libraries\nimport pandas as pd\nimport pickle\nfrom pathlib import Path\nfrom time import sleep\n\n# home directory\nhome = str(Path.home())\n# importing data\nfinal=pd.read_csv(home+'/cmaq/testing.csv')\nprint(final.head())\nX = final.drop(['YYYYMMDDHH','Latitude','Longitude',],axis=1)\n# defining testing variables\n# processing test data\n\n# load the model from disk\nfilename = home+'/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\nloaded_model = pickle.load(open(filename, 'rb'))\n\n# making prediction\npred = loaded_model.predict(X)\n\n# adding prediction values to test dataset\nfinal['prediction'] = pred.tolist()\n\nfinal = final[['Latitude', 'Longitude','YYYYMMDDHH','prediction']]\n# saving the dataset into local drive\nfinal.to_csv(home+'/cmaq/prediction_files/prediction_rf.csv',index=False)", + "history_output" : " Latitude Longitude YYYYMMDDHH ... month day hours\n0 21.829086 -120.620790 2022052712 ... 5 27 12\n1 21.855751 -120.512500 2022052712 ... 
5 27 12\n2 21.882309 -120.404144 2022052712 ... 5 27 12\n3 21.908745 -120.295715 2022052712 ... 5 27 12\n4 21.935051 -120.187225 2022052712 ... 5 27 12\n\n[5 rows x 18 columns]\n", + "history_begin_time" : 1653698689637, + "history_end_time" : 1653698754828, + "history_notes" : null, + "history_process" : "l8vlic", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "apj0822g3qx", + "history_input" : "\n# Importing necessary libraries\nimport pandas as pd\nimport pickle\nfrom pathlib import Path\nfrom time import sleep\n\n# home directory\nhome = str(Path.home())\n# importing data\nfinal=pd.read_csv(home+'/cmaq/testing.csv')\nprint(final.head())\nX = final.drop(['YYYYMMDDHH','Latitude','Longitude',],axis=1)\n# defining testing variables\n# processing test data\n\n# load the model from disk\nfilename = home+'/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\nloaded_model = pickle.load(open(filename, 'rb'))\n\n# making prediction\npred = loaded_model.predict(X)\n\n# adding prediction values to test dataset\nfinal['prediction'] = pred.tolist()\n\nfinal = final[['Latitude', 'Longitude','YYYYMMDDHH','prediction']]\n# saving the dataset into local drive\nfinal.to_csv(home+'/cmaq/prediction_files/prediction_rf.csv',index=False)", + "history_output" : " Latitude Longitude YYYYMMDDHH ... month day hours\n0 21.829086 -120.620790 2022052712 ... 5 27 12\n1 21.855751 -120.512500 2022052712 ... 5 27 12\n2 21.882309 -120.404144 2022052712 ... 5 27 12\n3 21.908745 -120.295715 2022052712 ... 5 27 12\n4 21.935051 -120.187225 2022052712 ... 5 27 12\n\n[5 rows x 18 columns]\n", + "history_begin_time" : 1653698533667, + "history_end_time" : 1653698587738, + "history_notes" : null, + "history_process" : "l8vlic", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "jxbl87f97cl", + "history_input" : "\n# Importing necessary libraries\nimport pandas as pd\nimport pickle\nfrom pathlib import Path\nfrom time import sleep\n\n# home directory\nhome = str(Path.home())\n# importing data\nfinal=pd.read_csv(home+'/cmaq/testing.csv')\nprint(final.head())\nX = final.drop(['YYYYMMDDHH','Latitude','Longitude',],axis=1)\n# defining testing variables\n# processing test data\n\n# load the model from disk\nfilename = home+'/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\nloaded_model = pickle.load(open(filename, 'rb'))\n\n# making prediction\npred = loaded_model.predict(X)\n\n# adding prediction values to test dataset\nfinal['prediction'] = pred.tolist()\n\nfinal = final[['Latitude', 'Longitude','YYYYMMDDHH','prediction']]\n# saving the dataset into local drive\nfinal.to_csv(home+'/cmaq/prediction_files/prediction_rf.csv',index=False)", + "history_output" : "Traceback (most recent call last):\n File \"rf_prediction.py\", line 11, in \n final=pd.read_csv(home+'/cmaq/testing.csv')\n File \"C:\\Python37\\lib\\site-packages\\pandas\\io\\parsers.py\", line 676, in parser_f\n return _read(filepath_or_buffer, kwds)\n File \"C:\\Python37\\lib\\site-packages\\pandas\\io\\parsers.py\", line 448, in _read\n parser = TextFileReader(fp_or_buf, **kwds)\n File \"C:\\Python37\\lib\\site-packages\\pandas\\io\\parsers.py\", line 880, in __init__\n self._make_engine(self.engine)\n File \"C:\\Python37\\lib\\site-packages\\pandas\\io\\parsers.py\", line 1114, in _make_engine\n self._engine = CParserWrapper(self.f, **self.options)\n File \"C:\\Python37\\lib\\site-packages\\pandas\\io\\parsers.py\", line 1891, in __init__\n self._reader = parsers.TextReader(src, 
**kwds)\n File \"pandas\\_libs\\parsers.pyx\", line 374, in pandas._libs.parsers.TextReader.__cinit__\n File \"pandas\\_libs\\parsers.pyx\", line 674, in pandas._libs.parsers.TextReader._setup_parser_source\nFileNotFoundError: [Errno 2] File C:\\Users\\JensenSun/cmaq/testing.csv does not exist: 'C:\\\\Users\\\\JensenSun/cmaq/testing.csv'\n", + "history_begin_time" : 1652934720109, + "history_end_time" : 1652934720613, + "history_notes" : null, + "history_process" : "l8vlic", + "host_id" : "100001", + "indicator" : "Done" +},{ + "history_id" : "upczcif6qx6", + "history_input" : "\n# Importing necessary libraries\nimport pandas as pd\nimport pickle\nfrom pathlib import Path\nfrom time import sleep\n\n# home directory\nhome = str(Path.home())\n# importing data\nfinal=pd.read_csv(home+'/cmaq/testing.csv')\nprint(final.head())\nX = final.drop(['YYYYMMDDHH','Latitude','Longitude',],axis=1)\n# defining testing variables\n# processing test data\n\n# load the model from disk\nfilename = home+'/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\nloaded_model = pickle.load(open(filename, 'rb'))\n\n# making prediction\npred = loaded_model.predict(X)\n\n# adding prediction values to test dataset\nfinal['prediction'] = pred.tolist()\n\nfinal = final[['Latitude', 'Longitude','YYYYMMDDHH','prediction']]\n# saving the dataset into local drive\nfinal.to_csv(home+'/cmaq/prediction_files/prediction_rf.csv',index=False)", + "history_output" : " Latitude Longitude YYYYMMDDHH ... month day hours\n0 21.829086 -120.620790 2022051712 ... 5 17 12\n1 21.855751 -120.512500 2022051712 ... 5 17 12\n2 21.882309 -120.404144 2022051712 ... 5 17 12\n3 21.908745 -120.295715 2022051712 ... 5 17 12\n4 21.935051 -120.187225 2022051712 ... 5 17 12\n\n[5 rows x 18 columns]\n", + "history_begin_time" : 1652832357494, + "history_end_time" : 1652832428017, + "history_notes" : null, + "history_process" : "l8vlic", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "0xbiwyu7zda", + "history_input" : "\n# Importing necessary libraries\nimport pandas as pd\nimport pickle\nfrom pathlib import Path\nfrom time import sleep\n\n# home directory\nhome = str(Path.home())\n# importing data\nfinal=pd.read_csv(home+'/cmaq/testing.csv')\nprint(final.head())\nX = final.drop(['YYYYMMDDHH','Latitude','Longitude',],axis=1)\n# defining testing variables\n# processing test data\n\n# load the model from disk\nfilename = home+'/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\nloaded_model = pickle.load(open(filename, 'rb'))\n\n# making prediction\npred = loaded_model.predict(X)\n\n# adding prediction values to test dataset\nfinal['prediction'] = pred.tolist()\n\nfinal = final[['Latitude', 'Longitude','YYYYMMDDHH','prediction']]\n# saving the dataset into local drive\nfinal.to_csv(home+'/cmaq/prediction_files/prediction_rf.csv',index=False)", + "history_output" : " Latitude Longitude YYYYMMDDHH ... month day hours\n0 21.829086 -120.620790 2022051712 ... 5 17 12\n1 21.855751 -120.512500 2022051712 ... 5 17 12\n2 21.882309 -120.404144 2022051712 ... 5 17 12\n3 21.908745 -120.295715 2022051712 ... 5 17 12\n4 21.935051 -120.187225 2022051712 ... 
5 17 12\n\n[5 rows x 18 columns]\n", + "history_begin_time" : 1652831490817, + "history_end_time" : 1652831563728, + "history_notes" : null, + "history_process" : "l8vlic", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "zM9m9TdMyPYu", + "history_input" : "\n# Importing necessary libraries\nimport pandas as pd\nimport pickle\nfrom pathlib import Path\nfrom time import sleep\n\n# home directory\nhome = str(Path.home())\n# importing data\nfinal=pd.read_csv(home+'/cmaq/testing.csv')\nprint(final.head())\nX = final.drop(['YYYYMMDDHH','Latitude','Longitude',],axis=1)\n# defining testing variables\n# processing test data\n\n# load the model from disk\nfilename = home+'/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\nloaded_model = pickle.load(open(filename, 'rb'))\n\n# making prediction\npred = loaded_model.predict(X)\n\n# adding prediction values to test dataset\nfinal['prediction'] = pred.tolist()\n\nfinal = final[['Latitude', 'Longitude','YYYYMMDDHH','prediction']]\n# saving the dataset into local drive\nfinal.to_csv(home+'/cmaq/prediction_files/prediction_rf.csv',index=False)", + "history_output" : " Latitude Longitude YYYYMMDDHH ... month day hours\n0 21.829086 -120.620790 2022050912 ... 5 9 12\n1 21.855751 -120.512500 2022050912 ... 5 9 12\n2 21.882309 -120.404144 2022050912 ... 5 9 12\n3 21.908745 -120.295715 2022050912 ... 5 9 12\n4 21.935051 -120.187225 2022050912 ... 5 9 12\n\n[5 rows x 18 columns]\n", + "history_begin_time" : 1652798787543, + "history_end_time" : 1652798848464, + "history_notes" : null, + "history_process" : "l8vlic", + "host_id" : null, + "indicator" : "Done" +},{ + "history_id" : "ncm0obwm03c", + "history_input" : null, + "history_output" : "Received unknown response code", + "history_begin_time" : 1652786144698, + "history_end_time" : 1652786145493, + "history_notes" : null, + "history_process" : "l8vlic", + "host_id" : "p6wvf2", + "indicator" : "Failed" +},{ + "history_id" : "adqcukrehi9", + "history_input" : "\n# Importing necessary libraries\nimport pandas as pd\nimport pickle\nfrom pathlib import Path\nfrom time import sleep\n\n# home directory\nhome = str(Path.home())\n# importing data\nfinal=pd.read_csv(home+'/cmaq/testing.csv')\nprint(final.head())\nX = final.drop(['YYYYMMDDHH','Latitude','Longitude',],axis=1)\n# defining testing variables\n# processing test data\n\n# load the model from disk\nfilename = home+'/cmaq/models/xgboost.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\nloaded_model = pickle.load(open(filename, 'rb'))\n\n# making prediction\npred = loaded_model.predict(X)\n\n# adding prediction values to test dataset\nfinal['prediction'] = pred.tolist()\n\nfinal = final[['Latitude', 'Longitude','YYYYMMDDHH','prediction']]\n# saving the dataset into local drive\nfinal.to_csv(home+'/cmaq/prediction_files/prediction_xgboost.csv',index=False)", + "history_output" : " Latitude Longitude YYYYMMDDHH ... month day hours\n0 21.829086 -120.620790 2022050812 ... 5 8 12\n1 21.855751 -120.512500 2022050812 ... 5 8 12\n2 21.882309 -120.404144 2022050812 ... 5 8 12\n3 21.908745 -120.295715 2022050812 ... 5 8 12\n4 21.935051 -120.187225 2022050812 ... 
5 8 12\n\n[5 rows x 18 columns]\n", + "history_begin_time" : 1652055341287, + "history_end_time" : 1652055417823, + "history_notes" : null, + "history_process" : "l8vlic", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "tgp2amb85h2", + "history_input" : "\n# Importing necessary libraries\nimport pandas as pd\nimport pickle\nfrom pathlib import Path\nfrom time import sleep\n\n# home directory\nhome = str(Path.home())\n# importing data\nfinal=pd.read_csv(home+'/cmaq/testing.csv')\nprint(final.head())\nX = final.drop(['YYYYMMDDHH','Latitude','Longitude',],axis=1)\n# defining testing variables\n# processing test data\n\n# load the model from disk\nfilename = home+'/cmaq/models/xgboost.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\nloaded_model = pickle.load(open(filename, 'rb'))\n\n# making prediction\npred = loaded_model.predict(X)\n\n# adding prediction values to test dataset\nfinal['prediction'] = pred.tolist()\n\nfinal = final[['Latitude', 'Longitude','YYYYMMDDHH','prediction']]\n# saving the dataset into local drive\nfinal.to_csv(home+'/cmaq/prediction_files/prediction_xgboost.csv',index=False)", + "history_output" : " Latitude Longitude YYYYMMDDHH ... month day hours\n0 21.829086 -120.620790 2022050812 ... 5 8 12\n1 21.855751 -120.512500 2022050812 ... 5 8 12\n2 21.882309 -120.404144 2022050812 ... 5 8 12\n3 21.908745 -120.295715 2022050812 ... 5 8 12\n4 21.935051 -120.187225 2022050812 ... 5 8 12\n\n[5 rows x 18 columns]\nTraceback (most recent call last):\n File \"xgboost_prediction.py\", line 20, in \n loaded_model = pickle.load(open(filename, 'rb'))\nFileNotFoundError: [Errno 2] No such file or directory: '/home/aalnaim/cmaq/models/xgboost.sav'\n", + "history_begin_time" : 1652048240225, + "history_end_time" : 1652048262702, + "history_notes" : null, + "history_process" : "l8vlic", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "g269o05x5gq", + "history_input" : "\n# Importing necessary libraries\nimport pandas as pd\nimport pickle\nfrom pathlib import Path\nfrom time import sleep\n\n# home directory\nhome = str(Path.home())\n# importing data\nfinal=pd.read_csv(home+'/cmaq/testing.csv')\nX = final.drop(['YYYYMMDDHH'],axis=1)\n# defining testing variables\n# processing test data\n\n# load the model from disk\nfilename = home+'/cmaq/models/xgboost.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\nloaded_model = pickle.load(open(filename, 'rb'))\n\n# making prediction\npred = loaded_model.predict(X)\n\n# adding prediction values to test dataset\nfinal['prediction'] = pred.tolist()\n\nfinal = final[['Latitude', 'Longitude','YYYYMMDDHH','prediction']]\n# saving the dataset into local drive\nfinal.to_csv(home+'/cmaq/prediction_files/prediction_xgboost.csv',index=False)", + "history_output" : "Traceback (most recent call last):\n File \"/home/mislam25/gw-workspace/g269o05x5gq/xgboost_prediction.py\", line 11, in \n final=pd.read_csv(home+'/cmaq/testing.csv')\n File \"/home/mislam25/anaconda3/lib/python3.9/site-packages/pandas/util/_decorators.py\", line 311, in wrapper\n return func(*args, **kwargs)\n File \"/home/mislam25/anaconda3/lib/python3.9/site-packages/pandas/io/parsers/readers.py\", line 586, in read_csv\n return _read(filepath_or_buffer, kwds)\n File \"/home/mislam25/anaconda3/lib/python3.9/site-packages/pandas/io/parsers/readers.py\", line 482, in _read\n parser = TextFileReader(filepath_or_buffer, **kwds)\n File \"/home/mislam25/anaconda3/lib/python3.9/site-packages/pandas/io/parsers/readers.py\", line 811, in 
__init__\n self._engine = self._make_engine(self.engine)\n File \"/home/mislam25/anaconda3/lib/python3.9/site-packages/pandas/io/parsers/readers.py\", line 1040, in _make_engine\n return mapping[engine](self.f, **self.options) # type: ignore[call-arg]\n File \"/home/mislam25/anaconda3/lib/python3.9/site-packages/pandas/io/parsers/c_parser_wrapper.py\", line 51, in __init__\n self._open_handles(src, kwds)\n File \"/home/mislam25/anaconda3/lib/python3.9/site-packages/pandas/io/parsers/base_parser.py\", line 222, in _open_handles\n self.handles = get_handle(\n File \"/home/mislam25/anaconda3/lib/python3.9/site-packages/pandas/io/common.py\", line 702, in get_handle\n handle = open(\nFileNotFoundError: [Errno 2] No such file or directory: '/home/mislam25/cmaq/testing.csv'\n", + "history_begin_time" : 1650481143387, + "history_end_time" : 1650481223433, + "history_notes" : null, + "history_process" : "l8vlic", + "host_id" : "3wgogh", + "indicator" : "Done" +},{ + "history_id" : "qv5uac976zt", + "history_input" : "\n# Importing necessary libraries\nimport pandas as pd\nimport pickle\nfrom pathlib import Path\nfrom time import sleep\n\n# home directory\nhome = str(Path.home())\n# importing data\nfinal=pd.read_csv(home+'/cmaq/testing.csv')\nX = final.drop(['YYYYMMDDHH'],axis=1)\n# defining testing variables\n# processing test data\n\n# load the model from disk\nfilename = home+'/cmaq/models/xgboost.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\nloaded_model = pickle.load(open(filename, 'rb'))\n\n# making prediction\npred = loaded_model.predict(X)\n\n# adding prediction values to test dataset\nfinal['prediction'] = pred.tolist()\n\nfinal = final[['Latitude', 'Longitude','YYYYMMDDHH','prediction']]\n# saving the dataset into local drive\nfinal.to_csv(home+'/cmaq/prediction_files/prediction_xgboost.csv',index=False)", + "history_output" : "Traceback (most recent call last):\n File \"/home/mislam25/gw-workspace/qv5uac976zt/xgboost_prediction.py\", line 19, in \n loaded_model = pickle.load(open(filename, 'rb'))\nFileNotFoundError: [Errno 2] No such file or directory: '/home/mislam25/cmaq/models/xgboost.sav'\n", + "history_begin_time" : 1650474083377, + "history_end_time" : 1650474160388, + "history_notes" : null, + "history_process" : "l8vlic", + "host_id" : "3wgogh", + "indicator" : "Done" +},{ + "history_id" : "2vxwx3h42ee", + "history_input" : "\n# Importing necessary libraries\nimport pandas as pd\nimport pickle\nfrom pathlib import Path\nfrom time import sleep\n\n# home directory\nhome = str(Path.home())\n# importing data\nfinal=pd.read_csv(home+'/cmaq/testing.csv')\nX = final.drop(['YYYYMMDDHH'],axis=1)\n# defining testing variables\n# processing test data\n\n# load the model from disk\nfilename = home+'/cmaq/models/xgboost.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\nloaded_model = pickle.load(open(filename, 'rb'))\n\n# making prediction\npred = loaded_model.predict(X)\n\n# adding prediction values to test dataset\nfinal['prediction'] = pred.tolist()\n\nfinal = final[['Latitude', 'Longitude','YYYYMMDDHH','prediction']]\n# saving the dataset into local drive\nfinal.to_csv(home+'/cmaq/prediction_files/prediction_xgboost.csv',index=False)", + "history_output" : "", + "history_begin_time" : 1650252679749, + "history_end_time" : 1650252771890, + "history_notes" : null, + "history_process" : "l8vlic", + "host_id" : "3wgogh", + "indicator" : "Done" +},{ + "history_id" : "t4tpre1ajaz", + "history_input" : "\n# Importing necessary libraries\nimport pandas as pd\nimport 
pickle\nfrom pathlib import Path\nfrom time import sleep\n\n# home directory\nhome = str(Path.home())\n# importing data\nfinal=pd.read_csv(home+'/cmaq/testing.csv')\nX = final.drop(['YYYYMMDDHH'],axis=1)\n# defining testing variables\n# processing test data\n\n# load the model from disk\nfilename = home+'/cmaq/models/xgboost.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\nloaded_model = pickle.load(open(filename, 'rb'))\n\n# making prediction\npred = loaded_model.predict(X)\n\n# adding prediction values to test dataset\nfinal['prediction'] = pred.tolist()\n\nfinal = final[['Latitude', 'Longitude','YYYYMMDDHH','prediction']]\n# saving the dataset into local drive\nfinal.to_csv(home+'/cmaq/prediction_files/prediction_xgboost.csv',index=False)", + "history_output" : "", + "history_begin_time" : 1650215249428, + "history_end_time" : 1650215334005, + "history_notes" : null, + "history_process" : "l8vlic", + "host_id" : "3wgogh", + "indicator" : "Done" +},] diff --git a/history/process_rjm4qm.json b/history/process_rjm4qm.json new file mode 100644 index 0000000..6f1bd05 --- /dev/null +++ b/history/process_rjm4qm.json @@ -0,0 +1,21 @@ +[{ + "history_id" : "xwxSkHiU0RZ3", + "history_input" : "#!/bin/bash\nsbatch /groups/ESS/aalnaim/cmaq/cmaq_gpu.slurm", + "history_output" : "Submitted batch job 268228\n", + "history_begin_time" : 1654536387546, + "history_end_time" : null, + "history_notes" : null, + "history_process" : "rjm4qm", + "host_id" : null, + "indicator" : "Running" +},{ + "history_id" : "0dbnqvd4erk", + "history_input" : "#!/bin/bash\nsbatch /groups/ESS/aalnaim/cmaq/cmaq_gpu.slurm", + "history_output" : "Submitted batch job 268227\n", + "history_begin_time" : 1654535465938, + "history_end_time" : 1654535470564, + "history_notes" : null, + "history_process" : "rjm4qm", + "host_id" : "p6wvf2", + "indicator" : "Done" +},] diff --git a/history/process_wny2dz.json b/history/process_wny2dz.json new file mode 100644 index 0000000..7112d81 --- /dev/null +++ b/history/process_wny2dz.json @@ -0,0 +1,371 @@ +[{ + "history_id" : "JgKAzkETUJbG", + "history_input" : "echo \"#!/bin/bash\n#SBATCH --partition=gpuq # the DGX only belongs in the 'gpu' partition\n#SBATCH --qos=gpu # need to select 'gpu' QoS\n#SBATCH --job-name=cmaq-gpu\n#SBATCH --output=cmaq-gpu.%j.out\n#SBATCH --error=cmaq-gpu.%j.err\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=64 # up to 128;\n#SBATCH --gres=gpu:A100.40gb:4 # up to 8; only request what you need\n#SBATCH --mem-per-cpu=3500M # memory per CORE; total memory is 1 TB (1,000,000 MB)\n#SBATCH --export=ALL\n#SBATCH --time=0-04:00:00 # set to 1hr; please choose carefully\nset echo\numask 0027\n# to see ID and state of GPUs assigned\nnvidia-smi\n\nmodule load python\nsource /home/aalnaim/CMAQAI/bin/activate\n\ncat <>/groups/ESS/aalnaim/cmaq/rf_pyCaret.py\n# Write first python in Geoweaver# NASA GEOWEAVER\n# CMAQ-AI Model: Training Voting-XGBoost model\n\n# Importing necessary libraries\nimport pandas as pd\nimport sklearn\nfrom sklearn.ensemble import RandomForestRegressor\nfrom xgboost.sklearn import XGBRegressor\nimport pickle\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\n\n# importing data\nfinal=pd.read_csv('/groups/ESS/mislam25/processed_training/agg_data_2021_03_15_to_22_4_30.csv')\nprint(final.head())\nfinal=final.dropna()\n\n# Processing training data\nX = final.drop(['AirNOW_O3','Latitude_x','Longitude_x'],axis=1)\ny = final['AirNOW_O3']\n\nrf = RandomForestRegressor(bootstrap=True, ccp_alpha=0.0, criterion='mse',\n max_depth=None, 
max_features='auto', max_leaf_nodes=None,\n max_samples=None, min_impurity_decrease=0.0,\n min_samples_leaf=1,\n min_samples_split=2, min_weight_fraction_leaf=0.0,\n n_estimators=100, n_jobs=-1, oob_score=False,\n random_state=3086, verbose=0, warm_start=False)\n\nrf.fit(X, y)\n\n# save the model to disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rf_Jun14.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\npickle.dump(rf, open(filename, 'wb'))\nEOF\npython /groups/ESS/aalnaim/cmaq/rf_pyCaret.py\" >> /groups/ESS/aalnaim/cmaq/cmaq_gpu.slurm\n\nsbatch /groups/ESS/aalnaim/cmaq/cmaq_gpu.slurm\n\nsleep 20", + "history_output" : "Submitted batch job 317724\n", + "history_begin_time" : 1655309069523, + "history_end_time" : 1656319034768, + "history_notes" : null, + "history_process" : "wny2dz", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "QpVpvZwzKKd8", + "history_input" : "echo \"#!/bin/bash\n#SBATCH --partition=gpuq # the DGX only belongs in the 'gpu' partition\n#SBATCH --qos=gpu # need to select 'gpu' QoS\n#SBATCH --job-name=cmaq-gpu\n#SBATCH --output=cmaq-gpu.%j.out\n#SBATCH --error=cmaq-gpu.%j.err\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=64 # up to 128;\n#SBATCH --gres=gpu:A100.40gb:4 # up to 8; only request what you need\n#SBATCH --mem-per-cpu=3500M # memory per CORE; total memory is 1 TB (1,000,000 MB)\n#SBATCH --export=ALL\n#SBATCH --time=0-04:00:00 # set to 1hr; please choose carefully\nset echo\numask 0027\n# to see ID and state of GPUs assigned\nnvidia-smi\n\nmodule load python\nsource /home/aalnaim/CMAQAI/bin/activate\n\ncat <>/groups/ESS/aalnaim/cmaq/rf_pyCaret.py\n# Write first python in Geoweaver# NASA GEOWEAVER\n# CMAQ-AI Model: Training Voting-XGBoost model\n\n# Importing necessary libraries\nimport pandas as pd\nimport sklearn\nfrom sklearn.ensemble import RandomForestRegressor\nfrom xgboost.sklearn import XGBRegressor\nimport pickle\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\n\n# importing data\nfinal=pd.read_csv('/groups/ESS/mislam25/processed_training/agg_data_2021_03_15_to_22_4_30.csv')\nprint(final.head())\nfinal=final.dropna()\n\n# Processing training data\nX = final.drop(['AirNOW_O3'],axis=1)\ny = final['AirNOW_O3']\n\nrf = RandomForestRegressor(bootstrap=True, ccp_alpha=0.0, criterion='mse',\n max_depth=None, max_features='auto', max_leaf_nodes=None,\n max_samples=None, min_impurity_decrease=0.0,\n min_samples_leaf=1,\n min_samples_split=2, min_weight_fraction_leaf=0.0,\n n_estimators=100, n_jobs=-1, oob_score=False,\n random_state=3086, verbose=0, warm_start=False)\n\nrf.fit(X, y)\n\n# save the model to disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\npickle.dump(rf, open(filename, 'wb'))\nEOF\npython /groups/ESS/aalnaim/cmaq/rf_pyCaret.py\" >> /groups/ESS/aalnaim/cmaq/cmaq_gpu.slurm\n\nsbatch /groups/ESS/aalnaim/cmaq/cmaq_gpu.slurm\n\nsleep 20", + "history_output" : "Submitted batch job 317275\n", + "history_begin_time" : 1655240792433, + "history_end_time" : 1655308889463, + "history_notes" : null, + "history_process" : "wny2dz", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "YiUac61u0NQs", + "history_input" : "echo \"#!/bin/bash\n#SBATCH --partition=gpuq # the DGX only belongs in the 'gpu' partition\n#SBATCH --qos=gpu # need to select 'gpu' QoS\n#SBATCH --job-name=cmaq-gpu\n#SBATCH --output=cmaq-gpu.%j.out\n#SBATCH --error=cmaq-gpu.%j.err\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=64 # up to 128;\n#SBATCH 
--gres=gpu:A100.40gb:4 # up to 8; only request what you need\n#SBATCH --mem-per-cpu=3500M # memory per CORE; total memory is 1 TB (1,000,000 MB)\n#SBATCH --export=ALL\n#SBATCH --time=0-04:00:00 # set to 1hr; please choose carefully\nset echo\numask 0027\n# to see ID and state of GPUs assigned\nnvidia-smi\n\nmodule load python\nsource /home/aalnaim/CMAQAI/bin/activate\n\ncat <>/groups/ESS/aalnaim/cmaq/rf_pyCaret.py\n# Write first python in Geoweaver# NASA GEOWEAVER\n# CMAQ-AI Model: Training Voting-XGBoost model\n\n# Importing necessary libraries\nimport pandas as pd\nimport sklearn\nfrom sklearn.ensemble import RandomForestRegressor\nfrom xgboost.sklearn import XGBRegressor\nimport pickle\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\n\n# importing data\nfinal=pd.read_csv('/groups/ESS/mislam25/processed_training/agg_data_2021_03_15_to_22_4_30.csv')\nprint(final.head())\nfinal=final.dropna()\n\n# Processing training data\nX = final.drop(['AirNOW_O3'],axis=1)\ny = final['AirNOW_O3']\n\nrf = RandomForestRegressor(bootstrap=True, ccp_alpha=0.0, criterion='mse',\n max_depth=None, max_features='auto', max_leaf_nodes=None,\n max_samples=None, min_impurity_decrease=0.0,\n min_samples_leaf=1,\n min_samples_split=2, min_weight_fraction_leaf=0.0,\n n_estimators=100, n_jobs=-1, oob_score=False,\n random_state=3086, verbose=0, warm_start=False)\n\nrf.fit(X, y)\n\n# save the model to disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\npickle.dump(rf, open(filename, 'wb'))\nEOF\npython /groups/ESS/aalnaim/cmaq/rf_pyCaret.py\" >> /groups/ESS/aalnaim/cmaq/cmaq_gpu.slurm\n\nsbatch /groups/ESS/aalnaim/cmaq/cmaq_gpu.slurm\n\nsleep 20", + "history_output" : "Submitted batch job 317274\n", + "history_begin_time" : 1655240750532, + "history_end_time" : 1655308888978, + "history_notes" : null, + "history_process" : "wny2dz", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "06xaefcrFtgo", + "history_input" : "echo \"#!/bin/bash\n#SBATCH --partition=gpuq # the DGX only belongs in the 'gpu' partition\n#SBATCH --qos=gpu # need to select 'gpu' QoS\n#SBATCH --job-name=cmaq-gpu\n#SBATCH --output=cmaq-gpu.%j.out\n#SBATCH --error=cmaq-gpu.%j.err\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=64 # up to 128;\n#SBATCH --gres=gpu:A100.40gb:4 # up to 8; only request what you need\n#SBATCH --mem-per-cpu=3500M # memory per CORE; total memory is 1 TB (1,000,000 MB)\n#SBATCH --export=ALL\n#SBATCH --time=0-04:00:00 # set to 1hr; please choose carefully\nset echo\numask 0027\n# to see ID and state of GPUs assigned\nnvidia-smi\n\nmodule load python\nsource /home/aalnaim/CMAQAI/bin/activate\n\ncat <>/groups/ESS/aalnaim/cmaq/rf_pyCaret.py\n# Write first python in Geoweaver# NASA GEOWEAVER\n# CMAQ-AI Model: Training Voting-XGBoost model\n\n# Importing necessary libraries\nimport pandas as pd\nimport sklearn\nfrom sklearn.ensemble import RandomForestRegressor\nfrom xgboost.sklearn import XGBRegressor\nimport pickle\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\n\n# importing data\nfinal=pd.read_csv('/groups/ESS/mislam25/processed_training/agg_data_2021_03_15_to_22_4_30.csv')\nprint(final.head())\nfinal=final.dropna()\n\n# Processing training data\nX = final.drop(['AirNOW_O3', 'CMAQ12KM_O3(ppb)', 'Latitude_x','Longitude_x'],axis=1)\ny = final['AirNOW_O3', 'CMAQ12KM_O3(ppb)']\n\nrf = RandomForestRegressor(bootstrap=True, ccp_alpha=0.0, criterion='mse',\n max_depth=None, max_features='auto', 
max_leaf_nodes=None,\n max_samples=None, min_impurity_decrease=0.0,\n min_samples_leaf=1,\n min_samples_split=2, min_weight_fraction_leaf=0.0,\n n_estimators=100, n_jobs=-1, oob_score=False,\n random_state=3086, verbose=0, warm_start=False)\n\nrf.fit(X, y)\n\n# save the model to disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\npickle.dump(rf, open(filename, 'wb'))\nEOF\npython /groups/ESS/aalnaim/cmaq/rf_pyCaret.py\" >> /groups/ESS/aalnaim/cmaq/cmaq_gpu.slurm\n\nsbatch /groups/ESS/aalnaim/cmaq/cmaq_gpu.slurm\n\nsleep 20", + "history_output" : "Submitted batch job 309474\n", + "history_begin_time" : 1655211911278, + "history_end_time" : 1655214661551, + "history_notes" : null, + "history_process" : "wny2dz", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "iLhFC3pj2OCm", + "history_input" : "echo \"#!/bin/bash\n#SBATCH --partition=gpuq # the DGX only belongs in the 'gpu' partition\n#SBATCH --qos=gpu # need to select 'gpu' QoS\n#SBATCH --job-name=cmaq-gpu\n#SBATCH --output=cmaq-gpu.%j.out\n#SBATCH --error=cmaq-gpu.%j.err\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=64 # up to 128;\n#SBATCH --gres=gpu:A100.40gb:4 # up to 8; only request what you need\n#SBATCH --mem-per-cpu=3500M # memory per CORE; total memory is 1 TB (1,000,000 MB)\n#SBATCH --export=ALL\n#SBATCH --time=0-04:00:00 # set to 1hr; please choose carefully\nset echo\numask 0027\n# to see ID and state of GPUs assigned\nnvidia-smi\n\nmodule load python\nsource /home/aalnaim/CMAQAI/bin/activate\n\ncat <>/groups/ESS/aalnaim/cmaq/rf_pyCaret.py\n# Write first python in Geoweaver# NASA GEOWEAVER\n# CMAQ-AI Model: Training Voting-XGBoost model\n\n# Importing necessary libraries\nimport pandas as pd\nimport sklearn\nfrom sklearn.ensemble import RandomForestRegressor\nfrom xgboost.sklearn import XGBRegressor\nimport pickle\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\n\n# importing data\nfinal=pd.read_csv('/groups/ESS/mislam25/processed_training/agg_data_2021_03_15_to_22_4_30.csv')\nprint(final.head())\nfinal=final.dropna()\n\n# Processing training data\nX = final.drop(['AirNOW_O3', 'CMAQ12KM_O3(ppb)', 'Latitude_x','Longitude_x'],axis=1)\ny = final['AirNOW_O3', 'CMAQ12KM_O3(ppb)']\n\nrf = RandomForestRegressor(bootstrap=True, ccp_alpha=0.0, criterion='mse',\n max_depth=None, max_features='auto', max_leaf_nodes=None,\n max_samples=None, min_impurity_decrease=0.0,\n min_samples_leaf=1,\n min_samples_split=2, min_weight_fraction_leaf=0.0,\n n_estimators=100, n_jobs=-1, oob_score=False,\n random_state=3086, verbose=0, warm_start=False)\n\nrf.fit(X, y)\n\n# save the model to disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\npickle.dump(rf, open(filename, 'wb'))\nEOF\npython /groups/ESS/aalnaim/cmaq/rf_pyCaret.py\" >> /groups/ESS/aalnaim/cmaq/cmaq_gpu.slurm\n\nsbatch /groups/ESS/aalnaim/cmaq/cmaq_gpu.slurm\n\nsleep 20", + "history_output" : "Submitted batch job 309359\n", + "history_begin_time" : 1655176898978, + "history_end_time" : 1655214662184, + "history_notes" : null, + "history_process" : "wny2dz", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "vo4duplBk62q", + "history_input" : "echo \"#!/bin/bash\n#SBATCH --partition=gpuq # the DGX only belongs in the 'gpu' partition\n#SBATCH --qos=gpu # need to select 'gpu' QoS\n#SBATCH --job-name=cmaq-gpu\n#SBATCH --output=cmaq-gpu.%j.out\n#SBATCH --error=cmaq-gpu.%j.err\n#SBATCH --nodes=1\n#SBATCH 
--ntasks-per-node=64 # up to 128;\n#SBATCH --gres=gpu:A100.40gb:4 # up to 8; only request what you need\n#SBATCH --mem-per-cpu=3500M # memory per CORE; total memory is 1 TB (1,000,000 MB)\n#SBATCH --export=ALL\n#SBATCH --time=0-04:00:00 # set to 1hr; please choose carefully\nset echo\numask 0027\n# to see ID and state of GPUs assigned\nnvidia-smi\n\nmodule load python\nsource /home/aalnaim/CMAQAI/bin/activate\n\ncat <>/groups/ESS/aalnaim/cmaq/rf_pyCaret.py\n# Write first python in Geoweaver# NASA GEOWEAVER\n# CMAQ-AI Model: Training Voting-XGBoost model\n\n# Importing necessary libraries\nimport pandas as pd\nimport sklearn\nfrom sklearn.ensemble import RandomForestRegressor\nfrom xgboost.sklearn import XGBRegressor\nimport pickle\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\n\n# importing data\nfinal=pd.read_csv('/groups/ESS/mislam25/processed_training/agg_data_2021_03_15_to_22_4_30.csv')\nprint(final.head())\nfinal=final.dropna()\n\n# Processing training data\nX = final.drop(['AirNOW_O3', 'CMAQ12KM_O3(ppb)', 'Latitude_x','Longitude_x'],axis=1)\ny = final['AirNOW_O3', 'CMAQ12KM_O3(ppb)']\n\nrf = RandomForestRegressor(bootstrap=True, ccp_alpha=0.0, criterion='mse',\n max_depth=None, max_features='auto', max_leaf_nodes=None,\n max_samples=None, min_impurity_decrease=0.0,\n min_samples_leaf=1,\n min_samples_split=2, min_weight_fraction_leaf=0.0,\n n_estimators=100, n_jobs=-1, oob_score=False,\n random_state=3086, verbose=0, warm_start=False)\n\nrf.fit(X, y)\n\n# save the model to disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\npickle.dump(rf, open(filename, 'wb'))\nEOF\npython /groups/ESS/aalnaim/cmaq/rf_pyCaret.py\" >> /groups/ESS/aalnaim/cmaq/cmaq_gpu.slurm\n\nsbatch /groups/ESS/aalnaim/cmaq/cmaq_gpu.slurm\n\nsleep 20", + "history_output" : "Submitted batch job 309343\n", + "history_begin_time" : 1655174481120, + "history_end_time" : 1655214662658, + "history_notes" : null, + "history_process" : "wny2dz", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "W0OalQdYgJKf", + "history_input" : "echo \"#!/bin/bash\n#SBATCH --partition=gpuq # the DGX only belongs in the 'gpu' partition\n#SBATCH --qos=gpu # need to select 'gpu' QoS\n#SBATCH --job-name=cmaq-gpu\n#SBATCH --output=cmaq-gpu.%j.out\n#SBATCH --error=cmaq-gpu.%j.err\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=64 # up to 128;\n#SBATCH --gres=gpu:A100.40gb:4 # up to 8; only request what you need\n#SBATCH --mem-per-cpu=3500M # memory per CORE; total memory is 1 TB (1,000,000 MB)\n#SBATCH --export=ALL\n#SBATCH --time=0-04:00:00 # set to 1hr; please choose carefully\nset echo\numask 0027\n# to see ID and state of GPUs assigned\nnvidia-smi\n\nmodule load python\nsource /home/aalnaim/CMAQAI/bin/activate\n\ncat <>/groups/ESS/aalnaim/cmaq/rf_pyCaret.py\n# Write first python in Geoweaver# NASA GEOWEAVER\n# CMAQ-AI Model: Training Voting-XGBoost model\n\n# Importing necessary libraries\nimport pandas as pd\nimport sklearn\nfrom sklearn.ensemble import RandomForestRegressor\nfrom xgboost.sklearn import XGBRegressor\nimport pickle\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\n\n# importing data\nfinal=pd.read_csv('/groups/ESS/mislam25/processed_training/agg_data_2021_03_15_to_22_4_30.csv')\nprint(final.head())\nfinal=final.dropna()\n\n# Processing training data\nX = final.drop(['AirNOW_O3', 'CMAQ12KM_O3(ppb)', 'Latitude_x','Longitude_x'],axis=1)\ny = final['AirNOW_O3', 'CMAQ12KM_O3(ppb)']\n\nrf = 
RandomForestRegressor(bootstrap=True, ccp_alpha=0.0, criterion='mse',\n max_depth=None, max_features='auto', max_leaf_nodes=None,\n max_samples=None, min_impurity_decrease=0.0,\n min_samples_leaf=1,\n min_samples_split=2, min_weight_fraction_leaf=0.0,\n n_estimators=100, n_jobs=-1, oob_score=False,\n random_state=3086, verbose=0, warm_start=False)\n\nrf.fit(X, y)\n\n# save the model to disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\npickle.dump(rf, open(filename, 'wb'))\nEOF\npython /groups/ESS/aalnaim/cmaq/rf_pyCaret.py\" >> /groups/ESS/aalnaim/cmaq/cmaq_gpu.slurm\n\nsbatch /groups/ESS/aalnaim/cmaq/cmaq_gpu.slurm\n\nsleep 20", + "history_output" : "Submitted batch job 309342\n", + "history_begin_time" : 1655174426600, + "history_end_time" : 1655214663796, + "history_notes" : null, + "history_process" : "wny2dz", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "HkNaF85gIEwR", + "history_input" : "echo \"#!/bin/bash\n#SBATCH --partition=gpuq # the DGX only belongs in the 'gpu' partition\n#SBATCH --qos=gpu # need to select 'gpu' QoS\n#SBATCH --job-name=cmaq-gpu\n#SBATCH --output=cmaq-gpu.%j.out\n#SBATCH --error=cmaq-gpu.%j.err\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=64 # up to 128;\n#SBATCH --gres=gpu:A100.40gb:4 # up to 8; only request what you need\n#SBATCH --mem-per-cpu=3500M # memory per CORE; total memory is 1 TB (1,000,000 MB)\n#SBATCH --export=ALL\n#SBATCH --time=0-04:00:00 # set to 1hr; please choose carefully\nset echo\numask 0027\n# to see ID and state of GPUs assigned\nnvidia-smi\n\nmodule load python\nsource /home/aalnaim/CMAQAI/bin/activate\n\ncat <>/groups/ESS/aalnaim/cmaq/rf_pyCaret.py\n# Write first python in Geoweaver# NASA GEOWEAVER\n# CMAQ-AI Model: Training Voting-XGBoost model\n\n# Importing necessary libraries\nimport pandas as pd\nimport sklearn\nfrom sklearn.ensemble import RandomForestRegressor\nfrom xgboost.sklearn import XGBRegressor\nimport pickle\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\n\n# importing data\nfinal=pd.read_csv('/groups/ESS/mislam25/processed_training/agg_data_2021_03_15_to_22_4_30.csv')\nprint(final.head())\nfinal=final.dropna()\n\n# Processing training data\nX = final.drop(['AirNOW_O3', 'CMAQ12KM_O3(ppb)', 'Latitude_x','Longitude_x'],axis=1)\ny = final['AirNOW_O3', 'CMAQ12KM_O3(ppb)']\n\nrf = RandomForestRegressor(bootstrap=True, ccp_alpha=0.0, criterion='mse',\n max_depth=None, max_features='auto', max_leaf_nodes=None,\n max_samples=None, min_impurity_decrease=0.0,\n min_samples_leaf=1,\n min_samples_split=2, min_weight_fraction_leaf=0.0,\n n_estimators=100, n_jobs=-1, oob_score=False,\n random_state=3086, verbose=0, warm_start=False)\n\nrf.fit(X, y)\n\n# save the model to disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\npickle.dump(rf, open(filename, 'wb'))\nEOF\npython /groups/ESS/aalnaim/cmaq/rf_pyCaret.py\" >> /groups/ESS/aalnaim/cmaq/cmaq_gpu.slurm\n\nsbatch /groups/ESS/aalnaim/cmaq/cmaq_gpu.slurm\n\nsleep 20", + "history_output" : "Submitted batch job 309206\n", + "history_begin_time" : 1655161058930, + "history_end_time" : 1655214664277, + "history_notes" : null, + "history_process" : "wny2dz", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "l96UTcOSv9BW", + "history_input" : "echo \"#!/bin/bash\n#SBATCH --partition=gpuq # the DGX only belongs in the 'gpu' partition\n#SBATCH --qos=gpu # need to select 'gpu' QoS\n#SBATCH 
--job-name=cmaq-gpu\n#SBATCH --output=cmaq-gpu.%j.out\n#SBATCH --error=cmaq-gpu.%j.err\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=64 # up to 128;\n#SBATCH --gres=gpu:A100.40gb:4 # up to 8; only request what you need\n#SBATCH --mem-per-cpu=3500M # memory per CORE; total memory is 1 TB (1,000,000 MB)\n#SBATCH --export=ALL\n#SBATCH --time=0-04:00:00 # set to 1hr; please choose carefully\nset echo\numask 0027\n# to see ID and state of GPUs assigned\nnvidia-smi\n\nmodule load python\nsource /home/aalnaim/CMAQAI/bin/activate\n\ncat <>/groups/ESS/aalnaim/cmaq/rf_pyCaret.py\n# Write first python in Geoweaver# NASA GEOWEAVER\n# CMAQ-AI Model: Training Voting-XGBoost model\n\n# Importing necessary libraries\nimport pandas as pd\nimport sklearn\nfrom sklearn.ensemble import RandomForestRegressor\nfrom xgboost.sklearn import XGBRegressor\nimport pickle\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\n\n# importing data\nfinal=pd.read_csv('/groups/ESS/mislam25/processed_training/agg_data_2021_03_15_to_22_4_30.csv')\nprint(final.head())\nfinal=final.dropna()\n\n# Processing training data\nX = final.drop(['AirNOW_O3','Latitude_x','Longitude_x'],axis=1)\ny = final['AirNOW_O3']\n\nrf = RandomForestRegressor(bootstrap=True, ccp_alpha=0.0, criterion='mse',\n max_depth=None, max_features='auto', max_leaf_nodes=None,\n max_samples=None, min_impurity_decrease=0.0,\n min_samples_leaf=1,\n min_samples_split=2, min_weight_fraction_leaf=0.0,\n n_estimators=100, n_jobs=-1, oob_score=False,\n random_state=3086, verbose=0, warm_start=False)\n\nrf.fit(X, y)\n\n# save the model to disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\npickle.dump(rf, open(filename, 'wb'))\nEOF\npython /groups/ESS/aalnaim/cmaq/rf_pyCaret.py\" >> /groups/ESS/aalnaim/cmaq/cmaq_gpu.slurm\n\nsbatch /groups/ESS/aalnaim/cmaq/cmaq_gpu.slurm\n\nsleep 20", + "history_output" : "Submitted batch job 294124\n", + "history_begin_time" : 1655075899025, + "history_end_time" : 1655214664758, + "history_notes" : null, + "history_process" : "wny2dz", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "baa04m11lwy", + "history_input" : "echo \"#!/bin/bash\n#SBATCH --partition=gpuq # the DGX only belongs in the 'gpu' partition\n#SBATCH --qos=gpu # need to select 'gpu' QoS\n#SBATCH --job-name=cmaq-gpu\n#SBATCH --output=cmaq-gpu.%j.out\n#SBATCH --error=cmaq-gpu.%j.err\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=64 # up to 128;\n#SBATCH --gres=gpu:A100.40gb:4 # up to 8; only request what you need\n#SBATCH --mem-per-cpu=3500M # memory per CORE; total memory is 1 TB (1,000,000 MB)\n#SBATCH --export=ALL\n#SBATCH --time=0-04:00:00 # set to 1hr; please choose carefully\nset echo\numask 0027\n# to see ID and state of GPUs assigned\nnvidia-smi\n\nmodule load python\nsource /home/aalnaim/CMAQAI/bin/activate\n\ncat <>/groups/ESS/aalnaim/cmaq/rf_pyCaret.py\n# Write first python in Geoweaver# NASA GEOWEAVER\n# CMAQ-AI Model: Training Voting-XGBoost model\n\n# Importing necessary libraries\nimport pandas as pd\nimport sklearn\nfrom sklearn.ensemble import RandomForestRegressor\nfrom xgboost.sklearn import XGBRegressor\nimport pickle\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\n\n# importing data\nfinal=pd.read_csv('/groups/ESS/mislam25/processed_training/agg_data_2021_03_15_to_22_4_30.csv')\nprint(final.head())\nfinal=final.dropna()\n\n# Processing training data\nX = final.drop(['AirNOW_O3','Latitude_x','Longitude_x'],axis=1)\ny = 
final['AirNOW_O3']\n\nrf = RandomForestRegressor(bootstrap=True, ccp_alpha=0.0, criterion='mse',\n max_depth=None, max_features='auto', max_leaf_nodes=None,\n max_samples=None, min_impurity_decrease=0.0,\n min_samples_leaf=1,\n min_samples_split=2, min_weight_fraction_leaf=0.0,\n n_estimators=100, n_jobs=-1, oob_score=False,\n random_state=3086, verbose=0, warm_start=False)\n\n#rf.fit(X, y)\n\n# save the model to disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\n#pickle.dump(rf, open(filename, 'wb'))\nEOF\npython /groups/ESS/aalnaim/cmaq/rf_pyCaret.py\" >> /groups/ESS/aalnaim/cmaq/cmaq_gpu.slurm\n\nsbatch /groups/ESS/aalnaim/cmaq/cmaq_gpu.slurm\n\nsleep 20", + "history_output" : "Submitted batch job 293687\n", + "history_begin_time" : 1655073603587, + "history_end_time" : 1655073626601, + "history_notes" : null, + "history_process" : "wny2dz", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "yDvdEvgMmIn4", + "history_input" : "echo \"#!/bin/bash\n#SBATCH --partition=gpuq # the DGX only belongs in the 'gpu' partition\n#SBATCH --qos=gpu # need to select 'gpu' QoS\n#SBATCH --job-name=cmaq-gpu\n#SBATCH --output=cmaq-gpu.%j.out\n#SBATCH --error=cmaq-gpu.%j.err\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=64 # up to 128;\n#SBATCH --gres=gpu:A100.40gb:4 # up to 8; only request what you need\n#SBATCH --mem-per-cpu=3500M # memory per CORE; total memory is 1 TB (1,000,000 MB)\n#SBATCH --export=ALL\n#SBATCH --time=0-04:00:00 # set to 1hr; please choose carefully\nset echo\numask 0027\n# to see ID and state of GPUs assigned\nnvidia-smi\n\nmodule load python\nsource /home/aalnaim/CMAQAI/bin/activate\n\ncat <>/groups/ESS/aalnaim/cmaq/rf_pyCaret.py\n# Write first python in Geoweaver# NASA GEOWEAVER\n# CMAQ-AI Model: Training Voting-XGBoost model\n\n# Importing necessary libraries\nimport pandas as pd\nimport sklearn\nfrom sklearn.ensemble import RandomForestRegressor\nfrom xgboost.sklearn import XGBRegressor\nimport pickle\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\n\n# importing data\nfinal=pd.read_csv('/groups/ESS/mislam25/processed_training/agg_data_2021_03_15_to_22_4_30.csv')\nprint(final.head())\nfinal=final.dropna()\n\n# Processing training data\nX = final.drop(['AirNOW_O3','Latitude_x','Longitude_x'],axis=1)\ny = final['AirNOW_O3']\n\nrf = RandomForestRegressor(bootstrap=True, ccp_alpha=0.0, criterion='mse',\n max_depth=None, max_features='auto', max_leaf_nodes=None,\n max_samples=None, min_impurity_decrease=0.0,\n min_samples_leaf=1,\n min_samples_split=2, min_weight_fraction_leaf=0.0,\n n_estimators=100, n_jobs=-1, oob_score=False,\n random_state=3086, verbose=0, warm_start=False)\n\n#rf.fit(X, y)\n\n# save the model to disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\n#pickle.dump(rf, open(filename, 'wb'))\nEOF\npython /groups/ESS/aalnaim/cmaq/rf_pyCaret.py\" >> /groups/ESS/aalnaim/cmaq/cmaq_gpu.slurm\n\nsbatch /groups/ESS/aalnaim/cmaq/cmaq_gpu.slurm\n\nsleep 20", + "history_output" : "Submitted batch job 269679\n", + "history_begin_time" : 1654612002594, + "history_end_time" : 1654619308939, + "history_notes" : null, + "history_process" : "wny2dz", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "hwrti1aWxHMv", + "history_input" : "echo \"#!/bin/bash\n#SBATCH --partition=gpuq # the DGX only belongs in the 'gpu' partition\n#SBATCH --qos=gpu # need to select 'gpu' QoS\n#SBATCH 
--job-name=cmaq-gpu\n#SBATCH --output=cmaq-gpu.%j.out\n#SBATCH --error=cmaq-gpu.%j.err\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=64 # up to 128;\n#SBATCH --gres=gpu:A100.40gb:4 # up to 8; only request what you need\n#SBATCH --mem-per-cpu=3500M # memory per CORE; total memory is 1 TB (1,000,000 MB)\n#SBATCH --export=ALL\n#SBATCH --time=0-04:00:00 # set to 1hr; please choose carefully\nset echo\numask 0027\n# to see ID and state of GPUs assigned\nnvidia-smi\n\nmodule load python\nsource /home/aalnaim/CMAQAI/bin/activate\n\ncat <>/groups/ESS/aalnaim/cmaq/rf_pyCaret.py\n# Write first python in Geoweaver# NASA GEOWEAVER\n# CMAQ-AI Model: Training Voting-XGBoost model\n\n# Importing necessary libraries\nimport pandas as pd\nimport sklearn\nfrom sklearn.ensemble import RandomForestRegressor\nfrom xgboost.sklearn import XGBRegressor\nimport pickle\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\n\n# importing data\nfinal=pd.read_csv('/groups/ESS/mislam25/processed_training/agg_data_2021_03_15_to_22_4_30.csv')\nprint(final.head())\nfinal=final.dropna()\n\n# Processing training data\nX = final.drop(['AirNOW_O3','Latitude_x','Longitude_x'],axis=1)\ny = final['AirNOW_O3']\n\nrf = RandomForestRegressor(bootstrap=True, ccp_alpha=0.0, criterion='mse',\n max_depth=None, max_features='auto', max_leaf_nodes=None,\n max_samples=None, min_impurity_decrease=0.0,\n min_samples_leaf=1,\n min_samples_split=2, min_weight_fraction_leaf=0.0,\n n_estimators=100, n_jobs=-1, oob_score=False,\n random_state=3086, verbose=0, warm_start=False)\n\n#rf.fit(X, y)\n\n# save the model to disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\n#pickle.dump(rf, open(filename, 'wb'))\nEOF\npython /groups/ESS/aalnaim/cmaq/rf_pyCaret.py\" >> /groups/ESS/aalnaim/cmaq/cmaq_gpu.slurm\n\nsbatch /groups/ESS/aalnaim/cmaq/cmaq_gpu.slurm\n\nsleep 20", + "history_output" : "Submitted batch job 269677\n", + "history_begin_time" : 1654611898350, + "history_end_time" : 1654619308373, + "history_notes" : null, + "history_process" : "wny2dz", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "Lep1ufXcecjC", + "history_input" : "echo \"#!/bin/bash\n#SBATCH --partition=gpuq # the DGX only belongs in the 'gpu' partition\n#SBATCH --qos=gpu # need to select 'gpu' QoS\n#SBATCH --job-name=cmaq-gpu\n#SBATCH --output=cmaq-gpu.%j.out\n#SBATCH --error=cmaq-gpu.%j.err\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=64 # up to 128;\n#SBATCH --gres=gpu:A100.40gb:4 # up to 8; only request what you need\n#SBATCH --mem-per-cpu=3500M # memory per CORE; total memory is 1 TB (1,000,000 MB)\n#SBATCH --export=ALL\n#SBATCH --time=0-04:00:00 # set to 1hr; please choose carefully\nset echo\numask 0027\n# to see ID and state of GPUs assigned\nnvidia-smi\n\nmodule load python\nsource /home/aalnaim/CMAQAI/bin/activate\n\ncat <>/groups/ESS/aalnaim/cmaq/rf_pyCaret.py\n# Write first python in Geoweaver# NASA GEOWEAVER\n# CMAQ-AI Model: Training Voting-XGBoost model\n\n# Importing necessary libraries\nimport pandas as pd\nimport sklearn\nfrom sklearn.ensemble import RandomForestRegressor\nfrom xgboost.sklearn import XGBRegressor\nimport pickle\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\n\n# importing data\nfinal=pd.read_csv('/groups/ESS/mislam25/processed_training/agg_data_2021_03_15_to_22_4_30.csv')\nprint(final.head())\nfinal=final.dropna()\n\n# Processing training data\nX = final.drop(['AirNOW_O3','Latitude_x','Longitude_x'],axis=1)\ny = 
final['AirNOW_O3']\n\nrf = RandomForestRegressor(bootstrap=True, ccp_alpha=0.0, criterion='mse',\n max_depth=None, max_features='auto', max_leaf_nodes=None,\n max_samples=None, min_impurity_decrease=0.0,\n min_samples_leaf=1,\n min_samples_split=2, min_weight_fraction_leaf=0.0,\n n_estimators=100, n_jobs=-1, oob_score=False,\n random_state=3086, verbose=0, warm_start=False)\n\n#rf.fit(X, y)\n\n# save the model to disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\n#pickle.dump(rf, open(filename, 'wb'))\nEOF\npython /groups/ESS/aalnaim/cmaq/rf_pyCaret.py\" >> /groups/ESS/aalnaim/cmaq/cmaq_gpu.slurm\n\nsbatch /groups/ESS/aalnaim/cmaq/cmaq_gpu.slurm", + "history_output" : "Submitted batch job 269675\n", + "history_begin_time" : 1654611695981, + "history_end_time" : 1654619307839, + "history_notes" : null, + "history_process" : "wny2dz", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "3xBA7r0B2HkM", + "history_input" : "echo \"#!/bin/bash\n#SBATCH --partition=gpuq # the DGX only belongs in the 'gpu' partition\n#SBATCH --qos=gpu # need to select 'gpu' QoS\n#SBATCH --job-name=cmaq-gpu\n#SBATCH --output=cmaq-gpu.%j.out\n#SBATCH --error=cmaq-gpu.%j.err\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=64 # up to 128;\n#SBATCH --gres=gpu:A100.40gb:4 # up to 8; only request what you need\n#SBATCH --mem-per-cpu=3500M # memory per CORE; total memory is 1 TB (1,000,000 MB)\n#SBATCH --export=ALL\n#SBATCH --time=0-04:00:00 # set to 1hr; please choose carefully\nset echo\numask 0027\n# to see ID and state of GPUs assigned\nnvidia-smi\n\nmodule load python\nsource /home/aalnaim/CMAQAI/bin/activate\n\ncat <>/groups/ESS/aalnaim/cmaq/rf_pyCaret.py\n# Write first python in Geoweaver# NASA GEOWEAVER\n# CMAQ-AI Model: Training Voting-XGBoost model\n\n# Importing necessary libraries\nimport pandas as pd\nimport sklearn\nfrom sklearn.ensemble import RandomForestRegressor\nfrom xgboost.sklearn import XGBRegressor\nimport pickle\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\n\n# importing data\nfinal=pd.read_csv('/groups/ESS/mislam25/processed_training/agg_data_2021_03_15_to_22_4_30.csv')\nprint(final.head())\nfinal=final.dropna()\n\n# Processing training data\nX = final.drop(['AirNOW_O3','Latitude_x','Longitude_x'],axis=1)\ny = final['AirNOW_O3']\n\nrf = RandomForestRegressor(bootstrap=True, ccp_alpha=0.0, criterion='mse',\n max_depth=None, max_features='auto', max_leaf_nodes=None,\n max_samples=None, min_impurity_decrease=0.0,\n min_samples_leaf=1,\n min_samples_split=2, min_weight_fraction_leaf=0.0,\n n_estimators=100, n_jobs=-1, oob_score=False,\n random_state=3086, verbose=0, warm_start=False)\n\n#rf.fit(X, y)\n\n# save the model to disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\n#pickle.dump(rf, open(filename, 'wb'))\nEOF\npython /groups/ESS/aalnaim/cmaq/rf_pyCaret.py\" >> /groups/ESS/aalnaim/cmaq/cmaq_gpu.slurm\n\nsbatch /groups/ESS/aalnaim/cmaq/cmaq_gpu.slurm", + "history_output" : "Submitted batch job 268230\n", + "history_begin_time" : 1654536599783, + "history_end_time" : 1654619307243, + "history_notes" : null, + "history_process" : "wny2dz", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "Z5kUr2I8qzhb", + "history_input" : "echo \"#!/bin/bash\n#SBATCH --partition=gpuq # the DGX only belongs in the 'gpu' partition\n#SBATCH --qos=gpu # need to select 'gpu' QoS\n#SBATCH --job-name=cmaq-gpu\n#SBATCH 
--output=cmaq-gpu.%j.out\n#SBATCH --error=cmaq-gpu.%j.err\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=64 # up to 128;\n#SBATCH --gres=gpu:A100.40gb:4 # up to 8; only request what you need\n#SBATCH --mem-per-cpu=3500M # memory per CORE; total memory is 1 TB (1,000,000 MB)\n#SBATCH --export=ALL\n#SBATCH --time=0-04:00:00 # set to 1hr; please choose carefully\nset echo\numask 0027\n# to see ID and state of GPUs assigned\nnvidia-smi\n\nmodule load python\nsource /home/aalnaim/CMAQAI/bin/activate\n\ncat <>/groups/ESS/aalnaim/cmaq/rf_pyCaret.py\n# Write first python in Geoweaver# NASA GEOWEAVER\n# CMAQ-AI Model: Training Voting-XGBoost model\n\n# Importing necessary libraries\nimport pandas as pd\nimport sklearn\nfrom sklearn.ensemble import RandomForestRegressor\nfrom xgboost.sklearn import XGBRegressor\nimport pickle\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\n\n# importing data\nfinal=pd.read_csv('/groups/ESS/mislam25/processed_training/agg_data_2021_03_15_to_22_4_30.csv')\nprint(final.head())\nfinal=final.dropna()\n\n# Processing training data\nX = final.drop(['AirNOW_O3','Latitude_x','Longitude_x'],axis=1)\ny = final['AirNOW_O3']\n\nrf = RandomForestRegressor(bootstrap=True, ccp_alpha=0.0, criterion='mse',\n max_depth=None, max_features='auto', max_leaf_nodes=None,\n max_samples=None, min_impurity_decrease=0.0,\n min_samples_leaf=1,\n min_samples_split=2, min_weight_fraction_leaf=0.0,\n n_estimators=100, n_jobs=-1, oob_score=False,\n random_state=3086, verbose=0, warm_start=False)\n\n#rf.fit(X, y)\n\n# save the model to disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\n#pickle.dump(rf, open(filename, 'wb'))\nprint('IT WORKED')\nEOF\npython /groups/ESS/aalnaim/cmaq/rf_pyCaret.py\" >> /groups/ESS/aalnaim/cmaq/cmaq_gpu.slurm\n\n", + "history_output" : "Running", + "history_begin_time" : 1654536360346, + "history_end_time" : 1654536532829, + "history_notes" : null, + "history_process" : "wny2dz", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "wn2hbzhu2ay", + "history_input" : "echo \"#!/bin/bash\n#SBATCH --partition=gpuq # the DGX only belongs in the 'gpu' partition\n#SBATCH --qos=gpu # need to select 'gpu' QoS\n#SBATCH --job-name=cmaq-gpu\n#SBATCH --output=cmaq-gpu.%j.out\n#SBATCH --error=cmaq-gpu.%j.err\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=64 # up to 128;\n#SBATCH --gres=gpu:A100.40gb:4 # up to 8; only request what you need\n#SBATCH --mem-per-cpu=3500M # memory per CORE; total memory is 1 TB (1,000,000 MB)\n#SBATCH --export=ALL\n#SBATCH --time=0-04:00:00 # set to 1hr; please choose carefully\nset echo\numask 0027\n# to see ID and state of GPUs assigned\nnvidia-smi\n\nmodule load python\nsource /home/aalnaim/CMAQAI/bin/activate\n\ncat <>/groups/ESS/aalnaim/cmaq/rf_pyCaret.py\n# Write first python in Geoweaver# NASA GEOWEAVER\n# CMAQ-AI Model: Training Voting-XGBoost model\n\n# Importing necessary libraries\nimport pandas as pd\nimport sklearn\nfrom sklearn.ensemble import RandomForestRegressor\nfrom xgboost.sklearn import XGBRegressor\nimport pickle\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\n\n# importing data\nfinal=pd.read_csv('/groups/ESS/mislam25/processed_training/agg_data_2021_03_15_to_22_4_30.csv')\nprint(final.head())\nfinal=final.dropna()\n\n# Processing training data\nX = final.drop(['AirNOW_O3','Latitude_x','Longitude_x'],axis=1)\ny = final['AirNOW_O3']\n\nrf = RandomForestRegressor(bootstrap=True, ccp_alpha=0.0, criterion='mse',\n 
max_depth=None, max_features='auto', max_leaf_nodes=None,\n max_samples=None, min_impurity_decrease=0.0,\n min_samples_leaf=1,\n min_samples_split=2, min_weight_fraction_leaf=0.0,\n n_estimators=100, n_jobs=-1, oob_score=False,\n random_state=3086, verbose=0, warm_start=False)\n\n#rf.fit(X, y)\n\n# save the model to disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\n#pickle.dump(rf, open(filename, 'wb'))\nprint(\"IT WORKED\")\nEOF\npython /groups/ESS/aalnaim/cmaq/rf_pyCaret.py\" >> /groups/ESS/aalnaim/cmaq/cmaq_gpu.slurm\n\n", + "history_output" : "Running", + "history_begin_time" : 1654535458975, + "history_end_time" : 1654535464146, + "history_notes" : null, + "history_process" : "wny2dz", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "MsjbjCC2EwMj", + "history_input" : "echo \"#!/bin/bash\n#SBATCH --partition=gpuq # the DGX only belongs in the 'gpu' partition\n#SBATCH --qos=gpu # need to select 'gpu' QoS\n#SBATCH --job-name=cmaq-gpu\n#SBATCH --output=cmaq-gpu.%j.out\n#SBATCH --error=cmaq-gpu.%j.err\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=64 # up to 128;\n#SBATCH --gres=gpu:A100.40gb:4 # up to 8; only request what you need\n#SBATCH --mem-per-cpu=3500M # memory per CORE; total memory is 1 TB (1,000,000 MB)\n#SBATCH --export=ALL\n#SBATCH --time=0-04:00:00 # set to 1hr; please choose carefully\nset echo\numask 0027\n# to see ID and state of GPUs assigned\nnvidia-smi\n\nmodule load python\nsource /home/aalnaim/CMAQAI/bin/activate\n\ncat <>/groups/ESS/aalnaim/cmaq/rf_pyCaret.py\n# Write first python in Geoweaver# NASA GEOWEAVER\n# CMAQ-AI Model: Training Voting-XGBoost model\n\n# Importing necessary libraries\nimport pandas as pd\nimport sklearn\nfrom sklearn.ensemble import RandomForestRegressor\nfrom xgboost.sklearn import XGBRegressor\nimport pickle\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\n\n# importing data\nfinal=pd.read_csv('/groups/ESS/mislam25/processed_training/agg_data_2021_03_15_to_22_4_30.csv')\nprint(final.head())\nfinal=final.dropna()\n\n# Processing training data\nX = final.drop(['AirNOW_O3','Latitude_x','Longitude_x'],axis=1)\ny = final['AirNOW_O3']\n\nrf = RandomForestRegressor(bootstrap=True, ccp_alpha=0.0, criterion='mse',\n max_depth=None, max_features='auto', max_leaf_nodes=None,\n max_samples=None, min_impurity_decrease=0.0,\n min_samples_leaf=1,\n min_samples_split=2, min_weight_fraction_leaf=0.0,\n n_estimators=100, n_jobs=-1, oob_score=False,\n random_state=3086, verbose=0, warm_start=False)\n\nrf.fit(X, y)\n\n# save the model to disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\npickle.dump(rf, open(filename, 'wb'))\nEOF\npython /groups/ESS/aalnaim/cmaq/rf_pyCaret.py\" >> /groups/ESS/aalnaim/cmaq/cmaq_gpu.slurm\n\nsbatch /groups/ESS/aalnaim/cmaq/cmaq_gpu.slurm", + "history_output" : "Submitted batch job 268141\n", + "history_begin_time" : 1654491910593, + "history_end_time" : 1654535259974, + "history_notes" : null, + "history_process" : "wny2dz", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "cvbKVxUSdrbt", + "history_input" : "echo \"#!/bin/bash\n#SBATCH --partition=gpuq # the DGX only belongs in the 'gpu' partition\n#SBATCH --qos=gpu # need to select 'gpu' QoS\n#SBATCH --job-name=cmaq-gpu\n#SBATCH --output=cmaq-gpu.%j.out\n#SBATCH --error=cmaq-gpu.%j.err\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=64 # up to 128;\n#SBATCH --gres=gpu:A100.40gb:4 # up to 8; 
only request what you need\n#SBATCH --mem-per-cpu=3500M # memory per CORE; total memory is 1 TB (1,000,000 MB)\n#SBATCH --export=ALL\n#SBATCH --time=0-04:00:00 # set to 1hr; please choose carefully\nset echo\numask 0027\n# to see ID and state of GPUs assigned\nnvidia-smi\n\nmodule load python\nsource /home/aalnaim/CMAQAI/bin/activate\n\ncat <>/groups/ESS/aalnaim/cmaq/rf_pyCaret.py\n# Write first python in Geoweaver# NASA GEOWEAVER\n# CMAQ-AI Model: Training Voting-XGBoost model\n\n# Importing necessary libraries\nimport pandas as pd\nimport sklearn\nfrom sklearn.ensemble import RandomForestRegressor\nfrom xgboost.sklearn import XGBRegressor\nimport pickle\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\n\n# importing data\nfinal=pd.read_csv('/groups/ESS/mislam25/processed_training/agg_data_2021_03_15_to_22_4_30.csv')\nprint(final.head())\nfinal=final.dropna()\n\n# Processing training data\nX = final.drop(['AirNOW_O3','Latitude_x','Longitude_x'],axis=1)\ny = final['AirNOW_O3']\n\nrf = RandomForestRegressor(bootstrap=True, ccp_alpha=0.0, criterion='mse',\n max_depth=None, max_features='auto', max_leaf_nodes=None,\n max_samples=None, min_impurity_decrease=0.0,\n min_samples_leaf=1,\n min_samples_split=2, min_weight_fraction_leaf=0.0,\n n_estimators=100, n_jobs=-1, oob_score=False,\n random_state=3086, verbose=0, warm_start=False)\n\nrf.fit(X, y)\n\n# save the model to disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\npickle.dump(rf, open(filename, 'wb'))\nEOF\npython /home/aalnaim/rf_pyCaret.py\" >> /groups/ESS/aalnaim/cmaq/cmaq_gpu.slurm\n\nsbatch /groups/ESS/aalnaim/cmaq/cmaq_gpu.slurm", + "history_output" : "Submitted batch job 268139\n", + "history_begin_time" : 1654491776428, + "history_end_time" : 1654535260558, + "history_notes" : null, + "history_process" : "wny2dz", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "tGQb2syaWuR0", + "history_input" : "echo \"#!/bin/bash\n#SBATCH --partition=gpuq # the DGX only belongs in the 'gpu' partition\n#SBATCH --qos=gpu # need to select 'gpu' QoS\n#SBATCH --job-name=cmaq-gpu\n#SBATCH --output=cmaq-gpu.%j.out\n#SBATCH --error=cmaq-gpu.%j.err\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=64 # up to 128;\n#SBATCH --gres=gpu:A100.40gb:4 # up to 8; only request what you need\n#SBATCH --mem-per-cpu=3500M # memory per CORE; total memory is 1 TB (1,000,000 MB)\n#SBATCH --export=ALL\n#SBATCH --time=0-04:00:00 # set to 1hr; please choose carefully\nset echo\numask 0027\n# to see ID and state of GPUs assigned\nnvidia-smi\n\nmodule load python\nsource /home/aalnaim/CMAQAI/bin/activate\n\ncat <>/home/aalnaim/rf_pyCaret.py\n# Write first python in Geoweaver# NASA GEOWEAVER\n# CMAQ-AI Model: Training Voting-XGBoost model\n\n# Importing necessary libraries\nimport pandas as pd\nimport sklearn\nfrom sklearn.ensemble import RandomForestRegressor\nfrom xgboost.sklearn import XGBRegressor\nimport pickle\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\n\n# importing data\nfinal=pd.read_csv('/groups/ESS/mislam25/processed_training/agg_data_2021_03_15_to_22_4_30.csv')\nprint(final.head())\nfinal=final.dropna()\n\n# Processing training data\nX = final.drop(['AirNOW_O3','Latitude_x','Longitude_x'],axis=1)\ny = final['AirNOW_O3']\n\nrf = RandomForestRegressor(bootstrap=True, ccp_alpha=0.0, criterion='mse',\n max_depth=None, max_features='auto', max_leaf_nodes=None,\n max_samples=None, min_impurity_decrease=0.0,\n min_samples_leaf=1,\n 
min_samples_split=2, min_weight_fraction_leaf=0.0,\n n_estimators=100, n_jobs=-1, oob_score=False,\n random_state=3086, verbose=0, warm_start=False)\n\nrf.fit(X, y)\n\n# save the model to disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\npickle.dump(rf, open(filename, 'wb'))\nEOF\npython /home/aalnaim/rf_pyCaret.py\" >> /home/aalnaim/cmaq_gpu.slurm\n\nsbatch /home/aalnaim/cmaq_gpu.slurm", + "history_output" : "Submitted batch job 268137\n", + "history_begin_time" : 1654491629015, + "history_end_time" : 1654535261059, + "history_notes" : null, + "history_process" : "wny2dz", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "usex3SwJ6mTS", + "history_input" : "echo \"#!/bin/bash\n#SBATCH --partition=gpuq # the DGX only belongs in the 'gpu' partition\n#SBATCH --qos=gpu # need to select 'gpu' QoS\n#SBATCH --job-name=python-gpu\n#SBATCH --output=python-gpu.%j.out\n#SBATCH --error=python-gpu.%j.err\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=64 # up to 128;\n#SBATCH --gres=gpu:A100.40gb:4 # up to 8; only request what you need\n#SBATCH --mem-per-cpu=3500M # memory per CORE; total memory is 1 TB (1,000,000 MB)\n#SBATCH --export=ALL\n#SBATCH --time=0-04:00:00 # set to 1hr; please choose carefully\nset echo\numask 0027\n# to see ID and state of GPUs assigned\nnvidia-smi\n\nmodule load python\ncat <>/home/aalnaim/rf_pyCaret.py\n# Write first python in Geoweaver# NASA GEOWEAVER\n# CMAQ-AI Model: Training Voting-XGBoost model\n\n# Importing necessary libraries\nimport pandas as pd\nimport sklearn\nfrom sklearn.ensemble import RandomForestRegressor\nfrom xgboost.sklearn import XGBRegressor\nimport pickle\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\n\n# importing data\nfinal=pd.read_csv('/groups/ESS/mislam25/processed_training/agg_data_2021_03_15_to_22_4_30.csv')\nprint(final.head())\nfinal=final.dropna()\n\n# Processing training data\nX = final.drop(['AirNOW_O3','Latitude_x','Longitude_x'],axis=1)\ny = final['AirNOW_O3']\n\nrf = RandomForestRegressor(bootstrap=True, ccp_alpha=0.0, criterion='mse',\n max_depth=None, max_features='auto', max_leaf_nodes=None,\n max_samples=None, min_impurity_decrease=0.0,\n min_samples_leaf=1,\n min_samples_split=2, min_weight_fraction_leaf=0.0,\n n_estimators=100, n_jobs=-1, oob_score=False,\n random_state=3086, verbose=0, warm_start=False)\n\nrf.fit(X, y)\n\n# save the model to disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\npickle.dump(rf, open(filename, 'wb'))\nEOF\npython /home/aalnaim/rf_pyCaret.py\" >> /home/aalnaim/cmaq_gpu.slurm\n\nsbatch /home/aalnaim/cmaq_gpu.slurm", + "history_output" : "Submitted batch job 268135\n", + "history_begin_time" : 1654491425039, + "history_end_time" : 1654535261590, + "history_notes" : null, + "history_process" : "wny2dz", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "xt19rQLKb5hk", + "history_input" : "echo \"#!/bin/bash\n#SBATCH --partition=gpuq # the DGX only belongs in the 'gpu' partition\n#SBATCH --qos=gpu # need to select 'gpu' QoS\n#SBATCH --job-name=cmaq-gpu\n#SBATCH --output=cmaq-gpu.%j.out\n#SBATCH --error=cmaq-gpu.%j.err\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=64 # up to 128;\n#SBATCH --gres=gpu:A100.40gb:4 # up to 8; only request what you need\n#SBATCH --mem-per-cpu=3500M # memory per CORE; total memory is 1 TB (1,000,000 MB)\n#SBATCH --export=ALL\n#SBATCH --time=0-04:00:00 # set to 1hr; please choose carefully\nset 
echo\numask 0027\n# to see ID and state of GPUs assigned\nnvidia-smi\n\nmodule load python\necho \"# Write first python in Geoweaver# NASA GEOWEAVER\n# CMAQ-AI Model: Training Voting-XGBoost model\n\n# Importing necessary libraries\nimport pandas as pd\nimport sklearn\nfrom sklearn.ensemble import RandomForestRegressor\nfrom xgboost.sklearn import XGBRegressor\nimport pickle\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\n\n# importing data\nfinal=pd.read_csv('/groups/ESS/mislam25/processed_training/agg_data_2021_03_15_to_22_4_30.csv')\nprint(final.head())\nfinal=final.dropna()\n\n# Processing training data\nX = final.drop(['AirNOW_O3','Latitude_x','Longitude_x'],axis=1)\ny = final['AirNOW_O3']\n\nrf = RandomForestRegressor(bootstrap=True, ccp_alpha=0.0, criterion='mse',\n max_depth=None, max_features='auto', max_leaf_nodes=None,\n max_samples=None, min_impurity_decrease=0.0,\n min_samples_leaf=1,\n min_samples_split=2, min_weight_fraction_leaf=0.0,\n n_estimators=100, n_jobs=-1, oob_score=False,\n random_state=3086, verbose=0, warm_start=False)\n\nrf.fit(X, y)\n\n# save the model to disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\npickle.dump(rf, open(filename, 'wb'))\" >> /home/aalnaim/rf_pyCaret.py\n\npython /home/aalnaim/rf_pyCaret.py\" >> /home/aalnaim/cmaq_gpu.slurm\n\nsbatch /home/aalnaim/cmaq_gpu.slurm", + "history_output" : "#!/bin/bash\n#SBATCH --partition=gpuq # the DGX only belongs in the 'gpu' partition\n#SBATCH --qos=gpu # need to select 'gpu' QoS\n#SBATCH --job-name=cmaq-gpu\n#SBATCH --output=cmaq-gpu.%j.out\n#SBATCH --error=cmaq-gpu.%j.err\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=64 # up to 128;\n#SBATCH --gres=gpu:A100.40gb:4 # up to 8; only request what you need\n#SBATCH --mem-per-cpu=3500M # memory per CORE; total memory is 1 TB (1,000,000 MB)\n#SBATCH --export=ALL\n#SBATCH --time=0-04:00:00 # set to 1hr; please choose carefully\nset echo\numask 0027\n# to see ID and state of GPUs assigned\nnvidia-smi\n\nmodule load python\necho # Write first python in Geoweaver# NASA GEOWEAVER\nimport: unable to open X server `' @ error/import.c/ImportImageCommand/344.\nimport: unable to open X server `' @ error/import.c/ImportImageCommand/344.\n./geoweaver-xt19rQLKb5hk.sh: line 25: from: command not found\n./geoweaver-xt19rQLKb5hk.sh: line 26: from: command not found\nimport: unable to open X server `' @ error/import.c/ImportImageCommand/344.\n./geoweaver-xt19rQLKb5hk.sh: line 28: from: command not found\n./geoweaver-xt19rQLKb5hk.sh: line 31: syntax error near unexpected token `('\n./geoweaver-xt19rQLKb5hk.sh: line 31: `home = str(Path.home())'\n", + "history_begin_time" : 1654491367280, + "history_end_time" : 1654491394414, + "history_notes" : null, + "history_process" : "wny2dz", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "q6ogNuO8i4ck", + "history_input" : "echo \"#!/bin/bash\n#SBATCH --partition=gpuq # the DGX only belongs in the 'gpu' partition\n#SBATCH --qos=gpu # need to select 'gpu' QoS\n#SBATCH --job-name=python-gpu\n#SBATCH --output=python-gpu.%j.out\n#SBATCH --error=python-gpu.%j.err\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=64 # up to 128;\n#SBATCH --gres=gpu:A100.40gb:4 # up to 8; only request what you need\n#SBATCH --mem-per-cpu=3500M # memory per CORE; total memory is 1 TB (1,000,000 MB)\n#SBATCH --export=ALL\n#SBATCH --time=0-04:00:00 # set to 1hr; please choose carefully\nset echo\numask 0027\n# to see ID and state of GPUs 
assigned\nnvidia-smi\n\nmodule load python\ncat <>/home/aalnaim/rf_pyCaret.py\n# Write first python in Geoweaver# NASA GEOWEAVER\n# CMAQ-AI Model: Training Voting-XGBoost model\n\n# Importing necessary libraries\nimport pandas as pd\nimport sklearn\nfrom sklearn.ensemble import RandomForestRegressor\nfrom xgboost.sklearn import XGBRegressor\nimport pickle\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\n\n# importing data\nfinal=pd.read_csv('/groups/ESS/mislam25/processed_training/agg_data_2021_03_15_to_22_4_30.csv')\nprint(final.head())\nfinal=final.dropna()\n\n# Processing training data\nX = final.drop(['AirNOW_O3','Latitude_x','Longitude_x'],axis=1)\ny = final['AirNOW_O3']\n\nrf = RandomForestRegressor(bootstrap=True, ccp_alpha=0.0, criterion='mse',\n max_depth=None, max_features='auto', max_leaf_nodes=None,\n max_samples=None, min_impurity_decrease=0.0,\n min_samples_leaf=1,\n min_samples_split=2, min_weight_fraction_leaf=0.0,\n n_estimators=100, n_jobs=-1, oob_score=False,\n random_state=3086, verbose=0, warm_start=False)\n\nrf.fit(X, y)\n\n# save the model to disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\npickle.dump(rf, open(filename, 'wb'))\nEOF\npython /home/aalnaim/rf_pyCaret.py\" >> /home/aalnaim/cmaq_gpu.slurm\n\nsbatch /home/aalnaim/cmaq_gpu.slurm", + "history_output" : "Submitted batch job 268133\n", + "history_begin_time" : 1654491162242, + "history_end_time" : 1654491393867, + "history_notes" : null, + "history_process" : "wny2dz", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "nfkZqgovRjlG", + "history_input" : "echo \"#!/bin/bash\n#SBATCH --partition=gpuq # the DGX only belongs in the 'gpu' partition\n#SBATCH --qos=gpu # need to select 'gpu' QoS\n#SBATCH --job-name=python-gpu\n#SBATCH --output=python-gpu.%j.out\n#SBATCH --error=python-gpu.%j.err\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=64 # up to 128;\n#SBATCH --gres=gpu:A100.40gb:4 # up to 8; only request what you need\n#SBATCH --mem-per-cpu=3500M # memory per CORE; total memory is 1 TB (1,000,000 MB)\n#SBATCH --export=ALL\n#SBATCH --time=0-04:00:00 # set to 1hr; please choose carefully\nset echo\numask 0027\n# to see ID and state of GPUs assigned\nnvidia-smi\nmodule load hosts/dgx # switch to the modules on the dgx\nmodule load python\ncat <>/home/aalnaim/rf_pyCaret.py\n# Write first python in Geoweaver# NASA GEOWEAVER\n# CMAQ-AI Model: Training Voting-XGBoost model\n\n# Importing necessary libraries\nimport pandas as pd\nimport sklearn\nfrom sklearn.ensemble import RandomForestRegressor\nfrom xgboost.sklearn import XGBRegressor\nimport pickle\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\n\n# importing data\nfinal=pd.read_csv('/groups/ESS/mislam25/processed_training/agg_data_2021_03_15_to_22_4_30.csv')\nprint(final.head())\nfinal=final.dropna()\n\n# Processing training data\nX = final.drop(['AirNOW_O3','Latitude_x','Longitude_x'],axis=1)\ny = final['AirNOW_O3']\n\nrf = RandomForestRegressor(bootstrap=True, ccp_alpha=0.0, criterion='mse',\n max_depth=None, max_features='auto', max_leaf_nodes=None,\n max_samples=None, min_impurity_decrease=0.0,\n min_samples_leaf=1,\n min_samples_split=2, min_weight_fraction_leaf=0.0,\n n_estimators=100, n_jobs=-1, oob_score=False,\n random_state=3086, verbose=0, warm_start=False)\n\nrf.fit(X, y)\n\n# save the model to disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\npickle.dump(rf, 
open(filename, 'wb'))\nEOF\npython /home/aalnaim/rf_pyCaret.py\" >> /home/aalnaim/cmaq_gpu.slurm\n\nsbatch /home/aalnaim/cmaq_gpu.slurm", + "history_output" : "Submitted batch job 268131\n", + "history_begin_time" : 1654490903840, + "history_end_time" : 1654491393398, + "history_notes" : null, + "history_process" : "wny2dz", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "PamN9MxUU1K4", + "history_input" : "echo \"#!/bin/bash\n#SBATCH --partition=gpuq # the DGX only belongs in the 'gpu' partition\n#SBATCH --qos=gpu # need to select 'gpu' QoS\n#SBATCH --job-name=python-gpu\n#SBATCH --output=python-gpu.%j.out\n#SBATCH --error=python-gpu.%j.err\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=64 # up to 128;\n#SBATCH --gres=gpu:A100.40gb:4 # up to 8; only request what you need\n#SBATCH --mem-per-cpu=3500M # memory per CORE; total memory is 1 TB (1,000,000 MB)\n#SBATCH --export=ALL\n#SBATCH --time=0-04:00:00 # set to 1hr; please choose carefully\nset echo\numask 0027\n# to see ID and state of GPUs assigned\nnvidia-smi\nmodule load hosts/dgx # switch to the modules on the dgx\nmodule load python\ncat <>rf_pyCaret.py\n# Write first python in Geoweaver# NASA GEOWEAVER\n# CMAQ-AI Model: Training Voting-XGBoost model\n\n# Importing necessary libraries\nimport pandas as pd\nimport sklearn\nfrom sklearn.ensemble import RandomForestRegressor\nfrom xgboost.sklearn import XGBRegressor\nimport pickle\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\n\n# importing data\nfinal=pd.read_csv(home+'/cmaq/training.csv')\nprint(final.head())\nfinal=final.dropna()\n\n# Processing training data\nX = final.drop(['AirNOW_O3','Latitude_x','Longitude_x'],axis=1)\ny = final['AirNOW_O3']\n\nrf = RandomForestRegressor(bootstrap=True, ccp_alpha=0.0, criterion='mse',\n max_depth=None, max_features='auto', max_leaf_nodes=None,\n max_samples=None, min_impurity_decrease=0.0,\n min_samples_leaf=1,\n min_samples_split=2, min_weight_fraction_leaf=0.0,\n n_estimators=100, n_jobs=-1, oob_score=False,\n random_state=3086, verbose=0, warm_start=False)\n\nrf.fit(X, y)\n\n# save the model to disk\nfilename = home+'/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\npickle.dump(rf, open(filename, 'wb'))\nEOF\npython rf_pyCaret.py\" >> /home/aalnaim/cmaq_gpu.slurm\n\nsbatch /home/aalnaim/cmaq_gpu.slurm", + "history_output" : "Submitted batch job 268130\n", + "history_begin_time" : 1654490758893, + "history_end_time" : 1654491392901, + "history_notes" : null, + "history_process" : "wny2dz", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "zTC26oVAvMNv", + "history_input" : "echo \"#!/bin/bash\n#SBATCH --partition=gpuq # the DGX only belongs in the 'gpu' partition\n#SBATCH --qos=gpu # need to select 'gpu' QoS\n#SBATCH --job-name=python-gpu\n#SBATCH --output=python-gpu.%j.out\n#SBATCH --error=python-gpu.%j.err\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=64 # up to 128;\n#SBATCH --gres=gpu:A100.40gb:4 # up to 8; only request what you need\n#SBATCH --mem-per-cpu=3500M # memory per CORE; total memory is 1 TB (1,000,000 MB)\n#SBATCH --export=ALL\n#SBATCH --time=0-04:00:00 # set to 1hr; please choose carefully\nset echo\numask 0027\n# to see ID and state of GPUs assigned\nnvidia-smi\nmodule load hosts/dgx # switch to the modules on the dgx\nmodule load python\nsource /home/aalnaim/CMAQAI/bin/activate\" >> cmaq_gpu.slurm\n\n", + "history_output" : "Running", + "history_begin_time" : 1654490278351, + "history_end_time" : 1654490735341, + "history_notes" : null, 
+ "history_process" : "wny2dz", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "VfWXgYKIY46f", + "history_input" : "echo \"#!/bin/bash\n#SBATCH --partition=gpuq # the DGX only belongs in the 'gpu' partition\n#SBATCH --qos=gpu # need to select 'gpu' QoS\n#SBATCH --job-name=python-gpu\n#SBATCH --output=python-gpu.%j.out\n#SBATCH --error=python-gpu.%j.err\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=64 # up to 128;\n#SBATCH --gres=gpu:A100.40gb:4 # up to 8; only request what you need\n#SBATCH --mem-per-cpu=3500M # memory per CORE; total memory is 1 TB (1,000,000 MB)\n#SBATCH --export=ALL\n#SBATCH --time=0-04:00:00 # set to 1hr; please choose carefully\nset echo\numask 0027\n# to see ID and state of GPUs assigned\nnvidia-smi\nmodule load hosts/dgx # switch to the modules on the dgx\nmodule load python\ncat <>rf_pyCaret.py\n# Write first python in Geoweaver# NASA GEOWEAVER\n# CMAQ-AI Model: Training Voting-XGBoost model\n\n# Importing necessary libraries\nimport pandas as pd\nimport sklearn\nfrom sklearn.ensemble import RandomForestRegressor\nfrom xgboost.sklearn import XGBRegressor\nimport pickle\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\n\n# importing data\nfinal=pd.read_csv(home+'/cmaq/training.csv')\nprint(final.head())\nfinal=final.dropna()\n\n# Processing training data\nX = final.drop(['AirNOW_O3','Latitude_x','Longitude_x'],axis=1)\ny = final['AirNOW_O3']\n\nrf = RandomForestRegressor(bootstrap=True, ccp_alpha=0.0, criterion='mse',\n max_depth=None, max_features='auto', max_leaf_nodes=None,\n max_samples=None, min_impurity_decrease=0.0,\n min_samples_leaf=1,\n min_samples_split=2, min_weight_fraction_leaf=0.0,\n n_estimators=100, n_jobs=-1, oob_score=False,\n random_state=3086, verbose=0, warm_start=False)\n\nrf.fit(X, y)\n\n# save the model to disk\nfilename = home+'/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\npickle.dump(rf, open(filename, 'wb'))\nEOF\npython rf_pyCaret.py\" >> cmaq_gpu.slurm\n\nsbatch cmaq_gpu.slurm", + "history_output" : "Submitted batch job 268129\n", + "history_begin_time" : 1654490091612, + "history_end_time" : 1654490734793, + "history_notes" : null, + "history_process" : "wny2dz", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "OsW06ht15wmz", + "history_input" : "echo \"#!/bin/bash\n#SBATCH --partition=gpuq # the DGX only belongs in the 'gpu' partition\n#SBATCH --qos=gpu # need to select 'gpu' QoS\n#SBATCH --job-name=python-gpu\n#SBATCH --output=python-gpu.%j.out\n#SBATCH --error=python-gpu.%j.err\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=64 # up to 128;\n#SBATCH --gres=gpu:A100.40gb:4 # up to 8; only request what you need\n#SBATCH --mem-per-cpu=3500M # memory per CORE; total memory is 1 TB (1,000,000 MB)\n#SBATCH --export=ALL\n#SBATCH --time=0-04:00:00 # set to 1hr; please choose carefully\nset echo\numask 0027\n# to see ID and state of GPUs assigned\nnvidia-smi\nmodule load hosts/dgx # switch to the modules on the dgx\nmodule load python\ncat <>rf_pyCaret.py\n# Write first python in Geoweaver# NASA GEOWEAVER\n# CMAQ-AI Model: Training Voting-XGBoost model\n\n# Importing necessary libraries\nimport pandas as pd\nimport sklearn\nfrom sklearn.ensemble import RandomForestRegressor\nfrom xgboost.sklearn import XGBRegressor\nimport pickle\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\n\n# importing data\nfinal=pd.read_csv(home+'/cmaq/training.csv')\nprint(final.head())\nfinal=final.dropna()\n\n# Processing training data\nX = 
final.drop(['AirNOW_O3','Latitude_x','Longitude_x'],axis=1)\ny = final['AirNOW_O3']\n\nrf = RandomForestRegressor(bootstrap=True, ccp_alpha=0.0, criterion='mse',\n max_depth=None, max_features='auto', max_leaf_nodes=None,\n max_samples=None, min_impurity_decrease=0.0,\n min_samples_leaf=1,\n min_samples_split=2, min_weight_fraction_leaf=0.0,\n n_estimators=100, n_jobs=-1, oob_score=False,\n random_state=3086, verbose=0, warm_start=False)\n\nrf.fit(X, y)\n\n# save the model to disk\nfilename = home+'/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\npickle.dump(rf, open(filename, 'wb'))\nEOF\npython rf_pyCaret.py\" >> cmaq_gpu.slurm\n\nsbatch cmaq_gpu.slurm", + "history_output" : "Submitted batch job 268128\n", + "history_begin_time" : 1654490008108, + "history_end_time" : 1654490081925, + "history_notes" : null, + "history_process" : "wny2dz", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "yRMgjX7O036v", + "history_input" : "echo \"#!/bin/bash\n#SBATCH --partition=gpuq # the DGX only belongs in the 'gpu' partition\n#SBATCH --qos=gpu # need to select 'gpu' QoS\n#SBATCH --job-name=python-gpu\n#SBATCH --output=python-gpu.%j.out\n#SBATCH --error=python-gpu.%j.err\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=64 # up to 128;\n#SBATCH --gres=gpu:A100.40gb:4 # up to 8; only request what you need\n#SBATCH --mem-per-cpu=3500M # memory per CORE; total memory is 1 TB (1,000,000 MB)\n#SBATCH --export=ALL\n#SBATCH --time=0-04:00:00 # set to 1hr; please choose carefully\nset echo\numask 0027\n# to see ID and state of GPUs assigned\nnvidia-smi\nmodule load hosts/dgx # switch to the modules on the dgx\nmodule load python\ncat <>rf_pyCaret.py\n# Write first python in Geoweaver# NASA GEOWEAVER\n# CMAQ-AI Model: Training Voting-XGBoost model\n\n# Importing necessary libraries\nimport pandas as pd\nimport sklearn\nfrom sklearn.ensemble import RandomForestRegressor\nfrom xgboost.sklearn import XGBRegressor\nimport pickle\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\n\n# importing data\nfinal=pd.read_csv(home+'/cmaq/training.csv')\nprint(final.head())\nfinal=final.dropna()\n\n# Processing training data\nX = final.drop(['AirNOW_O3','Latitude_x','Longitude_x'],axis=1)\ny = final['AirNOW_O3']\n\nrf = RandomForestRegressor(bootstrap=True, ccp_alpha=0.0, criterion='mse',\n max_depth=None, max_features='auto', max_leaf_nodes=None,\n max_samples=None, min_impurity_decrease=0.0,\n min_samples_leaf=1,\n min_samples_split=2, min_weight_fraction_leaf=0.0,\n n_estimators=100, n_jobs=-1, oob_score=False,\n random_state=3086, verbose=0, warm_start=False)\n\nrf.fit(X, y)\n\n# save the model to disk\nfilename = home+'/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\npickle.dump(rf, open(filename, 'wb'))\nEOF\npython rf_pyCaret.py\" > cmaq_gpu.slurm\n\nsbatch cmaq_gpu.slurm", + "history_output" : "Submitted batch job 268127\n", + "history_begin_time" : 1654489311859, + "history_end_time" : 1654489851171, + "history_notes" : null, + "history_process" : "wny2dz", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "dUmujbU0K8pG", + "history_input" : "echo \"\n#!/bin/bash\n#SBATCH --partition=gpuq # the DGX only belongs in the 'gpu' partition\n#SBATCH --qos=gpu # need to select 'gpu' QoS\n#SBATCH --job-name=python-gpu\n#SBATCH --output=python-gpu.%j.out\n#SBATCH --error=python-gpu.%j.err\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=64 # up to 128;\n#SBATCH --gres=gpu:A100.40gb:4 # up to 8; only request what you 
need\n#SBATCH --mem-per-cpu=3500M # memory per CORE; total memory is 1 TB (1,000,000 MB)\n#SBATCH --export=ALL\n#SBATCH --time=0-04:00:00 # set to 1hr; please choose carefully\nset echo\numask 0027\n# to see ID and state of GPUs assigned\nnvidia-smi\nmodule load hosts/dgx # switch to the modules on the dgx\nmodule load python\ncat <>rf_pyCaret.py\n# Write first python in Geoweaver# NASA GEOWEAVER\n# CMAQ-AI Model: Training Voting-XGBoost model\n\n# Importing necessary libraries\nimport pandas as pd\nimport sklearn\nfrom sklearn.ensemble import RandomForestRegressor\nfrom xgboost.sklearn import XGBRegressor\nimport pickle\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\n\n# importing data\nfinal=pd.read_csv(home+'/cmaq/training.csv')\nprint(final.head())\nfinal=final.dropna()\n\n# Processing training data\nX = final.drop(['AirNOW_O3','Latitude_x','Longitude_x'],axis=1)\ny = final['AirNOW_O3']\n\nrf = RandomForestRegressor(bootstrap=True, ccp_alpha=0.0, criterion='mse',\n max_depth=None, max_features='auto', max_leaf_nodes=None,\n max_samples=None, min_impurity_decrease=0.0,\n min_samples_leaf=1,\n min_samples_split=2, min_weight_fraction_leaf=0.0,\n n_estimators=100, n_jobs=-1, oob_score=False,\n random_state=3086, verbose=0, warm_start=False)\n\nrf.fit(X, y)\n\n# save the model to disk\nfilename = home+'/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\npickle.dump(rf, open(filename, 'wb'))\nEOF\npython rf_pyCaret.py\n\" > cmaq_gpu.slurm\n\nsbatch cmaq_gpu.slurm", + "history_output" : "sbatch: error: This does not look like a batch script. The first\nsbatch: error: line must start with #! followed by the path to an interpreter.\nsbatch: error: For instance: #!/bin/sh\n", + "history_begin_time" : 1654489261923, + "history_end_time" : 1654489851834, + "history_notes" : null, + "history_process" : "wny2dz", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "9VEMKfOKzdSL", + "history_input" : "cat <>cmaq_gpu.slurm\n#!/bin/bash\n#SBATCH --partition=gpuq # the DGX only belongs in the 'gpu' partition\n#SBATCH --qos=gpu # need to select 'gpu' QoS\n#SBATCH --job-name=python-gpu\n#SBATCH --output=python-gpu.%j.out\n#SBATCH --error=python-gpu.%j.err\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=64 # up to 128;\n#SBATCH --gres=gpu:A100.40gb:4 # up to 8; only request what you need\n#SBATCH --mem-per-cpu=3500M # memory per CORE; total memory is 1 TB (1,000,000 MB)\n#SBATCH --export=ALL\n#SBATCH --time=0-04:00:00 # set to 1hr; please choose carefully\nset echo\numask 0027\n# to see ID and state of GPUs assigned\nnvidia-smi\nmodule load hosts/dgx # switch to the modules on the dgx\nmodule load python\ncat <>rf_pyCaret.py\n# Write first python in Geoweaver# NASA GEOWEAVER\n# CMAQ-AI Model: Training Voting-XGBoost model\n\n# Importing necessary libraries\nimport pandas as pd\nimport sklearn\nfrom sklearn.ensemble import RandomForestRegressor\nfrom xgboost.sklearn import XGBRegressor\nimport pickle\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\n\n# importing data\nfinal=pd.read_csv(home+'/cmaq/training.csv')\nprint(final.head())\nfinal=final.dropna()\n\n# Processing training data\nX = final.drop(['AirNOW_O3','Latitude_x','Longitude_x'],axis=1)\ny = final['AirNOW_O3']\n\nrf = RandomForestRegressor(bootstrap=True, ccp_alpha=0.0, criterion='mse',\n max_depth=None, max_features='auto', max_leaf_nodes=None,\n max_samples=None, min_impurity_decrease=0.0,\n min_samples_leaf=1,\n min_samples_split=2, 
min_weight_fraction_leaf=0.0,\n n_estimators=100, n_jobs=-1, oob_score=False,\n random_state=3086, verbose=0, warm_start=False)\n\nrf.fit(X, y)\n\n# save the model to disk\nfilename = home+'/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\npickle.dump(rf, open(filename, 'wb'))\nEOF\nEOF\n\necho \"Done!\"\necho $(pwd)", + "history_output" : "./geoweaver-9VEMKfOKzdSL.sh: line 59: EOF: command not found\nDone!\n/home/aalnaim/gw-workspace/9VEMKfOKzdSL\n", + "history_begin_time" : 1654488845481, + "history_end_time" : 1654489852569, + "history_notes" : null, + "history_process" : "wny2dz", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "YZpAntSz921L", + "history_input" : "#!/bin/bash\n#SBATCH --partition=gpuq # the DGX only belongs in the 'gpu' partition\n#SBATCH --qos=gpu # need to select 'gpu' QoS\n#SBATCH --job-name=python-gpu\n#SBATCH --output=python-gpu.%j.out\n#SBATCH --error=python-gpu.%j.err\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=64 # up to 128;\n#SBATCH --gres=gpu:A100.40gb:4 # up to 8; only request what you need\n#SBATCH --mem-per-cpu=3500M # memory per CORE; total memory is 1 TB (1,000,000 MB)\n#SBATCH --export=ALL\n#SBATCH --time=0-04:00:00 # set to 1hr; please choose carefully\nset echo\numask 0027\n# to see ID and state of GPUs assigned\nnvidia-smi\nmodule load hosts/dgx # switch to the modules on the dgx\nmodule load python\ncat <>rf_pyCaret.py\n# Write first python in Geoweaver# NASA GEOWEAVER\n# CMAQ-AI Model: Training Voting-XGBoost model\n\n# Importing necessary libraries\nimport pandas as pd\nimport sklearn\nfrom sklearn.ensemble import RandomForestRegressor\nfrom xgboost.sklearn import XGBRegressor\nimport pickle\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\n\n# importing data\nfinal=pd.read_csv(home+'/cmaq/training.csv')\nprint(final.head())\nfinal=final.dropna()\n\n# Processing training data\nX = final.drop(['AirNOW_O3','Latitude_x','Longitude_x'],axis=1)\ny = final['AirNOW_O3']\n\nrf = RandomForestRegressor(bootstrap=True, ccp_alpha=0.0, criterion='mse',\n max_depth=None, max_features='auto', max_leaf_nodes=None,\n max_samples=None, min_impurity_decrease=0.0,\n min_samples_leaf=1,\n min_samples_split=2, min_weight_fraction_leaf=0.0,\n n_estimators=100, n_jobs=-1, oob_score=False,\n random_state=3086, verbose=0, warm_start=False)\n\nrf.fit(X, y)\n\n# save the model to disk\nfilename = home+'/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\npickle.dump(rf, open(filename, 'wb'))\nEOF\npython rf_pyCaret.py", + "history_output" : "./geoweaver-YZpAntSz921L.sh: line 16: nvidia-smi: command not found\nLmod has detected the following error: The following module(s) are unknown:\n\"hosts/dgx\"\n\nPlease check the spelling or version number. Also try \"module spider ...\"\nIt is also possible your cache file is out-of-date; it may help to try:\n $ module --ignore-cache load \"hosts/dgx\"\n\nAlso make sure that all modulefiles written in TCL start with the string\n#%Module\n\n\n\n Latitude_x Longitude_x AirNOW_O3 ... month day hours\n0 29.489082 -81.276833 1.0 ... 3 15 12\n1 40.580200 -74.199402 37.0 ... 3 15 12\n2 39.128860 -84.504044 36.0 ... 3 15 12\n3 41.096157 -80.658905 26.0 ... 3 15 12\n4 34.635960 -82.810669 39.0 ... 
3 15 12\n\n[5 rows x 18 columns]\n", + "history_begin_time" : 1654488620622, + "history_end_time" : 1654488708322, + "history_notes" : null, + "history_process" : "wny2dz", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "XCUgtFWm1NCS", + "history_input" : "cat <>cmaq.slurm\n#!/bin/bash\n#SBATCH --partition=gpuq # the DGX only belongs in the 'gpu' partition\n#SBATCH --qos=gpu # need to select 'gpu' QoS\n#SBATCH --job-name=cmaq-gpu\n#SBATCH --output=cmaq-gpu.%j.out\n#SBATCH --error=cmaq-gpu.%j.err\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=64 # up to 128;\n#SBATCH --gres=gpu:A100.40gb:4 # up to 8; only request what you need\n#SBATCH --mem-per-cpu=3500M # memory per CORE; total memory is 1 TB (1,000,000 MB)\n#SBATCH --export=ALL\n#SBATCH --time=0-04:00:00 # set to 1hr; please choose carefully\nset echo\numask 0027\n# to see ID and state of GPUs assigned\nnvidia-smi\nmodule load hosts/dgx # switch to the modules on the dgx\nmodule load python\nsource /home/aalnaim/CMAQAI/bin/activate\n\ncat <>rf_pyCaret.py\n# Write first python in Geoweaver# NASA GEOWEAVER\n# CMAQ-AI Model: Training Voting-XGBoost model\n\n# Importing necessary libraries\nimport pandas as pd\nimport sklearn\nfrom sklearn.ensemble import RandomForestRegressor\nfrom xgboost.sklearn import XGBRegressor\nimport pickle\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\n\n# importing data\nfinal=pd.read_csv('/groups/ESS/mislam25/processed_training/agg_data_2021_03_15_to_22_4_30.csv')\nprint(final.head())\nfinal=final.dropna()\n\n# Processing training data\nX = final.drop(['AirNOW_O3','Latitude_x','Longitude_x'],axis=1)\ny = final['AirNOW_O3']\n\nrf = RandomForestRegressor(bootstrap=True, ccp_alpha=0.0, criterion='mse',\n max_depth=None, max_features='auto', max_leaf_nodes=None,\n max_samples=None, min_impurity_decrease=0.0,\n min_samples_leaf=1,\n min_samples_split=2, min_weight_fraction_leaf=0.0,\n n_estimators=100, n_jobs=-1, oob_score=False,\n random_state=3086, verbose=0, warm_start=False)\n\nrf.fit(X, y)\n\n# save the model to disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\npickle.dump(rf, open(filename, 'wb'))\nEOF\npython rf_pyCaret.py\nEOF\nsbatch cmaq.slurm", + "history_output" : "./geoweaver-XCUgtFWm1NCS.sh: line 61: python: command not found\n./geoweaver-XCUgtFWm1NCS.sh: line 62: EOF: command not found\nSubmitted batch job 268125\n", + "history_begin_time" : 1654488451251, + "history_end_time" : 1654488696580, + "history_notes" : null, + "history_process" : "wny2dz", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "eIfu6yALskJK", + "history_input" : "cat <>cmaq.slurm\n#!/bin/bash\n#SBATCH --partition=gpuq # the DGX only belongs in the 'gpu' partition\n#SBATCH --qos=gpu # need to select 'gpu' QoS\n#SBATCH --job-name=cmaq-gpu\n#SBATCH --output=cmaq-gpu.%j.out\n#SBATCH --error=cmaq-gpu.%j.err\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=64 # up to 128;\n#SBATCH --gres=gpu:A100.40gb:4 # up to 8; only request what you need\n#SBATCH --mem-per-cpu=3500M # memory per CORE; total memory is 1 TB (1,000,000 MB)\n#SBATCH --export=ALL\n#SBATCH --time=0-04:00:00 # set to 1hr; please choose carefully\nset echo\numask 0027\n# to see ID and state of GPUs assigned\nnvidia-smi\nmodule load hosts/dgx # switch to the modules on the dgx\nmodule load python\nsource /home/aalnaim/CMAQAI/bin/activate\n\npython <>cmaq.slurm\n#!/bin/bash\n#SBATCH --partition=gpuq # the DGX only belongs in the 'gpu' partition\n#SBATCH 
--qos=gpu # need to select 'gpu' QoS\n#SBATCH --job-name=cmaq-gpu\n#SBATCH --output=cmaq-gpu.%j.out\n#SBATCH --error=cmaq-gpu.%j.err\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=64 # up to 128;\n#SBATCH --gres=gpu:A100.40gb:4 # up to 8; only request what you need\n#SBATCH --mem-per-cpu=3500M # memory per CORE; total memory is 1 TB (1,000,000 MB)\n#SBATCH --export=ALL\n#SBATCH --time=0-04:00:00 # set to 1hr; please choose carefully\nset echo\numask 0027\n# to see ID and state of GPUs assigned\nnvidia-smi\nmodule load hosts/dgx # switch to the modules on the dgx\nmodule load python\nsource /home/aalnaim/CMAQAI/bin/activate\n\ncat <>rf_pyCaret.py\n# Write first python in Geoweaver# NASA GEOWEAVER\n# CMAQ-AI Model: Training Voting-XGBoost model\n\n# Importing necessary libraries\nimport pandas as pd\nimport sklearn\nfrom sklearn.ensemble import RandomForestRegressor\nfrom xgboost.sklearn import XGBRegressor\nimport pickle\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\n\n# importing data\nfinal=pd.read_csv('/groups/ESS/mislam25/processed_training/agg_data_2021_03_15_to_22_4_30.csv')\nprint(final.head())\nfinal=final.dropna()\n\n# Processing training data\nX = final.drop(['AirNOW_O3','Latitude_x','Longitude_x'],axis=1)\ny = final['AirNOW_O3']\n\nrf = RandomForestRegressor(bootstrap=True, ccp_alpha=0.0, criterion='mse',\n max_depth=None, max_features='auto', max_leaf_nodes=None,\n max_samples=None, min_impurity_decrease=0.0,\n min_samples_leaf=1,\n min_samples_split=2, min_weight_fraction_leaf=0.0,\n n_estimators=100, n_jobs=-1, oob_score=False,\n random_state=3086, verbose=0, warm_start=False)\n\nrf.fit(X, y)\n\n# save the model to disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\npickle.dump(rf, open(filename, 'wb'))\nEOF\npython rf_pyCaret.py\nEOF\nsbatch cmaq.slurm", + "history_output" : "./geoweaver-vhhKiNPD7vzO.sh: line 61: python: command not found\n./geoweaver-vhhKiNPD7vzO.sh: line 62: EOF: command not found\nSubmitted batch job 268123\n", + "history_begin_time" : 1654488249524, + "history_end_time" : 1654488417507, + "history_notes" : null, + "history_process" : "wny2dz", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "AuddkGqBHLQo", + "history_input" : "cat < cmaq.slurm\n#!/bin/bash\n#SBATCH --partition=gpuq # the DGX only belongs in the 'gpu' partition\n#SBATCH --qos=gpu # need to select 'gpu' QoS\n#SBATCH --job-name=cmaq-gpu\n#SBATCH --output=cmaq-gpu.%j.out\n#SBATCH --error=cmaq-gpu.%j.err\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=64 # up to 128;\n#SBATCH --gres=gpu:A100.40gb:4 # up to 8; only request what you need\n#SBATCH --mem-per-cpu=3500M # memory per CORE; total memory is 1 TB (1,000,000 MB)\n#SBATCH --export=ALL\n#SBATCH --time=0-04:00:00 # set to 1hr; please choose carefully\nset echo\numask 0027\n# to see ID and state of GPUs assigned\nnvidia-smi\nmodule load hosts/dgx # switch to the modules on the dgx\nmodule load python\nsource /home/aalnaim/CMAQAI/bin/activate\n\ncat < rf_pyCaret.py\n# Write first python in Geoweaver# NASA GEOWEAVER\n# CMAQ-AI Model: Training Voting-XGBoost model\n\n# Importing necessary libraries\nimport pandas as pd\nimport sklearn\nfrom sklearn.ensemble import RandomForestRegressor\nfrom xgboost.sklearn import XGBRegressor\nimport pickle\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\n\n# importing 
data\nfinal=pd.read_csv('/groups/ESS/mislam25/processed_training/agg_data_2021_03_15_to_22_4_30.csv')\nprint(final.head())\nfinal=final.dropna()\n\n# Processing training data\nX = final.drop(['AirNOW_O3','Latitude_x','Longitude_x'],axis=1)\ny = final['AirNOW_O3']\n\nrf = RandomForestRegressor(bootstrap=True, ccp_alpha=0.0, criterion='mse',\n max_depth=None, max_features='auto', max_leaf_nodes=None,\n max_samples=None, min_impurity_decrease=0.0,\n min_samples_leaf=1,\n min_samples_split=2, min_weight_fraction_leaf=0.0,\n n_estimators=100, n_jobs=-1, oob_score=False,\n random_state=3086, verbose=0, warm_start=False)\n\nrf.fit(X, y)\n\n# save the model to disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\npickle.dump(rf, open(filename, 'wb'))\nEOF\npython rf_pyCaret.py\nEOF\nsbatch cmaq.slurm", + "history_output" : "./geoweaver-AuddkGqBHLQo.sh: line 61: python: command not found\n./geoweaver-AuddkGqBHLQo.sh: line 62: EOF: command not found\nSubmitted batch job 268122\n", + "history_begin_time" : 1654487557488, + "history_end_time" : 1654488416989, + "history_notes" : null, + "history_process" : "wny2dz", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "S4lxbt3v3lbO", + "history_input" : "#!/bin/bash\ncat < cmaq.slurm\n#SBATCH --partition=gpuq # the DGX only belongs in the 'gpu' partition\n#SBATCH --qos=gpu # need to select 'gpu' QoS\n#SBATCH --job-name=cmaq-gpu\n#SBATCH --output=cmaq-gpu.%j.out\n#SBATCH --error=cmaq-gpu.%j.err\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=64 # up to 128;\n#SBATCH --gres=gpu:A100.40gb:4 # up to 8; only request what you need\n#SBATCH --mem-per-cpu=3500M # memory per CORE; total memory is 1 TB (1,000,000 MB)\n#SBATCH --export=ALL\n#SBATCH --time=0-04:00:00 # set to 1hr; please choose carefully\nset echo\numask 0027\n# to see ID and state of GPUs assigned\nnvidia-smi\nmodule load hosts/dgx # switch to the modules on the dgx\nmodule load python\nsource /home/aalnaim/CMAQAI/bin/activate\n\ncat < rf_pyCaret.py\n# Write first python in Geoweaver# NASA GEOWEAVER\n# CMAQ-AI Model: Training Voting-XGBoost model\n\n# Importing necessary libraries\nimport pandas as pd\nimport sklearn\nfrom sklearn.ensemble import RandomForestRegressor\nfrom xgboost.sklearn import XGBRegressor\nimport pickle\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\n\n# importing data\nfinal=pd.read_csv('/groups/ESS/mislam25/processed_training/agg_data_2021_03_15_to_22_4_30.csv')\nprint(final.head())\nfinal=final.dropna()\n\n# Processing training data\nX = final.drop(['AirNOW_O3','Latitude_x','Longitude_x'],axis=1)\ny = final['AirNOW_O3']\n\nrf = RandomForestRegressor(bootstrap=True, ccp_alpha=0.0, criterion='mse',\n max_depth=None, max_features='auto', max_leaf_nodes=None,\n max_samples=None, min_impurity_decrease=0.0,\n min_samples_leaf=1,\n min_samples_split=2, min_weight_fraction_leaf=0.0,\n n_estimators=100, n_jobs=-1, oob_score=False,\n random_state=3086, verbose=0, warm_start=False)\n\nrf.fit(X, y)\n\n# save the model to disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\npickle.dump(rf, open(filename, 'wb'))\nEOF\npython rf_pyCaret.py\nEOF\nsbatch cmaq.slurm", + "history_output" : "./geoweaver-S4lxbt3v3lbO.sh: line 61: python: command not found\n./geoweaver-S4lxbt3v3lbO.sh: line 62: EOF: command not found\nsbatch: error: This does not look like a batch script. The first\nsbatch: error: line must start with #! 
followed by the path to an interpreter.\nsbatch: error: For instance: #!/bin/sh\n", + "history_begin_time" : 1654487491532, + "history_end_time" : 1654488416191, + "history_notes" : null, + "history_process" : "wny2dz", + "host_id" : null, + "indicator" : "Stopped" +},{ + "history_id" : "myxZd3pn5O6C", + "history_input" : "#!/bin/bash\n#SBATCH --partition=gpuq # the DGX only belongs in the 'gpu' partition\n#SBATCH --qos=gpu # need to select 'gpu' QoS\n#SBATCH --job-name=cmaq-gpu\n#SBATCH --output=cmaq-gpu.%j.out\n#SBATCH --error=cmaq-gpu.%j.err\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=64 # up to 128;\n#SBATCH --gres=gpu:A100.40gb:4 # up to 8; only request what you need\n#SBATCH --mem-per-cpu=3500M # memory per CORE; total memory is 1 TB (1,000,000 MB)\n#SBATCH --export=ALL\n#SBATCH --time=0-04:00:00 # set to 1hr; please choose carefully\nset echo\numask 0027\n# to see ID and state of GPUs assigned\nnvidia-smi\nmodule load hosts/dgx # switch to the modules on the dgx\nmodule load python\nsource /home/aalnaim/CMAQAI/bin/activate\n\ncat < rf_pyCaret.py\n# Write first python in Geoweaver# NASA GEOWEAVER\n# CMAQ-AI Model: Training Voting-XGBoost model\n\n# Importing necessary libraries\nimport pandas as pd\nimport sklearn\nfrom sklearn.ensemble import RandomForestRegressor\nfrom xgboost.sklearn import XGBRegressor\nimport pickle\nfrom pathlib import Path\n\n# home directory\nhome = str(Path.home())\n\n# importing data\nfinal=pd.read_csv('/groups/ESS/mislam25/processed_training/agg_data_2021_03_15_to_22_4_30.csv')\nprint(final.head())\nfinal=final.dropna()\n\n# Processing training data\nX = final.drop(['AirNOW_O3','Latitude_x','Longitude_x'],axis=1)\ny = final['AirNOW_O3']\n\nrf = RandomForestRegressor(bootstrap=True, ccp_alpha=0.0, criterion='mse',\n max_depth=None, max_features='auto', max_leaf_nodes=None,\n max_samples=None, min_impurity_decrease=0.0,\n min_samples_leaf=1,\n min_samples_split=2, min_weight_fraction_leaf=0.0,\n n_estimators=100, n_jobs=-1, oob_score=False,\n random_state=3086, verbose=0, warm_start=False)\n\nrf.fit(X, y)\n\n# save the model to disk\nfilename = '/groups/ESS/aalnaim/cmaq/models/rf.sav'\n#filename = 'D:/Research/CMAQ/local_test/xgboost.sav'\npickle.dump(rf, open(filename, 'wb'))\nEOF\npython rf_pyCaret.py", + "history_output" : "./geoweaver-myxZd3pn5O6C.sh: line 16: nvidia-smi: command not found\nLmod has detected the following error: The following module(s) are unknown:\n\"hosts/dgx\"\n\nPlease check the spelling or version number. Also try \"module spider ...\"\nIt is also possible your cache file is out-of-date; it may help to try:\n $ module --ignore-cache load \"hosts/dgx\"\n\nAlso make sure that all modulefiles written in TCL start with the string\n#%Module\n\n\n\n Latitude_x Longitude_x AirNOW_O3 ... month day hours\n0 29.489082 -81.276833 1.0 ... 3 15 12\n1 40.580200 -74.199402 37.0 ... 3 15 12\n2 39.128860 -84.504044 36.0 ... 3 15 12\n3 41.096157 -80.658905 26.0 ... 3 15 12\n4 34.635960 -82.810669 39.0 ... 3 15 12\n\n[5 rows x 18 columns]\n/home/aalnaim/CMAQAI/lib/python3.8/site-packages/sklearn/ensemble/_forest.py:396: FutureWarning: Criterion 'mse' was deprecated in v1.0 and will be removed in version 1.2. 
Use `criterion='squared_error'` which is equivalent.\n warn(\n", + "history_begin_time" : 1654487302837, + "history_end_time" : 1654487384411, + "history_notes" : null, + "history_process" : "wny2dz", + "host_id" : null, + "indicator" : "Stopped" +},] diff --git a/history/tjf998we0a4vxctdcq91.json b/history/tjf998we0a4vxctdcq91.json new file mode 100644 index 0000000..af601ba --- /dev/null +++ b/history/tjf998we0a4vxctdcq91.json @@ -0,0 +1,101 @@ +[{ + "history_id" : "5JvyyZEPJp4UIfaebB", + "history_input" : "iicy7w-IAjp6;is1w3m-u0DQ1;fsk7f2-n2ldn;", + "history_output" : "7wczndrdr2h;3m8h86qnfzm;zo92h4o0v63;", + "history_begin_time" : 1656459810387, + "history_end_time" : 1656460094843, + "history_notes" : null, + "history_process" : "tjf998we0a4vxctdcq91", + "host_id" : "p6wvf2;", + "indicator" : "Done" +},{ + "history_id" : "PTOgwZ3PypIaSRU7OQ", + "history_input" : "iicy7w-IAjp6;is1w3m-u0DQ1;fsk7f2-n2ldn;", + "history_output" : "gbutef3h8ad;7gulavmvp4p;1om9blf3b9v;", + "history_begin_time" : 1656455484689, + "history_end_time" : 1656455770012, + "history_notes" : null, + "history_process" : "tjf998we0a4vxctdcq91", + "host_id" : "p6wvf2;", + "indicator" : "Done" +},{ + "history_id" : "xfA0mWgxIUGkRjW7Ta", + "history_input" : "iicy7w-IAjp6;is1w3m-u0DQ1;is1w3m-ONXMo;", + "history_output" : "hmz4r94bwgu;hr2rrx85yut;9a4e7z9gi53;", + "history_begin_time" : 1656454294187, + "history_end_time" : 1656454576091, + "history_notes" : null, + "history_process" : "tjf998we0a4vxctdcq91", + "host_id" : "p6wvf2;", + "indicator" : "Done" +},{ + "history_id" : "6JBbBd5kUuBwJdfUAF", + "history_input" : "3asyzj-SnTl0;iicy7w-IAjp6;is1w3m-u0DQ1;is1w3m-ONXMo;", + "history_output" : "p72xum4kath;71vxoc5s3s6;qp3u62b701h;1mafcnesnf9;", + "history_begin_time" : 1656453928130, + "history_end_time" : 1656454210163, + "history_notes" : null, + "history_process" : "tjf998we0a4vxctdcq91", + "host_id" : "p6wvf2;", + "indicator" : "Done" +},{ + "history_id" : "JrA7u9H3hPw3UK3Emy", + "history_input" : "l8vlic-lfMZ2;3asyzj-SnTl0;iicy7w-IAjp6;is1w3m-u0DQ1;is1w3m-ONXMo;", + "history_output" : "gxs47n3fu63;tyaio8q7nov;b52wrb6e5b0;7sp4kvy7u51;mmqmr29btkk;", + "history_begin_time" : 1656451081758, + "history_end_time" : null, + "history_notes" : null, + "history_process" : "tjf998we0a4vxctdcq91", + "host_id" : "p6wvf2;", + "indicator" : "Stopped" +},{ + "history_id" : "YorzGVBJ7a4NxCUzbh", + "history_input" : "l8vlic-lfMZ2;3asyzj-SnTl0;iicy7w-IAjp6;is1w3m-u0DQ1;is1w3m-ONXMo;ex3vh9-KqTt6;b8uv5z-0dDMm;", + "history_output" : "qaas9oll99a;d4w6nlhuaxi;99qp90f5zc2;71qdytphclv;qc0f1msqrxb;orxddctze9j;h2sxykdhd89;", + "history_begin_time" : 1656428452238, + "history_end_time" : null, + "history_notes" : null, + "history_process" : "tjf998we0a4vxctdcq91", + "host_id" : "p6wvf2;", + "indicator" : "Stopped" +},{ + "history_id" : "53Tc6ThNV1FS28LSag", + "history_input" : "l8vlic-lfMZ2;3asyzj-SnTl0;iicy7w-IAjp6;is1w3m-u0DQ1;is1w3m-ONXMo;ex3vh9-KqTt6;b8uv5z-0dDMm;", + "history_output" : "n6lv58mihra;tlpe3au2vjr;gopwj98w0pu;80uk9m9po5w;vf16mbs2yqi;vwnsae06o0m;gs063ewii5p;", + "history_begin_time" : 1656428410815, + "history_end_time" : 1656428429051, + "history_notes" : null, + "history_process" : "tjf998we0a4vxctdcq91", + "host_id" : "p6wvf2;", + "indicator" : "Done" +},{ + "history_id" : "DGAiNyR7m5GpH9X3EZ", + "history_input" : "6oifw4-mQPVY;ah91af-YTPn7;", + "history_output" : "n6gawnwlgjz;9eib9x5vep4;", + "history_begin_time" : 1654542710128, + "history_end_time" : 1654542716355, + "history_notes" : null, + "history_process" : 
"tjf998we0a4vxctdcq91", + "host_id" : "p6wvf2;", + "indicator" : "Done" +},{ + "history_id" : "1KzgttZ8Vy2DjouN52", + "history_input" : "l8vlic-iSepx;6oifw4-mQPVY;wny2dz-TGjYO;rjm4qm-xbeiX;", + "history_output" : "py4wx8nmbmr;dkca9b0ypz6;wn2hbzhu2ay;0dbnqvd4erk;", + "history_begin_time" : 1654535458151, + "history_end_time" : 1654535471185, + "history_notes" : null, + "history_process" : "tjf998we0a4vxctdcq91", + "host_id" : "p6wvf2;", + "indicator" : "Stopped" +},{ + "history_id" : "m06UzLLG9xtjIwz4E4", + "history_input" : "l8vlic-iSepx;6oifw4-mQPVY;", + "history_output" : "5thy8s0bg8x;2x6aznc7yzz;", + "history_begin_time" : 1654493106524, + "history_end_time" : 1654500507944, + "history_notes" : null, + "history_process" : "tjf998we0a4vxctdcq91", + "host_id" : "p6wvf2;", + "indicator" : "Failed" +}] diff --git a/history/xfA0mWgxIUGkRjW7Ta.json b/history/xfA0mWgxIUGkRjW7Ta.json new file mode 100644 index 0000000..deb60a1 --- /dev/null +++ b/history/xfA0mWgxIUGkRjW7Ta.json @@ -0,0 +1,31 @@ +[{ + "history_id" : "hmz4r94bwgu", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=$(date -d '3 day ago' '+%Y%m%d') #This needs to be auto date `date -d \"-2 day ${1}\" +%Y%m%d`\nexport stdate_post=$(date -d '3 day ago' '+%Y-%m-%d') #This needs to be auto date\nexport eddate_post=$(date -d '2 day ago' '+%Y%m%d') #This needs to be auto date\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files\"\nexport mcip_dir=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km\"\nexport dir_graph=\"/groups/ESS/aalnaim/cmaq/plots\"\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\n\ndate = getenv(\"YYYYMMDD_POST\")\nd1 = getenv(\"stdate_post\")\nd2 = getenv(\"eddate_post\")\n\n;print(\"Passed Date: \"+date)\n\n;aconc_dir = getenv(\"postdata_dir\")\ngrid_dir = getenv(\"mcip_dir\")\nplot_dir = getenv(\"dir_graph\")\n\ncdf_file1 = addfile(\"/groups/ESS/aalnaim/cmaq/prediction_nc_files/COMBINE3D_ACONC_v531_gcc_AQF5X_\"+date+\"_ML_extracted.nc\",\"r\")\ncdf_file= addfile(grid_dir+\"/GRIDCRO2D_\"+date+\".nc\",\"r\")\n\nptime = (/\"12\",\"13\",\"14\",\"15\",\"16\",\"17\",\"18\",\"19\",\"20\",\"21\",\"22\",\"23\",\"00\",\"01\",\"02\",\"03\",\"04\",\"05\",\"06\",\"07\",\"08\",\"09\",\"10\",\"11\"/)\n\ntime = cdf_file1->TFLAG(:,0,:)\no3 = cdf_file1->O3(:,:,:) ;ppb\n;pm25 = cdf_file1->PM25_TOT(:,0,:,:)\n\n\nnt = dimsizes(o3(:,0,0))\nny = dimsizes(o3(0,:,0))\nnx = dimsizes(o3(0,0,:))\n\nprint(nt+\" \"+ny+\" \"+nx)\nprint(max(o3))\nprint(min(o3))\nprint(avg(o3))\n\n;print(max(pm25))\n;print(min(pm25))\n;print(avg(pm25))\n\n;print(time)\n\nlat = cdf_file->LAT(0,0,:,:)\nlon = cdf_file->LON(0,0,:,:)\n\no3@lat2d = lat\no3@lon2d = lon\n\nres = True\nres@gsnMaximize = True ; maximize pot in frame\nres@gsnFrame = False ; don't advance frame\nres@gsnDraw = False\n;res@gsnSpreadColors = True\nres@lbLabelAutoStride = True\n;res@lbBoxLinesOn = False\nres@pmLabelBarHeightF = 0.1\nres@pmLabelBarWidthF = 
0.5\nres@cnFillOn=True\n;res@cnMonoFillPattern=True\n;res@cnMonoLineColor=True\nres@cnLinesOn=False\n;res@pmLabelBarDisplayMode=\"never\"\nres@gsnLeftString = \"\";\nres@gsnRightString = \"\"\n\nres@mpLimitMode = \"LatLon\"\nres@mpMinLonF = -120 ;min(lon)+0.2\nres@mpMaxLonF = -70 ;max(lon)-0.2\nres@mpMinLatF = 25 ;min(lat)+0.05\nres@mpMaxLatF = 50 ;max(lat)-0.05\nres@mpDataBaseVersion = \"MediumRes\"\n;res@tiMainString = times(it)\nres@mpDataBaseVersion = \"MediumRes\"\nres@mpDataSetName = \"Earth..4\"\nres@mpAreaMaskingOn = True\nres@mpOutlineBoundarySets = \"GeophysicalAndUSStates\"\nres@mpOutlineSpecifiers=\"United States : States\"\nres@mpLandFillColor = \"white\"\nres@mpInlandWaterFillColor = \"white\"\nres@mpOceanFillColor = \"white\"\nres@mpGeophysicalLineColor = \"Black\"\nres@mpGeophysicalLineThicknessF = 1.5\n\n;res@gsnSpreadColors = True\nres@lbLabelAutoStride = True\nres@lbLabelFont = 25\nres@tiXAxisFont = 25\nres@pmTickMarkDisplayMode = \"Always\"\nres@tmXBLabelFont = 25\nres@tmXBLabelFontHeightF = 0.013\nres@tmXBLabelDeltaF = -0.5\nres@tmYLLabelFont = 25\nres@tmYLLabelFontHeightF = 0.013\nres@tmXBLabelDeltaF = -0.5\nres@tmXTLabelsOn = False\nres@tmXTLabelFont = 25\nres@tmXTLabelFontHeightF = 0.013\nres@tmYRLabelsOn = False\nres@tmYRLabelFont = 25\nres@tmYRLabelFontHeightF = 0.013\n\n\nres@mpProjection = \"LambertConformal\" ;\"CylindricalEquidistant\"\nres@mpLambertParallel1F = 33.\nres@mpLambertParallel2F = 45.\nres@mpLambertMeridianF = -98.\n\nres@cnLevelSelectionMode = \"ManualLevels\"\nres@cnMinLevelValF = 0.\nres@cnMaxLevelValF = 80\nres@cnLevelSpacingF = 4\n\nres@txFont = \"times-roman\"\nres@tiMainFont = \"times-roman\"\n\ndo it = 0, nt-1\n if (it .lt. 12) then\n pdate=d1\n else\n pdate=d2\n end if\n\n pname=plot_dir+\"/testPlot_\"+pdate+\"_\"+ptime(it)\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n res@tiMainString = pdate+\" \"+ptime(it)+\" UTC O~B~3~N~ Forecast (ppbV)\"\n plot = gsn_csm_contour_map(wks,o3(it,:,:),res)\n draw(plot)\n frame(wks)\n delete(wks)\n system(\"composite -geometry 100x70+900+900 /groups/ESS/aalnaim/cmaq/mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\nend do\ndelete(res)\n\nend\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\n\n# convert -delay 100 *.png 20220613_20220614.gif\nconvert -delay 100 /groups/ESS/aalnaim/cmaq/plots/testPlot*.png /groups/ESS/aalnaim/cmaq/plots/Map_$YYYYMMDD_POST_correct.gif\n\nif [ $? 
-eq 0 ]; then\n echo \"Generating images/gif Completed Successfully\"\n\techo \"Removing ncl file: geoweaver_plot_daily_O3.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\nelse\n echo \"Generating images/gif Failed!\"\n echo \"Removing ncl file: geoweaver_plot_daily_O3.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3.ncl\nfi\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\n(0)\t24 265 442\n(0)\t85.06\n(0)\t4.506333333333333\n(0)\t30.63602873848283\nGenerating images/gif Completed Successfully\nRemoving ncl file: geoweaver_plot_daily_O3.ncl...\n", + "history_begin_time" : 1656454294420, + "history_end_time" : 1656454573066, + "history_notes" : null, + "history_process" : "iicy7w", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "hr2rrx85yut", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=$(date -d '3 day ago' '+%Y%m%d')\nexport stdate_post=$(date -d '3 day ago' '+%Y-%m-%d') \nexport eddate_post=$(date -d '2 day ago' '+%Y%m%d')\n\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files\"\nexport mcip_dir=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km\"\nexport graph_dir=\"/groups/ESS/aalnaim/cmaq/plots\"\n\nexport obs_dir_NCL=\"/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X\"\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\n\ndate = getenv(\"YYYYMMDD_POST\") \nd1 = getenv(\"stdate_post\") \nd2 = getenv(\"eddate_post\") \n\nobs_dir = getenv(\"obs_dir_NCL\")\nplot_dir = getenv(\"graph_dir\") \n\nhr=new(24,\"string\")\nhr=(/\"00\",\"01\",\"02\",\"03\",\"04\",\"05\",\"06\",\"07\",\"08\",\"09\",\"10\",\"11\",\"12\",\"13\",\"14\",\"15\",\"16\",\"17\",\"18\",\"19\",\"20\",\"21\",\"22\",\"23\"/)\n\nprint(plot_dir)\naconc_dir = getenv(\"postdata_dir\") \ngrid_dir = getenv(\"mcip_dir\") \n\ncdf_file1 = addfile(aconc_dir+\"/COMBINE3D_ACONC_v531_gcc_AQF5X_\"+date+\"_ML_extracted.nc\",\"r\")\ncdf_file= addfile(grid_dir+\"/GRIDCRO2D_\"+date+\".nc\",\"r\")\ncdf_file2= addfile(grid_dir+\"/METCRO2D_\"+date+\".nc\",\"r\")\n\ntime = cdf_file1->TFLAG(:,0,:)\no3 = cdf_file1->O3(:,:,:) ;ppb\nwspd10=cdf_file2->WSPD10(:,0,:,:)\nwdir10=cdf_file2->WDIR10(:,0,:,:)\n\ntemp = cdf_file2->TEMP2\n\nnt = dimsizes(o3(:,0,0))\nny = dimsizes(o3(0,:,0))\nnx = dimsizes(o3(0,0,:))\n\nprint(max(temp))\nprint(min(temp))\nprint(avg(temp))\n\n\n\nprint(nt+\" \"+ny+\" \"+nx)\nprint(max(o3))\nprint(min(o3))\nprint(avg(o3))\n\nlat = cdf_file->LAT(0,0,:,:)\nlon = cdf_file->LON(0,0,:,:)\n\no3@lat2d = lat\no3@lon2d = lon\no3@unit = \"ppbv\"\n\nUV10=wind_component(wspd10,wdir10,0)\nUV10@lat2d = lat\nUV10@lon2d = lon\n\n\nres = True\nres@gsnMaximize = True ; maximize pot in frame\nres@gsnFrame = False ; dont advance frame\nres@gsnDraw = False\nres@gsnLeftString = 
\"\"\nres@gsnRightString = \"\"\nres@txFont = \"times-roman\"\nres@tiMainFont = \"times-roman\"\n;res@tiMainFontHeightF = 0.02\n;res@vpWidthF = 0.7\n;res@vpHeightF = 0.7\n\n;;set map;;\nmpres = res\nmpres@mpLimitMode = \"LatLon\"\nmpres@mpDataSetName = \"Earth..4\"\nmpres@mpDataBaseVersion = \"MediumRes\"\nmpres@mpOutlineOn = True\nmpres@mpGeophysicalLineThicknessF = 1.5\nmpres@mpFillDrawOrder = \"PostDraw\"\nmpres@mpFillOn = False\nmpres@mpAreaMaskingOn = True\nmpres@mpOutlineBoundarySets = \"GeophysicalAndUSStates\"\nmpres@mpOutlineSpecifiers = \"United States:States\"\nmpres@mpProjection = \"LambertConformal\"\nmpres@mpLambertParallel1F = 33.\nmpres@mpLambertParallel2F = 45.\nmpres@mpLambertMeridianF = -98.\nmpres@mpMinLonF = -120 ;min(lon)+0.2\nmpres@mpMaxLonF = -70 ;max(lon)-0.2\nmpres@mpMinLatF = 25 ;min(lat)+0.05\nmpres@mpMaxLatF = 50 ;max(lat)-0.05\nmpres@pmTickMarkDisplayMode = \"Always\"\nmpres@mpLandFillColor = \"white\"\nmpres@mpInlandWaterFillColor = \"white\"\nmpres@mpOceanFillColor = \"white\"\nmpres@mpGeophysicalLineColor = \"Black\"\n\n;mpres@lbLabelAutoStride = True\nmpres@tiXAxisFont = 25\nmpres@pmTickMarkDisplayMode = \"Always\"\nmpres@tmXBLabelFont = 25\nmpres@tmXBLabelFontHeightF = 0.013\nmpres@tmXBLabelDeltaF = -0.5\nmpres@tmYLLabelFont = 25\nmpres@tmYLLabelFontHeightF = 0.013\nmpres@tmXBLabelDeltaF = -0.5\nmpres@tmXTLabelsOn = False\nmpres@tmXTLabelFont = 25\nmpres@tmXTLabelFontHeightF = 0.013\nmpres@tmYRLabelsOn = False\nmpres@tmYRLabelFont = 25\nmpres@tmYRLabelFontHeightF = 0.013\n\n;;set contour;;\ncnres = res\ncnres@cnFillDrawOrder = \"PreDraw\"\ncnres@cnFillOn = True\ncnres@cnLinesOn = False\ncnres@cnLineLabelsOn = False\ncnres@lbLabelFont = 25\ncnres@lbLabelFontHeightF = 0.013\ncnres@tiXAxisFont = 25\ncnres@pmLabelBarWidthF = 0.5\ncnres@pmLabelBarHeightF = 0.1\n;cnres@pmLabelBarOrthogonalPosF = -0.02\ncnres@lbLabelAutoStride = True\n\n;set vector;;\nres_vc = res\nres_vc@vcGlyphStyle = \"LineArrow\"\nres_vc@vcLineArrowThicknessF = 3\nres_vc@vcMinDistanceF = 0.03\nres_vc@vcRefLengthF = 0.03\nres_vc@vcRefAnnoOn = True\nres_vc@vcRefMagnitudeF = 16\nres_vc@vcRefAnnoString1 = \"16m/s\"\nres_vc@vcRefAnnoSide = \"Top\"\nres_vc@vcRefAnnoString2On = False\nres_vc@vcRefAnnoPerimOn = False\nres_vc@vcRefAnnoOrthogonalPosF = -0.02\nres_vc@vcRefAnnoParallelPosF = 0.999\n;res_vc@vcRefAnnoBackgroundColor = \"White\"\nres_vc@vcVectorDrawOrder = \"PostDraw\"\n\ndo it = 0, nt-1\n if (it .lt. 12) then\n pdate=d1\n else\n pdate=d2\n end if\n\n ;print(time(it,0)+\" \"+time(it,1))\n rundate = yyyyddd_to_yyyymmdd( time(it,0) )\n runtime = hr( tointeger(time(it,1)/10000) )\n\n site = readAsciiTable(obs_dir+\"/AQF5X_Hourly_\"+rundate+runtime+\".dat\",1,\"string\",1)\n nrows = dimsizes(site)\n sitename = str_get_field(site,1,\",\")\n sitelat = stringtofloat(str_get_field(site,2,\",\"))\n sitelon = stringtofloat(str_get_field(site,3,\",\"))\n O3_obs = stringtofloat(str_get_field(site,4,\",\"))\n\n obslon = sitelon(:,0)\n obslat = sitelat(:,0)\n obsO3 = O3_obs(:,0)\n\n npts = nrows(0)\n\n obsO3@_FillValue = -999.\n\n;--- levels for dividing\n levels_O3 = ispan(0,80,4)\n\n nlevels = dimsizes(levels_O3)\n\n colors = span_color_rgba(\"WhiteBlueGreenYellowRed\",nlevels+1)\n\n num_distinct_markers = nlevels+1 ; number of distinct markers\n lat_O3 = new((/num_distinct_markers,npts/),float)\n lon_O3 = new((/num_distinct_markers,npts/),float)\n lat_O3 = -999\n lon_O3 = -999\n\n\n;\n; Group the points according to which range they fall in. 
At the\n; same time, create the label that we will use later in the labelbar\n;\n do i = 0, num_distinct_markers-1\n if (i.eq.0) then\n indexes_O3 = ind(obsO3(:).lt.levels_O3(0))\n end if\n if (i.eq.num_distinct_markers-1) then\n indexes_O3 = ind(obsO3(:).ge.max(levels_O3))\n end if\n if (i.gt.0.and.i.lt.num_distinct_markers-1) then\n indexes_O3 = ind(obsO3(:).ge.levels_O3(i-1).and.obsO3(:).lt.levels_O3(i))\n end if\n\n;\n; Now that we have the set of indexes whose values fall within\n; the given range, take the corresponding lat/lon values and store\n; them, so later we can color this set of markers with the appropriate\n; color.\n;\n if (.not.any(ismissing(indexes_O3))) then\n npts_range_O3 = dimsizes(indexes_O3) ; # of points in this range.\n\n lat_O3(i,0:npts_range_O3-1) = obslat(indexes_O3)\n lon_O3(i,0:npts_range_O3-1) = obslon(indexes_O3)\n ;print(\"O3: \"+npts_range_O3)\n end if\n\n\n delete(indexes_O3) ; Necessary b/c \"indexes\" may be a different\n end do\n\n lat_O3@_FillValue = -999\n lon_O3@_FillValue = -999\n\n gsres = True\n gsres@gsMarkerIndex = 16 ; Use filled dots for markers.\n\n hollowres = True\n hollowres@gsMarkerIndex = 4\n hollowres@gsMarkerColor = \"black\"\n hollowres@gsMarkerSizeF = 0.008\n\n;;;;;;;;; Plot Ozone\n pname=plot_dir+\"/OBS-FORECAST_O3_\"+rundate+runtime\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n pmid_O3 = new(num_distinct_markers,graphic)\n hollow_O3 = new(num_distinct_markers,graphic)\n\n cnres@tiMainString = pdate+\" \"+runtime+\" UTC O~B~3~N~ (ppbV)\"\n cnres@cnLevelSelectionMode = \"ManualLevels\"\n cnres@cnMinLevelValF = 0.\n cnres@cnMaxLevelValF = 80\n cnres@cnLevelSpacingF = 4\n\n ;plot = gsn_csm_contour_map(wks,o3(it,:,:),res)\n map = gsn_csm_map(wks,mpres)\n contour = gsn_csm_contour(wks,o3(it,:,:),cnres)\n vector = gsn_csm_vector(wks,UV10(0,it,:,:),UV10(1,it,:,:),res_vc)\n overlay(map,contour)\n overlay(map,vector)\n\n pmid = new(num_distinct_markers,graphic)\n hollow = new(num_distinct_markers,graphic)\n do i = 0, num_distinct_markers-1\n if (.not.ismissing(lat_O3(i,0)))\n gsres@gsMarkerColor = colors(i,:)\n gsres@gsMarkerSizeF = 0.008\n gsres@gsMarkerThicknessF = 1\n pmid(i) = gsn_add_polymarker(wks,vector,lon_O3(i,:),lat_O3(i,:),gsres)\n hollow(i) = gsn_add_polymarker(wks,vector,lon_O3(i,:),lat_O3(i,:),hollowres)\n end if\n end do\n\n draw(map)\n frame(wks)\n delete(wks)\n delete(pmid_O3)\n delete(hollow_O3)\n system(\"composite -geometry 100x70+900+900 /groups/ESS/aalnaim/cmaq/mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\n\n\n delete(pmid)\n delete(hollow)\n delete(site)\n delete(sitename)\n delete(sitelat)\n delete(sitelon)\n delete(O3_obs)\n delete(obslon)\n delete(obslat)\n delete(obsO3)\n delete([/lon_O3,lat_O3/])\n\nend do\ndelete(res)\n\n;/\n\nend\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\n\nconvert -delay 100 /groups/ESS/aalnaim/cmaq/plots/OBS*.png /groups/ESS/aalnaim/cmaq/plots/Airnow_$YYYYMMDD_POST_correct.gif\n\nif [ $? 
-eq 0 ]; then\n echo \"Generating AirNow images/gif Completed Successfully\"\n\techo \"Removing ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nelse\n echo \"Generating AirNow images/gif Failed!\"\n echo \"Removing ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nfi\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\n\n\nVariable: plot_dir\nType: string\nTotal Size: 8 bytes\n 1 values\nNumber of Dimensions: 1\nDimensions and sizes:\t[1]\nCoordinates: \n(0)\t/groups/ESS/aalnaim/cmaq/plots\n(0)\t316.8002\n(0)\t262.6003\n(0)\t293.9359\n(0)\t24 265 442\n(0)\t85.06\n(0)\t4.506333333333333\n(0)\t30.63602873848283\nGenerating AirNow images/gif Completed Successfully\nRemoving ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\n", + "history_begin_time" : 1656454294901, + "history_end_time" : 1656454575582, + "history_notes" : null, + "history_process" : "is1w3m", + "host_id" : "p6wvf2", + "indicator" : "Done" +},{ + "history_id" : "9a4e7z9gi53", + "history_input" : "#!/bin/bash\n\n# Setting env variables\nexport YYYYMMDD_POST=$(date -d '3 day ago' '+%Y%m%d')\nexport stdate_post=$(date -d '3 day ago' '+%Y-%m-%d') \nexport eddate_post=$(date -d '2 day ago' '+%Y%m%d')\n\n\nexport postdata_dir=\"/groups/ESS/aalnaim/cmaq/prediction_nc_files\"\nexport mcip_dir=\"/groups/ESS/share/projects/SWUS3km/data/cmaqdata/mcip/12km\"\nexport graph_dir=\"/groups/ESS/aalnaim/cmaq/plots\"\n\nexport obs_dir_NCL=\"/groups/ESS/share/projects/SWUS3km/data/OBS/AirNow/AQF5X\"\n\nmodule load ncl\n\ncat <>/groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_code.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/gsn_csm.ncl\"\nload \"/opt/sw/spack/apps/linux-centos8-cascadelake/gcc-9.3.0-openmpi-4.0.4/ncl-6.6.2-fr/lib/ncarg/nclscripts/csm/contributed.ncl\"\n\nsetvalues NhlGetWorkspaceObjectId()\n\"wsMaximumSize\": 600000000\nend setvalues\n\nbegin\n\ndate = getenv(\"YYYYMMDD_POST\") \nd1 = getenv(\"stdate_post\") \nd2 = getenv(\"eddate_post\") \n\nobs_dir = getenv(\"obs_dir_NCL\")\nplot_dir = getenv(\"graph_dir\") \n\nhr=new(24,\"string\")\nhr=(/\"00\",\"01\",\"02\",\"03\",\"04\",\"05\",\"06\",\"07\",\"08\",\"09\",\"10\",\"11\",\"12\",\"13\",\"14\",\"15\",\"16\",\"17\",\"18\",\"19\",\"20\",\"21\",\"22\",\"23\"/)\n\nprint(plot_dir)\naconc_dir = getenv(\"postdata_dir\") \ngrid_dir = getenv(\"mcip_dir\") \n\ncdf_file1 = addfile(aconc_dir+\"/COMBINE3D_ACONC_v531_gcc_AQF5X_\"+date+\"_ML_extracted.nc\",\"r\")\ncdf_file= addfile(grid_dir+\"/GRIDCRO2D_\"+date+\".nc\",\"r\")\ncdf_file2= addfile(grid_dir+\"/METCRO2D_\"+date+\".nc\",\"r\")\n\ntime = cdf_file1->TFLAG(:,0,:)\no3 = cdf_file1->O3(:,:,:) ;ppb\nwspd10=cdf_file2->WSPD10(:,0,:,:)\nwdir10=cdf_file2->WDIR10(:,0,:,:)\n\ntemp = cdf_file2->TEMP2\n\nnt = dimsizes(o3(:,0,0))\nny = dimsizes(o3(0,:,0))\nnx = dimsizes(o3(0,0,:))\n\nprint(max(temp))\nprint(min(temp))\nprint(avg(temp))\n\n\n\nprint(nt+\" \"+ny+\" \"+nx)\nprint(max(o3))\nprint(min(o3))\nprint(avg(o3))\n\nlat = cdf_file->LAT(0,0,:,:)\nlon = cdf_file->LON(0,0,:,:)\n\no3@lat2d = 
lat\no3@lon2d = lon\no3@unit = \"ppbv\"\n\nUV10=wind_component(wspd10,wdir10,0)\nUV10@lat2d = lat\nUV10@lon2d = lon\n\n\nres = True\nres@gsnMaximize = True ; maximize pot in frame\nres@gsnFrame = False ; dont advance frame\nres@gsnDraw = False\nres@gsnLeftString = \"\"\nres@gsnRightString = \"\"\nres@txFont = \"times-roman\"\nres@tiMainFont = \"times-roman\"\n;res@tiMainFontHeightF = 0.02\n;res@vpWidthF = 0.7\n;res@vpHeightF = 0.7\n\n;;set map;;\nmpres = res\nmpres@mpLimitMode = \"LatLon\"\nmpres@mpDataSetName = \"Earth..4\"\nmpres@mpDataBaseVersion = \"MediumRes\"\nmpres@mpOutlineOn = True\nmpres@mpGeophysicalLineThicknessF = 1.5\nmpres@mpFillDrawOrder = \"PostDraw\"\nmpres@mpFillOn = False\nmpres@mpAreaMaskingOn = True\nmpres@mpOutlineBoundarySets = \"GeophysicalAndUSStates\"\nmpres@mpOutlineSpecifiers = \"United States:States\"\nmpres@mpProjection = \"LambertConformal\"\nmpres@mpLambertParallel1F = 33.\nmpres@mpLambertParallel2F = 45.\nmpres@mpLambertMeridianF = -98.\nmpres@mpMinLonF = -120 ;min(lon)+0.2\nmpres@mpMaxLonF = -70 ;max(lon)-0.2\nmpres@mpMinLatF = 25 ;min(lat)+0.05\nmpres@mpMaxLatF = 50 ;max(lat)-0.05\nmpres@pmTickMarkDisplayMode = \"Always\"\nmpres@mpLandFillColor = \"white\"\nmpres@mpInlandWaterFillColor = \"white\"\nmpres@mpOceanFillColor = \"white\"\nmpres@mpGeophysicalLineColor = \"Black\"\n\n;mpres@lbLabelAutoStride = True\nmpres@tiXAxisFont = 25\nmpres@pmTickMarkDisplayMode = \"Always\"\nmpres@tmXBLabelFont = 25\nmpres@tmXBLabelFontHeightF = 0.013\nmpres@tmXBLabelDeltaF = -0.5\nmpres@tmYLLabelFont = 25\nmpres@tmYLLabelFontHeightF = 0.013\nmpres@tmXBLabelDeltaF = -0.5\nmpres@tmXTLabelsOn = False\nmpres@tmXTLabelFont = 25\nmpres@tmXTLabelFontHeightF = 0.013\nmpres@tmYRLabelsOn = False\nmpres@tmYRLabelFont = 25\nmpres@tmYRLabelFontHeightF = 0.013\n\n;;set contour;;\ncnres = res\ncnres@cnFillDrawOrder = \"PreDraw\"\ncnres@cnFillOn = True\ncnres@cnLinesOn = False\ncnres@cnLineLabelsOn = False\ncnres@lbLabelFont = 25\ncnres@lbLabelFontHeightF = 0.013\ncnres@tiXAxisFont = 25\ncnres@pmLabelBarWidthF = 0.5\ncnres@pmLabelBarHeightF = 0.1\n;cnres@pmLabelBarOrthogonalPosF = -0.02\ncnres@lbLabelAutoStride = True\n\n;set vector;;\nres_vc = res\nres_vc@vcGlyphStyle = \"LineArrow\"\nres_vc@vcLineArrowThicknessF = 3\nres_vc@vcMinDistanceF = 0.03\nres_vc@vcRefLengthF = 0.03\nres_vc@vcRefAnnoOn = True\nres_vc@vcRefMagnitudeF = 16\nres_vc@vcRefAnnoString1 = \"16m/s\"\nres_vc@vcRefAnnoSide = \"Top\"\nres_vc@vcRefAnnoString2On = False\nres_vc@vcRefAnnoPerimOn = False\nres_vc@vcRefAnnoOrthogonalPosF = -0.02\nres_vc@vcRefAnnoParallelPosF = 0.999\n;res_vc@vcRefAnnoBackgroundColor = \"White\"\nres_vc@vcVectorDrawOrder = \"PostDraw\"\n\ndo it = 0, nt-1\n if (it .lt. 
12) then\n pdate=d1\n else\n pdate=d2\n end if\n\n ;print(time(it,0)+\" \"+time(it,1))\n rundate = yyyyddd_to_yyyymmdd( time(it,0) )\n runtime = hr( tointeger(time(it,1)/10000) )\n\n site = readAsciiTable(obs_dir+\"/AQF5X_Hourly_\"+rundate+runtime+\".dat\",1,\"string\",1)\n nrows = dimsizes(site)\n sitename = str_get_field(site,1,\",\")\n sitelat = stringtofloat(str_get_field(site,2,\",\"))\n sitelon = stringtofloat(str_get_field(site,3,\",\"))\n O3_obs = stringtofloat(str_get_field(site,4,\",\"))\n\n obslon = sitelon(:,0)\n obslat = sitelat(:,0)\n obsO3 = O3_obs(:,0)\n\n npts = nrows(0)\n\n obsO3@_FillValue = -999.\n\n;--- levels for dividing\n levels_O3 = ispan(0,80,4)\n\n nlevels = dimsizes(levels_O3)\n\n colors = span_color_rgba(\"WhiteBlueGreenYellowRed\",nlevels+1)\n\n num_distinct_markers = nlevels+1 ; number of distinct markers\n lat_O3 = new((/num_distinct_markers,npts/),float)\n lon_O3 = new((/num_distinct_markers,npts/),float)\n lat_O3 = -999\n lon_O3 = -999\n\n\n;\n; Group the points according to which range they fall in. At the\n; same time, create the label that we will use later in the labelbar\n;\n do i = 0, num_distinct_markers-1\n if (i.eq.0) then\n indexes_O3 = ind(obsO3(:).lt.levels_O3(0))\n end if\n if (i.eq.num_distinct_markers-1) then\n indexes_O3 = ind(obsO3(:).ge.max(levels_O3))\n end if\n if (i.gt.0.and.i.lt.num_distinct_markers-1) then\n indexes_O3 = ind(obsO3(:).ge.levels_O3(i-1).and.obsO3(:).lt.levels_O3(i))\n end if\n\n;\n; Now that we have the set of indexes whose values fall within\n; the given range, take the corresponding lat/lon values and store\n; them, so later we can color this set of markers with the appropriate\n; color.\n;\n if (.not.any(ismissing(indexes_O3))) then\n npts_range_O3 = dimsizes(indexes_O3) ; # of points in this range.\n\n lat_O3(i,0:npts_range_O3-1) = obslat(indexes_O3)\n lon_O3(i,0:npts_range_O3-1) = obslon(indexes_O3)\n ;print(\"O3: \"+npts_range_O3)\n end if\n\n\n delete(indexes_O3) ; Necessary b/c \"indexes\" may be a different\n end do\n\n lat_O3@_FillValue = -999\n lon_O3@_FillValue = -999\n\n gsres = True\n gsres@gsMarkerIndex = 16 ; Use filled dots for markers.\n\n hollowres = True\n hollowres@gsMarkerIndex = 4\n hollowres@gsMarkerColor = \"black\"\n hollowres@gsMarkerSizeF = 0.008\n\n;;;;;;;;; Plot Ozone\n pname=plot_dir+\"/OBS-FORECAST_O3_\"+rundate+runtime\n wks = gsn_open_wks(\"png\",pname)\n gsn_define_colormap(wks, \"WhiteBlueGreenYellowRed\")\n\n pmid_O3 = new(num_distinct_markers,graphic)\n hollow_O3 = new(num_distinct_markers,graphic)\n\n cnres@tiMainString = pdate+\" \"+runtime+\" UTC O~B~3~N~ (ppbV)\"\n cnres@cnLevelSelectionMode = \"ManualLevels\"\n cnres@cnMinLevelValF = 0.\n cnres@cnMaxLevelValF = 80\n cnres@cnLevelSpacingF = 4\n\n ;plot = gsn_csm_contour_map(wks,o3(it,:,:),res)\n map = gsn_csm_map(wks,mpres)\n contour = gsn_csm_contour(wks,o3(it,:,:),cnres)\n vector = gsn_csm_vector(wks,UV10(0,it,:,:),UV10(1,it,:,:),res_vc)\n overlay(map,contour)\n overlay(map,vector)\n\n pmid = new(num_distinct_markers,graphic)\n hollow = new(num_distinct_markers,graphic)\n do i = 0, num_distinct_markers-1\n if (.not.ismissing(lat_O3(i,0)))\n gsres@gsMarkerColor = colors(i,:)\n gsres@gsMarkerSizeF = 0.008\n gsres@gsMarkerThicknessF = 1\n pmid(i) = gsn_add_polymarker(wks,vector,lon_O3(i,:),lat_O3(i,:),gsres)\n hollow(i) = gsn_add_polymarker(wks,vector,lon_O3(i,:),lat_O3(i,:),hollowres)\n end if\n end do\n\n draw(map)\n frame(wks)\n delete(wks)\n delete(pmid_O3)\n delete(hollow_O3)\n system(\"composite -geometry 100x70+900+900 
/groups/ESS/aalnaim/cmaq/mason-logo-green.png \"+pname+\".png \"+pname+\".png\")\n\n\n delete(pmid)\n delete(hollow)\n delete(site)\n delete(sitename)\n delete(sitelat)\n delete(sitelon)\n delete(O3_obs)\n delete(obslon)\n delete(obslat)\n delete(obsO3)\n delete([/lon_O3,lat_O3/])\n\nend do\ndelete(res)\n\n;/\n\nend\nEOF\n\n\nncl /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\n\nconvert -delay 100 /groups/ESS/aalnaim/cmaq/plots/OBS*.png /groups/ESS/aalnaim/cmaq/plots/Airnow_$YYYYMMDD_POST_correct.gif\n\nif [ $? -eq 0 ]; then\n echo \"Generating AirNow images/gif Completed Successfully\"\n\techo \"Removing ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nelse\n echo \"Generating AirNow images/gif Failed!\"\n echo \"Removing ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\"\n\trm /groups/ESS/aalnaim/cmaq/geoweaver_plot_daily_O3_Airnow.ncl\nfi\n", + "history_output" : " Copyright (C) 1995-2019 - All Rights Reserved\n University Corporation for Atmospheric Research\n NCAR Command Language Version 6.6.2\n The use of this software is governed by a License Agreement.\n See http://www.ncl.ucar.edu/ for more details.\n\n\nVariable: plot_dir\nType: string\nTotal Size: 8 bytes\n 1 values\nNumber of Dimensions: 1\nDimensions and sizes:\t[1]\nCoordinates: \n(0)\t/groups/ESS/aalnaim/cmaq/plots\n(0)\t316.8002\n(0)\t262.6003\n(0)\t293.9359\n(0)\t24 265 442\n(0)\t85.06\n(0)\t4.506333333333333\n(0)\t30.63602873848283\nGenerating AirNow images/gif Completed Successfully\nRemoving ncl file: geoweaver_plot_daily_O3_Airnow.ncl...\n", + "history_begin_time" : 1656454576091, + "history_end_time" : 1656454858998, + "history_notes" : null, + "history_process" : "is1w3m", + "host_id" : "p6wvf2", + "indicator" : "Done" +}] diff --git a/workflow.json b/workflow.json new file mode 100644 index 0000000..15efa3e --- /dev/null +++ b/workflow.json @@ -0,0 +1,9 @@ +{ + "id" : "tjf998we0a4vxctdcq91", + "name" : "predict_CMAQ", + "description" : "", + "owner" : "111111", + "confidential" : "FALSE", + "edges" : 
"[{\"source\":{\"title\":\"generate_images_ncl\",\"id\":\"iicy7w-IAjp6\",\"x\":702.5603637695312,\"y\":332.4066467285156,\"color\":\"\"},\"target\":{\"title\":\"evaluate_prediction_ncl\",\"id\":\"fsk7f2-n2ldn\",\"x\":832,\"y\":481,\"color\":\"\"}},{\"source\":{\"title\":\"generate_AirNow_ncl\",\"id\":\"is1w3m-u0DQ1\",\"x\":954.1414794921875,\"y\":332.28587341308594,\"color\":\"\"},\"target\":{\"title\":\"evaluate_prediction_ncl\",\"id\":\"fsk7f2-n2ldn\",\"x\":832,\"y\":481,\"color\":\"\"}},{\"source\":{\"title\":\"processing_test_data\",\"id\":\"ex3vh9-tldYX\",\"x\":263.6973114013672,\"y\":403.7543029785156,\"color\":\"\"},\"target\":{\"title\":\"test_data\",\"id\":\"b8uv5z-TOfCs\",\"x\":330.0514221191406,\"y\":242.98955535888672,\"color\":\"\"}},{\"source\":{\"title\":\"test_data\",\"id\":\"b8uv5z-TOfCs\",\"x\":330.0514221191406,\"y\":242.98955535888672,\"color\":\"\"},\"target\":{\"title\":\"rf_prediction\",\"id\":\"l8vlic-LUtKb\",\"x\":567.7667236328125,\"y\":120.17420959472656,\"color\":\"\"}},{\"source\":{\"title\":\"rf_prediction\",\"id\":\"l8vlic-LUtKb\",\"x\":567.7667236328125,\"y\":120.17420959472656,\"color\":\"\"},\"target\":{\"title\":\"processing_test_netcdf\",\"id\":\"3asyzj-uzRoI\",\"x\":796.9434204101562,\"y\":207.27098083496094,\"color\":\"\"}},{\"source\":{\"title\":\"processing_test_netcdf\",\"id\":\"3asyzj-uzRoI\",\"x\":796.9434204101562,\"y\":207.27098083496094,\"color\":\"\"},\"target\":{\"title\":\"generate_images_ncl\",\"id\":\"iicy7w-IAjp6\",\"x\":702.5603637695312,\"y\":332.4066467285156,\"color\":\"\"}},{\"source\":{\"title\":\"processing_test_netcdf\",\"id\":\"3asyzj-uzRoI\",\"x\":796.9434204101562,\"y\":207.27098083496094,\"color\":\"\"},\"target\":{\"title\":\"generate_AirNow_ncl\",\"id\":\"is1w3m-u0DQ1\",\"x\":954.1414794921875,\"y\":332.28587341308594,\"color\":\"\"}}]", + "nodes" : "[{\"title\":\"generate_images_ncl\",\"id\":\"iicy7w-IAjp6\",\"x\":702.5603637695312,\"y\":332.4066467285156,\"color\":\"\"},{\"title\":\"generate_AirNow_ncl\",\"id\":\"is1w3m-u0DQ1\",\"x\":954.1414794921875,\"y\":332.28587341308594,\"color\":\"\"},{\"title\":\"evaluate_prediction_ncl\",\"id\":\"fsk7f2-n2ldn\",\"x\":832,\"y\":481,\"color\":\"\"},{\"title\":\"processing_test_data\",\"id\":\"ex3vh9-tldYX\",\"x\":263.6973114013672,\"y\":403.7543029785156,\"color\":\"\"},{\"title\":\"test_data\",\"id\":\"b8uv5z-TOfCs\",\"x\":330.0514221191406,\"y\":242.98955535888672,\"color\":\"\"},{\"title\":\"rf_prediction\",\"id\":\"l8vlic-LUtKb\",\"x\":567.7667236328125,\"y\":120.17420959472656,\"color\":\"\"},{\"title\":\"processing_test_netcdf\",\"id\":\"3asyzj-uzRoI\",\"x\":796.9434204101562,\"y\":207.27098083496094,\"color\":\"\"}]" +}