Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 5 additions & 5 deletions src/acquisition_master.R
Original file line number Diff line number Diff line change
Expand Up @@ -203,7 +203,7 @@ ms_init <- function(use_gpu = FALSE,
return(instance_details)
}

ms_instance <- ms_init(use_ms_error_handling = TRUE,
ms_instance <- ms_init(use_ms_error_handling = FALSE,
# force_machine_status = 'n00b',
config_storage_location = 'remote')

Expand Down Expand Up @@ -264,7 +264,7 @@ ms_globals <- c(ls(all.names = TRUE), 'ms_globals')

dir.create('logs', showWarnings = FALSE)

# dmnrow = 8
# dmnrow = 12
# print(network_domain, n=50)
for(dmnrow in 1:nrow(network_domain)){

Expand All @@ -281,7 +281,7 @@ for(dmnrow in 1:nrow(network_domain)){
# held_data = invalidate_tracked_data(network, domain, 'derive')
# owrite_tracker(network, domain)

# held_data = invalidate_tracked_data(network, domain, 'munge', 'precipitation')
# held_data = invalidate_tracked_data(network, domain, 'munge', 'stream_chemistry')
# owrite_tracker(network, domain)
# held_data = invalidate_tracked_data(network, domain, 'derive', 'stream_flux_inst')
# owrite_tracker(network, domain)
Expand All @@ -297,7 +297,7 @@ for(dmnrow in 1:nrow(network_domain)){
update_product_statuses(network = network,
domain = domain)
get_all_local_helpers(network = network,
domain = domain)
domain = domain)

ms_retrieve(network = network,
# prodname_filter = c('stream_chemistry'),
Expand All @@ -312,7 +312,7 @@ for(dmnrow in 1:nrow(network_domain)){
verbose = TRUE))
}
ms_derive(network = network,
prodname_filter = c('discharge'),
prodname_filter = c('precip_pchem_pflux'),
domain = domain)

if(domain != 'mcmurdo'){
Expand Down
14 changes: 14 additions & 0 deletions src/dev/dev_helpers.R
Original file line number Diff line number Diff line change
Expand Up @@ -1423,3 +1423,17 @@ insert_retrieval_datetimes <- function(){
write_lines(rt, f)
}
}

get_nonnumerics <- function(d){

    #gets unique nonnumeric values by column (the original header said "by
    #row", but apply(d, 2, ...) iterates columns). useful for identifying
    #quality codes embedded within data columns.
    #
    #d: a data.frame (or anything whose columns can be iterated as a list)
    #returns: a named list with one element per column of d, holding the
    #   unique values in that column that cannot be parsed as numeric.
    #   values that were already NA in the input are not reported, since an
    #   NA is missing data rather than a quality code.

    #lapply over columns instead of apply(d, 2, ...): apply coerces the whole
    #data.frame to a single (usually character) matrix, losing column types,
    #and its return shape silently varies between a list and a matrix
    #depending on the per-column result lengths. lapply always returns a
    #named list, one element per column.
    nonnumerics = lapply(d, function(x){

        #"NAs introduced by coercion" warnings are expected and meaningful
        #here (they mark the nonnumeric entries), so silence them.
        xx = suppressWarnings(as.numeric(x))

        #nonnumeric = failed numeric coercion, excluding values that were
        #NA to begin with
        unique(x[is.na(xx) & ! is.na(x)])
    })

    return(nonnumerics)
}
4 changes: 4 additions & 0 deletions src/global/function_aliases.R
Original file line number Diff line number Diff line change
Expand Up @@ -50,5 +50,9 @@ map = purrr::map
#convenience aliases exposing namespaced functions under their bare names.
map2 = purrr::map2  #fixed: was `purrr::map` (copy-paste error), which would
                    #consume the second input list as the mapper function
st_read = sf::st_read
errors = errors::errors
drop_errors = errors::drop_errors
set_errors = errors::set_errors
pivot_wider = tidyr::pivot_wider
pivot_longer = tidyr::pivot_longer
rename = dplyr::rename
where = tidyselect:::where  #`where` was unexported in older tidyselect,
                            #hence the ::: reach into the internal namespace
22 changes: 5 additions & 17 deletions src/global/general_kernels.R
Original file line number Diff line number Diff line change
Expand Up @@ -1822,36 +1822,24 @@ process_3_ms824 <- function(network, domain, prodname_ms, site_code,

googledrive::drive_rm('GEE/rgee.csv', verbose = FALSE)


final <- fin_table %>%
fin_table <- fin_table %>%
select(date, site_code, dayl, prcp, srad, swe, tmax, tmin, vp)

if(nrow(final) == 0){
if(nrow(fin_table) == 0){
return(generate_ms_exception(glue('No data was retrived for {s}',
s = site_code)))
}

dir.create(glue('data/{n}/{d}/ws_traits/daymet/',
n = network,
d = domain))
d = domain),
showWarnings = FALSE)

file_path <- glue('data/{n}/{d}/ws_traits/daymet/domain_climate.feather',
n = network,
d = domain)

write_feather(final, file_path)

# type <- str_split_fixed(prodname_ms, '__', n = Inf)[,1]
#
# dir <- glue('data/{n}/{d}/ws_traits/{v}/',
# n = network, d = domain, v = type)
#
# final <- append_unprod_prefix(final, prodname_ms)
# final_sum <- append_unprod_prefix(final_sum, prodname_ms)
#
# save_general_files(final_file = final_sum,
# raw_file = final,
# domain_dir = dir)
write_feather(fin_table, file_path)

return()
}
Expand Down
Loading