Merge pull request #452 from njtierney/use-glue-378
use glue instead of paste or paste0
njtierney committed Nov 5, 2021
2 parents d1086f9 + f54fc47 commit 112a968
Showing 16 changed files with 73 additions and 82 deletions.
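
The change is mechanical throughout: string concatenation with `paste()`/`paste0()`/`sprintf()` becomes `{}` interpolation with `glue::glue()`. A minimal sketch of the pattern, using an illustrative variable rather than code taken from this diff:

```r
library(glue)

mode <- "forward"

# before: build the name by concatenation
data_list_name <- paste0(mode, "_data_list")

# after: build the name by interpolation; glue() returns a character
# vector with an extra "glue" class
data_list_name <- glue::glue("{mode}_data_list")

identical(as.character(data_list_name), "forward_data_list")
#> [1] TRUE
```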
2 changes: 2 additions & 0 deletions DESCRIPTION
@@ -86,6 +86,8 @@ Collate:
Imports:
abind,
callr,
yesno,
glue,
cli (>= 3.0.0),
coda,
methods,
2 changes: 1 addition & 1 deletion NEWS.md
@@ -52,7 +52,7 @@

* Many message, warning, and error prompts have been replaced internally with the {cli} R package for nicer printing. This is a minor change that should result in a more pleasant user experience (#423 #425).


* Internally, where sensible, `greta` now uses the `glue` package to create messages/outputs (#378).

# greta 0.3.1

15 changes: 9 additions & 6 deletions R/dag_class.R
@@ -86,7 +86,7 @@ dag_class <- R6Class(
expr <- deparse(substitute(expr))
}

expr <- paste0("sess$run(", expr, ", feed_dict = feed_dict)")
expr <- glue::glue("sess$run({expr}, feed_dict = feed_dict)")

self$tf_run(expr, as_text = TRUE)
},
@@ -279,7 +279,7 @@ dag_class <- R6Class(
args <- list(free_state)
}

names <- paste0(names(params), "_free")
names <- glue::glue("{names(params)}_free")

for (i in seq_along(names)) {
assign(names[i], args[[i]], envir = tfe)
@@ -381,7 +381,7 @@ dag_class <- R6Class(
# define adjusted joint density

# get names of Jacobian adjustment tensors for all variable nodes
adj_names <- paste0(self$get_tf_names(types = "variable"), "_adj")
adj_names <- glue::glue("{self$get_tf_names(types = 'variable')}_adj")

# get TF density tensors for all distribution
adj <- lapply(adj_names, get, envir = self$tf_environment)
@@ -537,11 +537,11 @@ dag_class <- R6Class(
parameters
},
get_tf_data_list = function() {
data_list_name <- paste0(self$mode, "_data_list")
data_list_name <- glue::glue("{self$mode}_data_list")
self$tf_environment[[data_list_name]]
},
set_tf_data_list = function(element_name, value) {
data_list_name <- paste0(self$mode, "_data_list")
data_list_name <- glue::glue("{self$mode}_data_list")
self$tf_environment[[data_list_name]][[element_name]] <- value
},
build_feed_dict = function(dict_list = list(),
@@ -723,7 +723,10 @@ dag_class <- R6Class(
# find the cluster IDs
n <- nrow(r)
neighbours <- lapply(seq_len(n), function(i) which(r[i, ]))
cluster_names <- vapply(neighbours, paste, collapse = "_", FUN.VALUE = "")
cluster_names <- vapply(X = neighbours,
FUN = paste,
FUN.VALUE = character(1),
collapse = "_")
cluster_id <- match(cluster_names, unique(cluster_names))

# name them
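
Several of the hunks above (for example the `_free` and `_adj` names) rely on `glue::glue()` being vectorised over its inputs, just as `paste0()` is. A small sketch with made-up parameter names:

```r
library(glue)

params <- list(alpha = 1, beta = 2)

paste0(names(params), "_free")
#> [1] "alpha_free" "beta_free"

# glue() recycles over the vector and returns one element per input,
# printed without quotes because of the "glue" class
glue("{names(params)}_free")
#> alpha_free
#> beta_free
```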
32 changes: 13 additions & 19 deletions R/greta_array_class.R
@@ -173,9 +173,8 @@ as.greta_array.default <- function(x, optional = FALSE, original_x = x, ...) {
#' @export
print.greta_array <- function(x, ...) {
node <- get_node(x)
text <- sprintf(
"greta array (%s)\n\n",
node$description()
text <- glue::glue(
"greta array ({node$description()})\n\n\n"
)

cat(text)
@@ -205,27 +204,23 @@ summary.greta_array <- function(object, ...) {
print.summary.greta_array <- function(x, ...) {

# array type
type_text <- sprintf(
"'%s' greta array",
x$type
type_text <- glue::glue(
"'{x$type}' greta array"
)

if (x$length == 1) {
shape_text <- "with 1 element"
} else {
dim_text <- paste(x$dim, collapse = "x")
shape_text <- sprintf(
"with %i elements (%s)",
x$length,
dim_text
dim_text <- glue::glue_collapse(x$dim, sep = "x")
shape_text <- glue::glue(
"with {x$length} elements ({dim_text})"
)
}

# distribution info
if (!is.null(x$distribution_name)) {
distribution_text <- sprintf(
"following a %s distribution",
x$distribution_name
distribution_text <- glue::glue(
"following a {x$distribution_name} distribution"
)
} else {
distribution_text <- ""
@@ -238,10 +233,9 @@ print.summary.greta_array <- function(x, ...) {
values_text <- paste0("\n", paste(values_print, collapse = "\n"))
}

text <- paste(type_text, shape_text, distribution_text, paste0(
"\n",
values_text
))
text <- glue::glue(
"{type_text} {shape_text} {distribution_text} \n {values_text}"
)
cat(text)
invisible(x)
}
@@ -254,7 +248,7 @@ str.greta_array <- function(object, ...) {
array <- unclass(value)
string <- capture.output(str(array))
string <- gsub("NA", "?", string)
string <- paste("'greta_array'", string)
string <- glue::glue("'greta_array' {string}")
cat(string)
}

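
One hunk above swaps `paste(x$dim, collapse = "x")` for `glue::glue_collapse(x$dim, sep = "x")`; `glue_collapse()` is glue's counterpart for reducing a vector to a single string. A sketch with assumed dimensions:

```r
library(glue)

dims <- c(3, 4, 2)

paste(dims, collapse = "x")
#> [1] "3x4x2"

# glue_collapse() joins the elements with `sep`, returning a glue object
glue_collapse(dims, sep = "x")
#> 3x4x2
```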
18 changes: 10 additions & 8 deletions R/inference.R
@@ -311,14 +311,16 @@ run_samplers <- function(samplers,
!is.null(greta_stash$callbacks)

if (plan_is$parallel & plan_is$local & length(samplers) > 1) {
cores_text <- ifelse(n_cores == 1,
"1 core",
sprintf("up to %i cores", n_cores)
cores_text <- ifelse(
test = n_cores == 1,
yes = "1 core",
no = glue::glue("up to {n_cores} cores")
)
msg <- sprintf(
"\nrunning %i samplers in parallel, each on %s\n\n",
length(samplers),
cores_text
msg <- glue::glue(
"\n",
"running {length(samplers)} samplers in parallel, ",
"each on {cores_text}",
"\n\n"
)
message(msg)
}
@@ -645,7 +647,7 @@ parse_initial_values <- function(initials, dag) {
# variable

# find the corresponding nodes and check they are variable nodes
forward_names <- paste0("all_forward_", dag$node_tf_names)
forward_names <- glue::glue("all_forward_{dag$node_tf_names}")
nodes <- dag$node_list[match(tf_names, forward_names)]
types <- lapply(nodes, node_type)
are_variables <- vapply(types, identical, "variable", FUN.VALUE = FALSE)
25 changes: 10 additions & 15 deletions R/inference_class.R
@@ -70,10 +70,8 @@ inference <- R6Class(
write_percentage_log = function(total, completed, stage) {
if (!is.null(self$percentage_file)) {
percentage <- round(100 * completed / total)
msg <- sprintf(
"%s %i%%",
stage,
percentage
msg <- glue::glue(
"{stage} {percentage}%"
)
writeLines(msg, self$percentage_file)
}
@@ -499,25 +497,22 @@ sampler <- R6Class(
msg <- ""

if (self$n_samplers > 1) {
msg <- sprintf(
"\nsampler %i/%i",
self$sampler_number,
self$n_samplers
msg <- glue::glue(
"\n\nsampler {self$sampler_number}/{self$n_samplers}"
)
}

if (self$n_chains > 1) {
n_cores <- self$model$dag$n_cores

cores_text <- ifelse(n_cores == 1,
"1 core",
sprintf("up to %i cores", n_cores)
cores_text <- ifelse(
test = n_cores == 1,
yes = "1 core",
no = glue::glue("up to {n_cores} cores")
)

msg <- sprintf(
"\nrunning %i chains simultaneously on %s",
self$n_chains,
cores_text
msg <- glue::glue(
"\n\nrunning {self$n_chains} chains simultaneously on {cores_text}"
)
}

2 changes: 1 addition & 1 deletion R/joint.R
@@ -130,7 +130,7 @@ joint_distribution <- R6Class(

for (i in seq_len(n_distributions)) {
self$add_parameter(distribs[[i]],
paste("distribution", i),
glue::glue("distribution {i}"),
shape_matches_output = FALSE
)
}
2 changes: 1 addition & 1 deletion R/mixture.R
@@ -246,7 +246,7 @@ mixture_distribution <- R6Class(

for (i in seq_len(n_distributions)) {
self$add_parameter(distribs[[i]],
paste("distribution", i),
glue::glue("distribution {i}"),
shape_matches_output = FALSE
)
}
10 changes: 4 additions & 6 deletions R/node_class.R
@@ -237,18 +237,16 @@ node <- R6Class(
text <- node_type(self)

if (has_distribution(self)) {
text <- paste(
text,
"following a",
self$distribution$distribution_name,
"distribution"
text <- glue::glue(
"{text} following a ",
"{self$distribution$distribution_name} distribution"
)
}

text
},
get_unique_name = function() {
self$unique_name <- paste0("node_", rhex())
self$unique_name <- glue::glue("node_{rhex()}")
},
plotting_label = function() {
label <- ""
12 changes: 6 additions & 6 deletions R/node_types.R
@@ -13,7 +13,7 @@ data_node <- R6Class(
tf = function(dag) {
tfe <- dag$tf_environment
tf_name <- dag$tf_name(self)
unbatched_name <- paste0(tf_name, "_unbatched")
unbatched_name <- glue::glue("{tf_name}_unbatched")

mode <- dag$how_to_define(self)

@@ -226,7 +226,7 @@ variable_node <- R6Class(

# pass a string depending on whether they are all the same
if (all(constraint_array == constraint_array[1])) {
self$constraint <- paste0("scalar_all_", constraint_array[1])
self$constraint <- glue::glue("scalar_all_{constraint_array[1]}")
} else {
self$constraint <- "scalar_mixed"
}
@@ -307,11 +307,11 @@ variable_node <- R6Class(
# if we're defining the forward mode graph, get the free state, transform,
# and compute any transformation density
if (mode == "forward") {
free_name <- sprintf("%s_free", tf_name)
free_name <- glue::glue("{tf_name}_free")

# create the log jacobian adjustment for the free state
tf_adj <- self$tf_adjustment(dag)
adj_name <- sprintf("%s_adj", tf_name)
adj_name <- glue::glue("{tf_name}_adj")
assign(adj_name,
tf_adj,
envir = dag$tf_environment
@@ -388,7 +388,7 @@ variable_node <- R6Class(
tf_adjustment = function(dag) {

# find free version of node
free_tensor_name <- paste0(dag$tf_name(self), "_free")
free_tensor_name <- glue::glue("{dag$tf_name(self)}_free")
free_tensor <- get(free_tensor_name, envir = dag$tf_environment)

# apply jacobian adjustment to it
@@ -610,7 +610,7 @@ distrib <- function(distribution, ...) {
check_tf_version("error")

# get and initialize the distribution, with a default value node
constructor <- get(paste0(distribution, "_distribution"),
constructor <- get(glue::glue("{distribution}_distribution"),
envir = parent.frame()
)
distrib <- constructor$new(...)
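
The `distrib()` hunk above passes a glue string straight to `get()`. That works because a glue object is an ordinary character vector with an extra class, so base functions that expect a name as a string (`get()`, `assign()`, `writeLines()`) accept it unchanged. A sketch with a hypothetical constructor standing in for greta's distribution classes:

```r
library(glue)

# hypothetical constructor, not greta's real class generator
normal_distribution <- function(...) list(name = "normal", ...)

distribution <- "normal"
constructor <- get(glue("{distribution}_distribution"))
constructor(mean = 0, sd = 1)$name
#> [1] "normal"
```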
4 changes: 2 additions & 2 deletions R/optimisers.R
@@ -64,7 +64,7 @@ define_scipy_optimiser <- function(name,
class = scipy_optimiser
)

class_name <- paste0(name, "_optimiser")
class_name <- glue::glue("{name}_optimiser")
class(obj) <- c(class_name, "optimiser")
obj
}
@@ -81,7 +81,7 @@ define_tf_optimiser <- function(name,
other_args = other_args
)

class_name <- paste0(name, "_optimiser")
class_name <- glue::glue("{name}_optimiser")
class(obj) <- c(class_name, "optimiser")
obj
}
8 changes: 3 additions & 5 deletions R/progress_bar.R
@@ -27,10 +27,8 @@ create_progress_bar <- function(phase, iter, pb_update, width, ...) {
count_pad <- paste0(rep(" ", 2 * digit_diff), collapse = "")

# formatting
format_text <- sprintf(
" %s :bar %s:iter/:total | eta: :eta :rejection",
name,
count_pad
format_text <- glue::glue(
" {name} :bar {count_pad}:iter/:total | eta: :eta :rejection",
)

pb <- progress::progress_bar$new(
@@ -94,7 +92,7 @@ iterate_progress_bar <- function(pb, it, rejects, chains, file = NULL) {
pad_char <- pmax(0, 2 - nchar(reject_perc_string))
pad <- paste0(rep(" ", pad_char), collapse = "")

reject_text <- paste0("| ", reject_perc_string, "% bad", pad)
reject_text <- glue::glue("| {reject_perc_string}% bad{pad}")
} else {
reject_text <- " "
}
14 changes: 6 additions & 8 deletions R/samplers.R
@@ -103,15 +103,13 @@ print.sampler <- function(x, ...) {

if (!nzchar(values_text)) values_text <- "None"

parameters_text <- sprintf(
"parameters:\n %s",
values_text
)
parameters_text <- glue::glue("
parameters:
{values_text}
")

msg <- sprintf(
"%s object with %s",
class(x)[1],
parameters_text
msg <- glue::glue(
"{class(x)[1]} object with {parameters_text}"
)

cat(msg)
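
The `print.sampler()` hunk above also leans on glue's multi-line handling: leading and trailing blank lines are dropped and common indentation is trimmed, so templates can be written inline without the newline bookkeeping that `sprintf()` needed. Sketch with assumed parameter text:

```r
library(glue)

values_text <- "Lmin = 10, Lmax = 20"

# the surrounding blank lines and the two-space indent are stripped;
# extra indentation beyond the common level is preserved
glue("
  parameters:
    {values_text}
")
#> parameters:
#>   Lmin = 10, Lmax = 20
```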
2 changes: 1 addition & 1 deletion R/tf_functions.R
@@ -151,7 +151,7 @@ tf_apply <- function(x, axis, tf_fun_name) {
# permute the tensor to get the non-batch dim first, do the relevant
# "unsorted_segment_*" op, then permute it back
tf_tapply <- function(x, segment_ids, num_segments, op_name) {
op_name <- paste0("unsorted_segment_", op_name)
op_name <- glue::glue("unsorted_segment_{op_name}")

x <- tf$transpose(x, perm = c(1:2, 0L))
x <- tf$math[[op_name]](x,
