@@ -50,7 +117,7 @@ Run `revdep_details(, "openmetrics")` for more info
* GitHub: https://github.com/ropensci/rjsonapi
* Source code: https://github.com/cran/rjsonapi
* Date/Publication: 2017-01-09 01:47:26
-* Number of recursive dependencies: 46
+* Number of recursive dependencies: 58
Run `revdep_details(, "rjsonapi")` for more info
diff --git a/revdep/revdep_cran.md b/revdep/revdep_cran.md
index 197327e39..613992c71 100644
--- a/revdep/revdep_cran.md
+++ b/revdep/revdep_cran.md
@@ -1,6 +1,6 @@
## revdepcheck results
-We checked 8 reverse dependencies, comparing R CMD check results across CRAN and dev versions of this package.
+We checked 10 reverse dependencies, comparing R CMD check results across CRAN and dev versions of this package.
* We saw 0 new problems
* We failed to check 0 packages
diff --git a/scripts/release.md b/scripts/release.md
index c2c5a594b..7a5e45cb1 100644
--- a/scripts/release.md
+++ b/scripts/release.md
@@ -21,5 +21,5 @@
1. Submit to CRAN.
1. `devtools::release()` (actually release)
1. Do any revisions CRAN requests on the release branch
-1. Once accepted to CRAN, merge the release branch to master and tag the release.
-1. Bump the version # in DESCRIPTION to the next odd number on master for development of next release.
+1. Once accepted to CRAN, merge the release branch to main and tag the release.
+1. Bump the version # in DESCRIPTION to the next odd number on main for development of next release.
diff --git a/tests/spelling.R b/tests/spelling.R
new file mode 100644
index 000000000..6713838fc
--- /dev/null
+++ b/tests/spelling.R
@@ -0,0 +1,3 @@
+if(requireNamespace('spelling', quietly = TRUE))
+ spelling::spell_check_test(vignettes = TRUE, error = FALSE,
+ skip_on_cran = TRUE)
diff --git a/tests/testthat/files/endpoints.R b/tests/testthat/files/endpoints.R
index e96dbe0ef..f287feda6 100644
--- a/tests/testthat/files/endpoints.R
+++ b/tests/testthat/files/endpoints.R
@@ -9,7 +9,7 @@ function(req, res, forward){
#* @get /test
#* @post /test
#* @more stuff
-#' @param req Roxygen params
+#' @param req roxygen2 params
#* hey
foo <- function(a, b, ..., req, res, forward){
5
diff --git a/tests/testthat/files/router.R b/tests/testthat/files/router.R
index ad912d874..a52a543f9 100644
--- a/tests/testthat/files/router.R
+++ b/tests/testthat/files/router.R
@@ -42,13 +42,20 @@ function(){
stop("ERROR")
}
-#* @get /response
+#* @get /response123
function(res){
res$body <- "overridden"
res$status <- 123
res
}
+#* @get /response200
+function(res){
+ res$body <- "overridden"
+ res$status <- 200
+ res
+}
+
#* @get /path1
#* @get /path2
function(){
diff --git a/tests/testthat/test-endpoint-aroundexec.R b/tests/testthat/test-endpoint-aroundexec.R
index a4652657e..b85c9e385 100644
--- a/tests/testthat/test-endpoint-aroundexec.R
+++ b/tests/testthat/test-endpoint-aroundexec.R
@@ -128,7 +128,7 @@ test_that("serializers can register all pre,post,aroundexec stages", {
test_that("not producing an image produces an error", {
- skip_on_os("win")
+ skip_on_os("windows")
root <- with_tmp_serializers({
plumb(test_path("files/endpoint-serializer.R")) %>% pr_set_debug(TRUE)
diff --git a/tests/testthat/test-parse-body.R b/tests/testthat/test-parse-body.R
index f2054e8e8..ec2b37703 100644
--- a/tests/testthat/test-parse-body.R
+++ b/tests/testthat/test-parse-body.R
@@ -90,7 +90,7 @@ test_that("Test tsv parser", {
})
test_that("Test feather parser", {
- skip_if_not_installed("feather")
+ skip_if_not_installed("arrow")
tmp <- tempfile()
on.exit({
@@ -98,10 +98,10 @@ test_that("Test feather parser", {
}, add = TRUE)
r_object <- iris
- feather::write_feather(r_object, tmp)
+ arrow::write_feather(r_object, tmp)
val <- readBin(tmp, "raw", 10000)
- parsed <- parse_body(val, "application/feather", make_parser("feather"))
+ parsed <- parse_body(val, "application/vnd.apache.arrow.file", make_parser("feather"))
# convert from feather tibble to data.frame
parsed <- as.data.frame(parsed, stringsAsFactors = FALSE)
attr(parsed, "spec") <- NULL
@@ -109,6 +109,60 @@ test_that("Test feather parser", {
expect_equal(parsed, r_object)
})
+test_that("Test parquet parser", {
+ skip_if_not_installed("arrow")
+
+ tmp <- tempfile()
+ on.exit({
+ file.remove(tmp)
+ }, add = TRUE)
+
+ r_object <- iris
+ arrow::write_parquet(r_object, tmp)
+ val <- readBin(tmp, "raw", 10000)
+
+ parsed <- parse_body(val, "application/vnd.apache.parquet", make_parser("parquet"))
+ # convert from parquet tibble to data.frame
+ parsed <- as.data.frame(parsed, stringsAsFactors = FALSE)
+ attr(parsed, "spec") <- NULL
+
+ expect_equal(parsed, r_object)
+})
+
+test_that("Test geojson parser", {
+ skip_if_not_installed("geojsonsf")
+ skip_if_not_installed("sf")
+
+ # Test sf object w/ fields
+ geojson <- '{"type":"FeatureCollection","features":[{"type":"Feature","properties":{"a":3},"geometry":{"type":"Point","coordinates":[1,2]}},{"type":"Feature","properties":{"a":4},"geometry":{"type":"Point","coordinates":[3,4]}}]}'
+ parsed <- parse_body(geojson, "application/geo+json", make_parser("geojson"))
+ expect_equal(parsed, geojsonsf::geojson_sf(geojson))
+
+ # Test sfc
+ geojson <- '[
+ { "type":"Point","coordinates":[0,0]},
+ {"type":"LineString","coordinates":[[0,0],[1,1]]}
+ ]'
+ parsed <- parse_body(geojson, "application/geo+json", make_parser("geojson"))
+ expect_equal(parsed, geojsonsf::geojson_sf(geojson))
+
+ # Test simple sf object
+ geojson <- '{ "type" : "Point", "coordinates" : [0, 0] }'
+ parsed <- parse_body(geojson, "application/geo+json", make_parser("geojson"))
+ expect_equal(parsed, geojsonsf::geojson_sf(geojson))
+
+ # Test geojson file
+ tmp <- tempfile()
+ on.exit({
+ file.remove(tmp)
+ }, add = TRUE)
+
+ writeLines(geojson, tmp)
+ val <- readBin(tmp, "raw", 1000)
+ parsed <- parse_body(val, "application/geo+json", make_parser("geojson"))
+ expect_equal(parsed, geojsonsf::geojson_sf(geojson))
+
+})
test_that("Test multipart output is reduced for argument matching", {
bin_file <- test_path("files/multipart-file-names.bin")
diff --git a/tests/testthat/test-plumber.R b/tests/testthat/test-plumber.R
index b141a531f..f87a71ae8 100644
--- a/tests/testthat/test-plumber.R
+++ b/tests/testthat/test-plumber.R
@@ -63,15 +63,15 @@ test_that("plumb accepts a file", {
test_that("plumb gives a good error when passing in a dir instead of a file", {
+ # brittle test. Fails on r-devel-windows-x86_64-gcc10-UCRT
+ skip_on_cran()
+
if (isWindows()) {
- # https://stat.ethz.ch/R-manual/R-devel/library/base/html/files.html
- # "However, directory names must not include a trailing backslash or slash on Windows"
- # Appveyor does not work with "files/", but does trigger the proper error with "files\\"
- expect_error(plumb(test_path("files\\")), "File does not exist:")
- } else {
- expect_error(plumb(test_path("files/")), "Expecting a file but found a directory: 'files/'")
+ # File paths are hard to work with and are inconsistent
+ skip_on_os("windows")
}
+ expect_error(plumb(test_path("files/")), "Expecting a file but found a directory: 'files/'")
})
test_that("plumb accepts a directory with a `plumber.R` file", {
diff --git a/tests/testthat/test-response.R b/tests/testthat/test-response.R
index c459f02e2..b3cec4611 100644
--- a/tests/testthat/test-response.R
+++ b/tests/testthat/test-response.R
@@ -44,3 +44,10 @@ test_that("response properly sets cookies with multiple options", {
head <- res$toResponse()$headers
expect_equal(head[["Set-Cookie"]], "abc=two%20words; HttpOnly; Secure; SameSite=None")
})
+
+test_that("Body on HTTP responses which forbid it", {
+ res <- PlumberResponse$new()
+ res$body <- "Hello"
+ res$status <- 204
+ expect_null(res$toResponse()$body)
+})
diff --git a/tests/testthat/test-serializer-device.R b/tests/testthat/test-serializer-device.R
index e747f50d5..f58aa2e63 100644
--- a/tests/testthat/test-serializer-device.R
+++ b/tests/testthat/test-serializer-device.R
@@ -29,7 +29,7 @@ expect_device_output <- function(name, content_type, capability_type = name) {
if (!is.null(capability_type)) {
if (!capabilities(capability_type)) {
- testthat::skip("Graphics device type not supported: ", name)
+ testthat::skip(paste0("Graphics device type not supported: ", name))
}
}
diff --git a/tests/testthat/test-serializer-feather.R b/tests/testthat/test-serializer-feather.R
index c191997da..cfa4bc591 100644
--- a/tests/testthat/test-serializer-feather.R
+++ b/tests/testthat/test-serializer-feather.R
@@ -1,15 +1,15 @@
context("feather serializer")
test_that("feather serializes properly", {
- skip_if_not_installed("feather")
+ skip_if_not_installed("arrow")
d <- data.frame(a=1, b=2, c="hi")
val <- serializer_feather()(d, data.frame(), PlumberResponse$new(), stop)
expect_equal(val$status, 200L)
- expect_equal(val$headers$`Content-Type`, "application/feather")
+ expect_equal(val$headers$`Content-Type`, "application/vnd.apache.arrow.file")
# can test by doing a full round trip if we believe the parser works via `test-parse-body.R`
- parsed <- parse_body(val$body, "application/feather", make_parser("feather"))
+ parsed <- parse_body(val$body, "application/vnd.apache.arrow.file", make_parser("feather"))
# convert from feather tibble to data.frame
parsed <- as.data.frame(parsed, stringsAsFactors = FALSE)
attr(parsed, "spec") <- NULL
@@ -18,7 +18,7 @@ test_that("feather serializes properly", {
})
test_that("Errors call error handler", {
- skip_if_not_installed("feather")
+ skip_if_not_installed("arrow")
errors <- 0
errHandler <- function(req, res, err){
@@ -31,7 +31,7 @@ test_that("Errors call error handler", {
})
test_that("Errors are rendered correctly with debug TRUE", {
- skip_if_not_installed("feather")
+ skip_if_not_installed("arrow")
pr <- pr() %>% pr_get("/", function() stop("myerror"), serializer = serializer_feather()) %>% pr_set_debug(TRUE)
capture.output(res <- pr$serve(make_req(pr = pr), PlumberResponse$new("csv")))
diff --git a/tests/testthat/test-serializer-geojson.R b/tests/testthat/test-serializer-geojson.R
new file mode 100644
index 000000000..6ac6eaad6
--- /dev/null
+++ b/tests/testthat/test-serializer-geojson.R
@@ -0,0 +1,35 @@
+test_that("GeoJSON serializes properly", {
+ skip_if_not_installed("geojsonsf")
+ skip_if_not_installed("sf")
+
+ # Objects taken from ?st_sf() examples.
+ sfc <- sf::st_sfc(sf::st_point(1:2), sf::st_point(3:4))
+ sf <- sf::st_sf(a = 3:4, sfc)
+
+ # Test sfc
+ val <- serializer_geojson()(sfc, data.frame(), PlumberResponse$new(), stop)
+ expect_equal(val$status, 200L)
+ expect_equal(val$headers$`Content-Type`, "application/geo+json")
+ expect_equal(val$body, geojsonsf::sfc_geojson(sfc))
+
+ # Test sf
+ val <- serializer_geojson()(sf, data.frame(), PlumberResponse$new(), stop)
+ expect_equal(val$status, 200L)
+ expect_equal(val$headers$`Content-Type`, "application/geo+json")
+ expect_equal(val$body, geojsonsf::sf_geojson(sf))
+
+})
+
+test_that("Errors call error handler", {
+ skip_if_not_installed("geojsonsf")
+ skip_if_not_installed("sf")
+
+ errors <- 0
+ errHandler <- function(req, res, err){
+ errors <<- errors + 1
+ }
+
+ expect_equal(errors, 0)
+ serializer_geojson()(parse(text="h$534i} {!"), data.frame(), PlumberResponse$new(), errorHandler = errHandler)
+ expect_equal(errors, 1)
+})
diff --git a/tests/testthat/test-serializer-htmlwidgets.R b/tests/testthat/test-serializer-htmlwidgets.R
index b0f36a992..5e5105270 100644
--- a/tests/testthat/test-serializer-htmlwidgets.R
+++ b/tests/testthat/test-serializer-htmlwidgets.R
@@ -22,7 +22,7 @@ test_that("htmlwidgets serialize properly", {
expect_equal(val$status, 200L)
expect_equal(val$headers$`Content-Type`, "text/html; charset=UTF-8")
# Check that content is encoded
- expect_match(val$body, "url(data:image/png;base64", fixed = TRUE)
+ expect_match(val$body, "url\\(['\"]?data:image\\/png;base64")
})
test_that("Errors call error handler", {
diff --git a/tests/testthat/test-serializer.R b/tests/testthat/test-serializer.R
index 3dc7c35e5..081d82ac0 100644
--- a/tests/testthat/test-serializer.R
+++ b/tests/testthat/test-serializer.R
@@ -4,9 +4,12 @@ test_that("Responses returned directly aren't serialized", {
res <- PlumberResponse$new("")
r <- pr(test_path("files/router.R"))
- val <- r$serve(make_req("GET", "/response"), res)
- expect_equal(val$body, "overridden")
+ val <- r$serve(make_req("GET", "/response123"), res)
+ expect_equal(val$body, NULL)
expect_equal(val$status, 123)
+ val <- r$serve(make_req("GET", "/response200"), res)
+ expect_equal(val$body, "overridden")
+ expect_equal(val$status, 200)
})
test_that("JSON is the default serializer", {
diff --git a/tests/testthat/test-shared-secret.R b/tests/testthat/test-shared-secret.R
index a2b77fea3..6f882f3c2 100644
--- a/tests/testthat/test-shared-secret.R
+++ b/tests/testthat/test-shared-secret.R
@@ -5,12 +5,23 @@ test_that("requests with shared secrets pass, w/o fail", {
pr <- pr()
pr$handle("GET", "/", function(){ 123 })
+ req <- make_req("GET", "/", pr = pr)
# No shared secret
- req <- make_req("GET", "/")
res <- PlumberResponse$new()
- capture.output(pr$route(req, res))
+ output <- pr$route(req, res)
expect_equal(res$status, 400)
+ expect_equal(output, list(error = "400 - Bad request"))
+
+ # When debugging, we get additional details in the error.
+ pr$setDebug(TRUE)
+ res <- PlumberResponse$new()
+ output <- pr$route(req, res)
+ expect_equal(res$status, 400)
+ expect_equal(output, list(
+ error = "400 - Bad request",
+ message = "Shared secret mismatch"))
+ pr$setDebug(FALSE)
# Set shared secret
assign("HTTP_PLUMBER_SHARED_SECRET", "abcdefg", envir=req)
diff --git a/tests/testthat/test-static.R b/tests/testthat/test-static.R
index 3ad92054b..c481f9cb5 100644
--- a/tests/testthat/test-static.R
+++ b/tests/testthat/test-static.R
@@ -17,6 +17,22 @@ test_that("static txt file is served", {
expect_equal(trimws(rawToChar(res$body)), "I am a text file.")
})
+test_that("static txt file with encoded URI is served", {
+
+ # Some file systems cannot handle these characters.
+ testthat::skip_on_cran()
+
+ res <- PlumberResponse$new()
+ f <- test_path("files/static/测试.txt")
+ file.create(f)
+ on.exit(unlink(f), add = TRUE)
+ writeChar("here be dragons", f)
+ pr$route(make_req("GET", "/测试.txt"), res)
+ unlink(test_path("files/static/测试.txt"))
+ expect_equal(res$headers$`Content-Type`, "text/plain")
+ expect_equal(trimws(rawToChar(res$body)), "here be dragons")
+})
+
test_that("static html file is served", {
res <- PlumberResponse$new()
pr$route(make_req("GET", "/index.html"), res)
diff --git a/tests/testthat/test-zzz-openapi.R b/tests/testthat/test-zzz-openapi.R
index 6c9fcc177..0b102fb0a 100644
--- a/tests/testthat/test-zzz-openapi.R
+++ b/tests/testthat/test-zzz-openapi.R
@@ -336,7 +336,7 @@ test_that("no params plumber router still produces spec when there is a func par
test_that("Response content type set with serializer", {
a <- pr()
- pr_get(a, "/json", function() {"OK"}, serializer = serializer_json)
+ pr_get(a, "/json", function() {"OK"}, serializer = serializer_json())
pr_get(a, "/csv", function() {"OK"}, serializer = serializer_csv())
spec <- a$getApiSpec()
expect_equal(spec$paths$`/json`$get$responses$`200`$content, list("application/json" = list(schema = list(type = "object"))))
diff --git a/vignettes/_ex-github.Rmd b/vignettes/_ex-github.Rmd
index 55b30f667..96de7c7c3 100644
--- a/vignettes/_ex-github.Rmd
+++ b/vignettes/_ex-github.Rmd
@@ -33,7 +33,7 @@ At this point, any commits that are pushed to that repository will trigger a POS
In this example, we'll demonstrate how to setup an plumber endpoint that is capable of listening for Webhook notifications from GitHub. The example will simply subscribe to `push` notifications on the plumber repository (which are triggered any time a commit is pushed to that repo) and, in response, will install the most up-to-date version of plumber.
-We'll add one additional endpoint that enables us to see what version of plumber is installed on the system at that moment. You should find that the `sha1` value of the response matches the latest commit hash in the master branch of plumber.
+We'll add one additional endpoint that enables us to see what version of plumber is installed on the system at that moment. You should find that the `sha1` value of the response matches the latest commit hash in the main branch of plumber.
diff --git a/vignettes/annotations.Rmd b/vignettes/annotations.Rmd
index 57604e493..46210db82 100644
--- a/vignettes/annotations.Rmd
+++ b/vignettes/annotations.Rmd
@@ -13,7 +13,7 @@ source("_helpers.R")
## Annotations {#annotations}
-Annotations are specially-structured comments used in your plumber file to create an API. A full annotation line starts with `#*` or `#'`, then the annotation keyword `@...`, any number of space characters followed by the content. It is recommended to use `#*` to differentiate them from `Roxygen2` annotations.
+Annotations are specially-structured comments used in your plumber file to create an API. A full annotation line starts with `#*` or `#'`, then the annotation keyword `@...`, any number of space characters followed by the content. It is recommended to use `#*` to differentiate them from `roxygen2` annotations.
## Global annotations {#global-annotations}
@@ -74,7 +74,7 @@ Annotation | Argument | Description/References
`@parser` | `Alias` `[Args list]` | Some parsers accept arguments. See [parsers reference](https://www.rplumber.io/reference/parsers.html). Can be repeated to allow multiple parsers on the same endpoint. Aliases : `r paste0("", registered_parsers(), "
", collapse = ", ")` from [`registered_parsers()`](https://www.rplumber.io/reference/register_parser.html).
`@param` | `Name`[`:Type` `Description`] | Enclose `Type` between square brackets `[]` to indicate it is an array. Can be repeated to define different parameters.
`@response` | `Name` `Description` | Simple [Response object](http://spec.openapis.org/oas/v3.0.3#response-object). Can be repeated to define different responses.
-`@tags` | `Tag` | Can be repeated to add multiple tags. Quote with " or ' to use non word character (like spaces) in `Tag`. [Tag field](http://spec.openapis.org/oas/v3.0.3#operation-object)
+`@tag` | `Tag` | Can be repeated to add multiple tags. Quote with " or ' to use non word character (like spaces) in `Tag`. [Tag field](http://spec.openapis.org/oas/v3.0.3#operation-object)
`@preempt` | `Filter` | Specify that this endpoint has to execute before `Filter`. [Filters](./programmatic-usage.html#defining-filters)
None | `Comments` | Lines without annotation will be mapped to [Summary field](http://spec.openapis.org/oas/v3.0.3#fixed-fields-6).
@@ -85,9 +85,9 @@ Types are used to define API inputs. You can use most of them in dynamic routes.
Query parameters currently need to be explicitly converted as they are pushed as is (character) to block expression.
Only dynamic route parameters are converted to specified `@param` type before being pushed to block expression.
-Plumber parameter type to OpenAPI type refenrence. For programmatic use, pick the one with an asterisk.
+Plumber parameter type to OpenAPI type reference. For programmatic use, pick the one with an asterisk.
-Type | OpenApi | Availability
+Type | OpenAPI | Availability
---------------- | ----------- | ---------
`bool`, `boolean`*, `logical` | `boolean` | `query`, `path`
`dbl`, `double`, `float`, `number`*, `numeric` | `number` `format:double` | `query`, `path`
@@ -217,7 +217,7 @@ pr() %>%
Annotation | Arguments | Description/References
-----------| --------- | ----------------------
-`@plumber` | None | Modify plumber router from plumber file. The plumber router provided to the function **must** be returned.
+`@plumber` | None | Modify plumber router from plumber file. The plumber router provided to the function **must** be returned. In most cases, anonymous functions are used following the `#* @plumber` annotation. However, named functions can also be used. When a named function is used, it must be referenced without parentheses.
##### Annotations example
@@ -228,6 +228,16 @@ function(pr) {
pr_set_debug(TRUE) %>%
pr_set_docs("swagger")
}
+
+# Named function
+debug_swagger <- function(pr) {
+ pr %>%
+ pr_set_debug(TRUE) %>%
+ pr_set_docs("swagger")
+}
+
+#* @plumber
+debug_swagger
```
##### Equivalent programmatic usage
diff --git a/vignettes/execution-model.Rmd b/vignettes/execution-model.Rmd
index 7dc0a2871..6f87b9d11 100644
--- a/vignettes/execution-model.Rmd
+++ b/vignettes/execution-model.Rmd
@@ -82,7 +82,7 @@ If you'd like to use cookies to store information with guarantees that the user
The final option to consider when coordinating state for an API is leveraging an external data store. This could be a relational database (like MySQL or Amazon RedShift), a non-relational database (like MongoDB), or an transactional data store like Redis.
-One important consideration for any of these options is to ensure that they are "transactional," meaning that two Plumber processes trying to write at the same time won't overwrite one another. If you're interested in pursuing this option you should see [db.rstudio.com](http://db.rstudio.com/) or looks at [some](http://shiny.rstudio.com/articles/overview.html) of the [resources](http://shiny.rstudio.com/articles/sql-injections.html) [put together](http://shiny.rstudio.com/articles/pool-basics.html) for Shiny as pertains to dealing with databases in a web-accessible R platform.
+One important consideration for any of these options is to ensure that they are "transactional," meaning that two Plumber processes trying to write at the same time won't overwrite one another. If you're interested in pursuing this option you should see [db.rstudio.com](http://db.rstudio.com/) or look at [some](http://shiny.rstudio.com/articles/overview.html) of the [resources](http://shiny.rstudio.com/articles/sql-injections.html) [put together](http://shiny.rstudio.com/articles/pool-basics.html) for Shiny as pertains to dealing with databases in a web-accessible R platform.
## Exit Handlers
diff --git a/vignettes/hosting.Rmd b/vignettes/hosting.Rmd
index e2da05271..a8bc167d1 100644
--- a/vignettes/hosting.Rmd
+++ b/vignettes/hosting.Rmd
@@ -267,7 +267,7 @@ We can use the [dockercloud/haproxy](https://github.com/docker/dockercloud-hapro
The trick that allows this image to listen in to our scaling of `app1` is by passing in the docker socket as a shared volume. Note that this particular arrangement will differ based on your host OS. The above configuration is intended for Linux, but MacOS X users would require a [slightly different config](https://github.com/docker/dockercloud-haproxy#example-of-docker-composeyml-running-in-linux).
-We could export port `80` of our new load balancer to port `80` of our host machine if we solely wanted to load-balance a single application. Alternatively, we can actually use both nginx (to handle the routing of various applications) and HAProxy (to handle the load balancing of a particular application). To do that, we'd merely add a new `location` block to our `nginx.conf` file that knows how to send traffic to HAproxy, or modify the existing `location` block to send traffic to the load balancer instead of going directly to the application.
+We could export port `80` of our new load balancer to port `80` of our host machine if we solely wanted to load-balance a single application. Alternatively, we can actually use both nginx (to handle the routing of various applications) and HAProxy (to handle the load balancing of a particular application). To do that, we'd merely add a new `location` block to our `nginx.conf` file that knows how to send traffic to HAProxy, or modify the existing `location` block to send traffic to the load balancer instead of going directly to the application.
So the `location /app1/` block becomes:
diff --git a/vignettes/rendering-output.Rmd b/vignettes/rendering-output.Rmd
index bda83f923..2ef0f9d37 100644
--- a/vignettes/rendering-output.Rmd
+++ b/vignettes/rendering-output.Rmd
@@ -59,7 +59,8 @@ Annotation | Content Type | Description/References
`@serializer rds` | `application/rds` | Object processed with `base::serialize()`
`@serializer csv` | `text/csv` | Object processed with `readr::format_csv()`
`@serializer tsv` | `text/tab-separated-values` | Object processed with `readr::format_tsv()`
-`@serializer feather` | `application/feather` | Object processed with `feather::write_feather()`
+`@serializer feather` | `application/vnd.apache.arrow.file` | Object processed with `arrow::write_feather()`
+`@serializer parquet` | `application/vnd.apache.parquet` | Object processed with `arrow::write_parquet()`
`@serializer yaml` | `text/x-yaml` | Object processed with `yaml::as_yaml()`
`@serializer htmlwidget` | `text/html; charset=utf-8` | `htmlwidgets::saveWidget()`
`@serializer text` | `text/plain` | Text output processed by `as.character()`
diff --git a/vignettes/security.Rmd b/vignettes/security.Rmd
index 0d8dc7af0..258724a42 100644
--- a/vignettes/security.Rmd
+++ b/vignettes/security.Rmd
@@ -138,7 +138,7 @@ In the [section on setting cookies](./rendering-output.html#setting-cookies), we
First and foremost, recognize that that the client has the ability to modify or fabricate the cookies that they send to your API. So storing preferences that the user themselves provided in a cookie is not a concern. Storing something with security implications -- like the identity of the user making requests -- however, would be; a malicious user would just need to modify the user ID saved in their cookie in order to trick your API into thinking that they were someone they're not.
-There are two common work-arounds to this concern. You can store all session information on the server identified by long, cryptographically random IDs and only rely on the cookie to store the ID. Or you can use signed/encrypted cookies, as detailed in the [section on setting encrypted cookies](./rendering-output.html#encrypted-cookies).
+There are two common workarounds to this concern. You can store all session information on the server identified by long, cryptographically random IDs and only rely on the cookie to store the ID. Or you can use signed/encrypted cookies, as detailed in the [section on setting encrypted cookies](./rendering-output.html#encrypted-cookies).
You should also be aware of how cookies will be handled and managed by clients. You can manage these properties by providing different parameters to the `setCookie()` call.