Skip to content

Commit

Permalink
Release v0.0.0.9001
Browse files Browse the repository at this point in the history
  • Loading branch information
Dmytro Perepolkin committed Jul 27, 2018
1 parent 01516d2 commit b3ea36c
Show file tree
Hide file tree
Showing 18 changed files with 668 additions and 90 deletions.
1 change: 1 addition & 0 deletions .Rbuildignore
Original file line number Diff line number Diff line change
Expand Up @@ -2,3 +2,4 @@
^\.Rproj\.user$
^README\.Rmd$
^data-raw$
^LICENSE\.md$
14 changes: 13 additions & 1 deletion DESCRIPTION
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
Package: polite
Version: 0.0.0.9000
Version: 0.0.0.9001
Title: Be nice on the web
Description: Be responsible when scraping data from APIs or websites. Remember to introduce yourself, ask for permission and stay well mannered while you do what you came for.
Authors@R: person("Dmytro", "Perepolkin", email = "dperepolkin@gmail.com", role = c("aut", "cre"))
Expand All @@ -9,3 +9,15 @@ LazyData: true
ByteCompile: true
URL: https://github.com/dmi3kno/polite
BugReports: https://github.com/dmi3kno/polite/issues
Roxygen: list(markdown = TRUE)
RoxygenNote: 6.0.1
Imports:
httr,
magrittr,
memoise,
ratelimitr,
robotstxt,
stats,
urltools
Suggests:
rvest
2 changes: 2 additions & 0 deletions LICENSE
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
YEAR: 2018
COPYRIGHT HOLDER: Dmytro Perepolkin
21 changes: 21 additions & 0 deletions LICENSE.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
# MIT License

Copyright (c) 2018 Dmytro Perepolkin

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
25 changes: 23 additions & 2 deletions NAMESPACE
Original file line number Diff line number Diff line change
@@ -1,2 +1,23 @@
# Generated by roxygen2: fake comment so roxygen2 overwrites silently.
exportPattern("^[^\\.]")
# Generated by roxygen2: do not edit by hand

export("%>%")
export("%||%")
export(bow)
export(nod)
export(scrape)
importFrom(httr,GET)
importFrom(httr,add_headers)
importFrom(httr,config)
importFrom(httr,content)
importFrom(httr,handle)
importFrom(httr,http_error)
importFrom(httr,warn_for_status)
importFrom(magrittr,"%>%")
importFrom(memoise,forget)
importFrom(ratelimitr,limit_rate)
importFrom(ratelimitr,rate)
importFrom(robotstxt,robotstxt)
importFrom(stats,na.omit)
importFrom(urltools,domain)
importFrom(urltools,suffix_extract)
importFrom(urltools,url_parse)
15 changes: 15 additions & 0 deletions NEWS
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
polite v0.0.0.9001 (Release date: 2018-07-27)
==============

Changes:

* Implemented bow(), nod() and scrape()
* Added documentation and examples
* Updated README to include examples

polite v0.0.0.9000 (Release date: 2018-07-23)
==============

Changes:

* Polite is born!
73 changes: 73 additions & 0 deletions R/bow.R
Original file line number Diff line number Diff line change
@@ -0,0 +1,73 @@
# Memoised wrapper around robotstxt::robotstxt so that repeated bows to the
# same domain do not re-fetch robots.txt on every call. Cleared via
# memoise::forget() when bow(force = TRUE) is used.
robotstxt_memoise <- memoise::memoise(robotstxt::robotstxt)

#' Introduce yourself to the host
#'
#' @param url url of the host to introduce yourself to
#' @param user_agent character value passed to user_agent string
#' @param force refresh all memoised functions. Clears up all robotstxt and scrape cache. Default is FALSE.
#' @param ... other curl parameters wrapped into httr::config function
#'
#' @return object of class `polite`, `session`
#'
#' @examples
#' \dontrun{
#' library(polite)
#'
#' host <- "https://www.cheese.com"
#' session <- bow(host)
#' session
#' }
#' @importFrom urltools domain suffix_extract url_parse
#' @importFrom robotstxt robotstxt
#' @importFrom httr handle config add_headers
#' @importFrom memoise forget
#' @importFrom stats na.omit
#' @export
bow <- function(url,
                user_agent = "polite R package - https://github.com/dmi3kno/polite",
                force = FALSE,
                ...){
  stopifnot(is.character(user_agent), length(user_agent) == 1) # write meaningful error ref Lionel talk
  stopifnot(is.character(url), length(url) == 1) # write meaningful error ref Lionel talk

  if (force) {
    # BUG FIX: forget the *memoised* wrapper. The original forgot
    # robotstxt::robotstxt, which is not memoised, so forgetting it was a
    # no-op and the robots.txt cache was never actually cleared.
    memoise::forget(robotstxt_memoise)
    memoise::forget(scrape)
  }

  url_parsed <- urltools::url_parse(url)

  # Full host including subdomain, e.g. "www.cheese.com"; na.omit() drops
  # missing components (e.g. no subdomain) before collapsing with ".".
  url_df <- urltools::suffix_extract(url_parsed$domain)
  url_subdomain <- paste(stats::na.omit(c(url_df$subdomain[1],
                                          url_df$domain[1],
                                          url_df$suffix[1])), collapse = ".")
  rt <- robotstxt_memoise(domain = url_subdomain,
                          user_agent = user_agent, warn = FALSE)
  # If the subdomain has no robots.txt permissions, fall back to the bare
  # domain (e.g. "cheese.com").
  if (!nrow(rt$permissions)) {
    url_domain <- paste(stats::na.omit(c(url_df$domain[1],
                                         url_df$suffix[1])), collapse = ".")
    rt <- robotstxt_memoise(domain = url_domain,
                            user_agent = user_agent)
  }

  # Session object: httr handle + config carrying the user-agent header,
  # plus the parsed robots.txt for permission checks in scrape().
  self <- structure(
    list(
      handle = httr::handle(url),
      config = c(httr::config(autoreferer = 1L),
                 httr::add_headers("user-agent" = user_agent), ...),
      url = url,
      back = character(),
      forward = character(),
      response = NULL,
      html = new.env(parent = emptyenv(), hash = FALSE),
      user_agent = user_agent,
      domain = url_subdomain,
      robotstxt = rt
    ),
    class = c("polite", "session")
  )

  self
}

32 changes: 32 additions & 0 deletions R/nod.R
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
#' Agree modification of session path with the host
#'
#' @param bow object of class `polite`, `session` created by `polite::bow()`
#' @param path string value of path/url to follow. The function accepts both a path (string part of url following the domain name) or a full url.
#'
#' @return object of class `polite`, `session` with modified url
#'
#' @examples
#' \dontrun{
#' library(polite)
#'
#' host <- "https://www.cheese.com"
#' session <- bow(host) %>%
#'   nod(path="by_type")
#' session
#' }
#' @export
nod <- function(bow, path){

  if(!inherits(bow, "polite"))
    stop("Please, bow before you nod")

  # if user supplied a full url instead of a path
  if(grepl("://|www\\.", path)){
    # BUG FIX: the original called bow(url = url, ...) where `url` was
    # undefined, assigned the result to `nod` (shadowing nothing useful),
    # and passed bow$config positionally, where it bound to `force`.
    # Re-bow to the new url and keep the result so it is returned below.
    if(urltools::domain(path) != bow$domain)
      bow <- bow(url = path, user_agent = bow$user_agent)
    path <- urltools::path(path)
  }
  urltools::path(bow$url) <- path

  # BUG FIX: return the (possibly re-created) session. The original returned
  # `nod`, which resolved to this function itself, not a session object.
  bow
}
114 changes: 114 additions & 0 deletions R/scrape.R
Original file line number Diff line number Diff line change
@@ -0,0 +1,114 @@
#' @importFrom httr http_error GET add_headers warn_for_status content
#' @importFrom ratelimitr rate limit_rate
m_scrape <- function(bow, params=NULL, accept="html", period=1, verbose=FALSE) { # nolint

  # Validate input before doing any work. The original performed this check
  # only after building the rate-limited getter.
  if(!inherits(bow, "polite"))
    stop("Please, be polite: bow and scrape!")

  # Plain GET using the session's handle and config.
  httr_get <- function(bow){
    httr::GET(
      url = bow$url,
      config = bow$config,
      handle = bow$handle
    )
  }

  # Rate-limited getter: at most 1 request per `period` seconds.
  httr_get_ltd <- ratelimitr::limit_rate(
    httr_get,
    ratelimitr::rate(n = 1, period = period)
  )

  if(!is.null(params))
    urltools::parameters(bow$url) <- params

  url_parsed <- urltools::url_parse(bow$url)

  # Honor robots.txt for this path and user agent.
  if(!bow$robotstxt$check(path=url_parsed$path[1], bot=bow$user_agent)){
    message("No scraping allowed here!")
    return(NULL)
  }

  # `accept` may be a mime type ("text/html"), a dotted extension (".json")
  # or a bare extension ("json"); normalize bare extensions to ".ext" so
  # httr::accept() can guess the mime type.
  if(substr(accept,1,1)=="." || grepl("/", accept)){
    accept_type <- httr::accept(accept)
  } else{
    accept_type <- httr::accept(paste0(".", accept))
  }

  bow$config <- c(bow$config, accept_type)

  response <- httr_get_ltd(bow)
  max_attempts <- 3

  att_msg <- c(rep("",max_attempts-1),
               "This is the last attempt, if it fails will return NULL")

  # Retry on http errors with exponential backoff (2^try_number seconds).
  try_number <- 1
  while (httr::http_error(response) && try_number < max_attempts) {
    try_number <- try_number + 1
    if (verbose)
      message(paste0("Attempt number ", try_number,".", att_msg[[try_number]]))

    Sys.sleep(2^try_number)
    response <- httr_get_ltd(bow)
  }

  status_warn_msg <- paste("fetch data from", bow$url)
  httr::warn_for_status(response, status_warn_msg)

  # BUG FIX: honor the promise made in the retry message above. If the
  # request still failed after all attempts, return NULL (after warning)
  # instead of attempting to parse the error body.
  if (httr::http_error(response))
    return(NULL)

  # Parse the body using the content type reported by the server.
  httr::content(response, type = response$headers$`content-type`)
}


#' Scrape the content of authorized page/API
#'
#' Memoised: repeated calls with identical arguments return the cached
#' result instead of re-requesting the page. Clear the cache with
#' `bow(..., force = TRUE)`.
#'
#' @param bow host introduction object of class `polite`, `session` created by `bow()` or `nod()`
#' @param params character vector of parameters to be appended to url in the format "parameter=value"
#' @param accept character value of expected data type to be returned by host (e.g. "html", "json", "xml", "csv", "txt", etc)
#' @param period time-out between requests in seconds. Can not be less than 1
#' @param verbose extra feedback from the function. Defaults to FALSE
#'
#' @return Parsed content of the response (via `httr::content()`), which can
#'   be further processed by functions in the `rvest` package
#'
#' @examples
#' \dontrun{
#' library(rvest)
#' biases <- bow("https://en.wikipedia.org/wiki/List_of_cognitive_biases") %>%
#'   scrape() %>%
#'   html_nodes(".wikitable") %>%
#'   html_table()
#' biases
#' }
#'
#'
#' \dontrun{
#' library(rvest)
#' library(polite)
#'
#' host <- "https://www.cheese.com"
#' session <- bow(host)
#'
#' # scrape pages by re-authenticating on new page and scraping with parameters
#' get_cheese <- function(session, path, params){
#'  nod(session, path) %>%
#'     scrape(params)
#' }
#'
#' res <- vector("list", 5)
#' # iterate over first 5 pages
#' for (i in seq(5)){
#'   res[[i]] <- get_cheese(session,
#'             path = "alphabetical",
#'             params = paste0("page=", i)) %>%
#'     html_nodes("h3 a") %>%
#'     html_text()
#'
#' }
#' res
#' }
#'
#' @export
scrape <- memoise::memoise(m_scrape)

24 changes: 24 additions & 0 deletions R/utils.R
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
#' Null coalescing operator
#'
#' Returns `lhs` unless it is `NULL` or has zero length, in which case
#' `rhs` is returned instead.
#' See \code{purrr::\link[purrr]{\%||\%}} for details.
#'
#' @name null-coalesce
#' @rdname nullcoalesce
#' @keywords internal
#' @export
#' @usage lhs \%||\% rhs
"%||%" <- function(lhs, rhs) {
  lhs_is_usable <- !is.null(lhs) && length(lhs) > 0
  if (lhs_is_usable) {
    lhs
  } else {
    rhs
  }
}

#' Pipe operator
#'
#' Re-export of the magrittr pipe so users of this package can pipe
#' (`bow(...) %>% nod(...) %>% scrape()`) without attaching magrittr
#' explicitly.
#' See \code{magrittr::\link[magrittr]{\%>\%}} for details.
#'
#' @name %>%
#' @rdname pipe
#' @keywords internal
#' @export
#' @importFrom magrittr %>%
#' @usage lhs \%>\% rhs
NULL
Loading

0 comments on commit b3ea36c

Please sign in to comment.