---
title: "Chapter 1"
output: html_document
---
```{r setup, include=FALSE}
# Global knitr defaults: echo = TRUE shows source code in the rendered HTML.
knitr::opts_chunk$set(echo = TRUE)
# dplyr supplies the table verbs (copy_to, src_tbls, ...) used in later chunks.
library(dplyr)
```
## The connect-work-disconnect pattern
```{r}
# sparklyr provides the dplyr-compatible R interface to Spark
library(sparklyr)

# Open a connection to the standalone Spark cluster
spark_conn <- spark_connect(
  master = "spark://192.168.86.31:7077",
  spark_home = "/usr/local/spark-2.4.0-bin-hadoop2.7"
)

# Report which Spark version the cluster is running
spark_version(sc = spark_conn)

# Always release the connection when the work is done
spark_disconnect(sc = spark_conn)
```
## Read sqlite data
```{r eval = FALSE}
# BUG FIX: the chunk option was `eval = false`; knitr evaluates chunk options
# as R code and R has no object `false`, so knitting errored. Must be FALSE.

# DBI gives a uniform interface to database backends such as RSQLite.
library(DBI)

# Open a connection to the on-disk SQLite database.
con <- dbConnect(drv = RSQLite::SQLite(), 'track_metadata.db')

# List the tables the database contains.
dbListTables(con)

# Read the whole "songs" table into an in-memory data frame.
track_metadata <- dbReadTable(con, "songs")

# Disconnect from the database and drop the now-stale handle.
dbDisconnect(con)
rm(con)
```
## Copying data into Spark
```{r eval = FALSE}
# BUG FIX: the chunk option was `eval = false`; chunk options are evaluated as
# R code, and `false` is an undefined object. It must be the literal FALSE.

# Load dplyr for the table verbs used against Spark (copy_to, src_tbls).
library(dplyr)

# Explore track_metadata structure before shipping it to the cluster.
# (track_metadata is created by the SQLite chunk above.)
str(track_metadata)

# Connect to your Spark cluster
spark_conn <- spark_connect(
  master = "spark://192.168.86.31:7077",
  spark_home = "/usr/local/spark-2.4.0-bin-hadoop2.7"
)

# Copy iris to Spark.
# BUG FIX: the result was previously assigned to track_metadata_tbl and then
# immediately overwritten by the next copy_to -- a misleading dead store.
iris_tbl <- copy_to(spark_conn, iris, overwrite = TRUE)

# Copy track_metadata to Spark.
# NOTE(review): the original author flagged this call as failing
# ("This does not work!") -- presumably a serialization issue with one of
# track_metadata's column types; confirm against the actual data.
track_metadata_tbl <- copy_to(spark_conn, track_metadata, overwrite = TRUE)

# List the data frames registered in Spark
src_tbls(spark_conn)

# Disconnect from Spark
spark_disconnect(spark_conn)
```