-
Notifications
You must be signed in to change notification settings - Fork 3
/
streamlit_app.py
165 lines (133 loc) · 5.8 KB
/
streamlit_app.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
import streamlit as st
import pandas as pd
from st_files_connection import FilesConnection

# Streamlit "magic": a bare string literal at module level is rendered as Markdown.
"# Streamlit FilesConnection"
"""
A simple demo for Streamlit FilesConnection.
"""

# Sample data that is written to (and read back from) each storage backend below.
df = pd.DataFrame({"Owner": ["jerry", "barbara", "alex"], "Pet": ["fish", "cat", "puppy"], "Count": [4, 2, 1]})

# One tab per backend exercised by the demo: local filesystem, AWS S3, Google Cloud Storage.
local, s3, gcs = st.tabs(["Local", "S3", "GCS"])
with local:
    "### Local Access"

    # st.echo() both executes the enclosed code and displays its source verbatim in
    # the app, so anything placed inside the block is shown to the viewer.
    # NOTE(review): st.experimental_connection was deprecated in favor of
    # st.connection (Streamlit >= 1.28) — confirm the pinned Streamlit version
    # before migrating this demo.
    with st.echo():
        conn = st.experimental_connection("local", type=FilesConnection)
        st.help(conn)

    # Read the repo's LICENSE file (path is relative to the app's working
    # directory) as plain text; the bare name on the last line renders the
    # contents via Streamlit magic.
    with st.echo():
        with st.expander("View the repo license with help from FilesConnection"):
            license_text = conn.read('../LICENSE', input_format='text')
            license_text
with s3:
    "### Reading data from S3"
    """
    Write some test files to a S3 bucket and read them back. To run this code you need to:
    - Ensure s3fs is installed (should be already if you did `pip install -r requirements.txt`)
    - Set up credentials to an S3 bucket - either using default AWS configuration or via `.streamlit/secrets.toml`
    - Set your correct bucket in the text input below and hit the checkbox!
    #### Streamlit secrets example
    ```toml
    # .streamlit/secrets.toml
    [connections.s3]
    key = "..."
    secret = "..."
    ```
    """
    s3_bucket = st.text_input("S3 Bucket:", value="st-connection-test")

    # Gate the S3 calls behind a checkbox so the page still loads without credentials.
    if st.checkbox("Run the S3 code"):
        # NOTE(review): st.experimental_connection was deprecated in favor of
        # st.connection (Streamlit >= 1.28) — confirm the pinned version before migrating.
        # st.echo() executes the enclosed code AND displays its source in the app.
        with st.echo():
            conn = st.experimental_connection("s3", type=FilesConnection)
            st.help(conn)

        # Seed the bucket with one file per format. Each file is created only when a
        # read raises FileNotFoundError (EAFP), so reruns don't rewrite existing files.
        with st.expander("Setup code"):
            with st.echo():
                text_file = f"{s3_bucket}/test.txt"
                csv_file = f"{s3_bucket}/test.csv"
                parquet_file = f"{s3_bucket}/test.parquet"
                try:
                    _ = conn.read(text_file, input_format='text')
                except FileNotFoundError:
                    with conn.open(text_file, "wt") as f:
                        f.write("This is a test")
                try:
                    _ = conn.read(csv_file, input_format='csv')
                except FileNotFoundError:
                    with conn.open(csv_file, "wt") as f:
                        df.to_csv(f, index=False)
                try:
                    _ = conn.read(parquet_file, input_format='parquet')
                except FileNotFoundError:
                    with conn.open(parquet_file, "wb") as f:
                        df.to_parquet(f)

        # Read each seeded file back through the connection, one format per section.
        "#### Text files"
        with st.echo():
            # "s3://" is optional here, just included for effect
            st.write(conn.read(f"s3://{s3_bucket}/test.txt", input_format='text'))

        "#### CSV Files"
        with st.echo():
            st.write(conn.read(f"s3://{s3_bucket}/test.csv", input_format='csv'))

        "#### Parquet Files"
        with st.echo():
            st.write(conn.read(f"s3://{s3_bucket}/test.parquet", input_format='parquet'))

        # conn.fs exposes the underlying fsspec filesystem for operations the
        # connection doesn't wrap directly, such as directory listings.
        "#### List operations"
        with st.echo():
            st.write(conn.fs.ls(f"s3://{s3_bucket}/"))
with gcs:
    "### Reading data from Google Cloud Storage"
    """
    Write some test files to a GCS bucket and read them back. To run this code you need to:
    - Ensure gcsfs is installed (should be already if you did `pip install -r requirements.txt`)
    - Set up credentials to an GCS bucket using `.streamlit/secrets.toml`
    - Alternatively, you can pass a `token=` argument to the connection constructor with a path to your google token file
    - Set your correct bucket in the text input below and hit the checkbox!
    #### Streamlit secrets example
    ```toml
    # .streamlit/secrets.toml
    [connections.gcs]
    type = "..."
    project_id = "..."
    private_key_id = "..."
    private_key = "-----BEGIN PRIVATE KEY-----..."
    client_email = "..."
    client_id = "..."
    auth_uri = "https://accounts.google.com/o/oauth2/auth"
    token_uri = "https://oauth2.googleapis.com/token"
    auth_provider_x509_cert_url = "https://www.googleapis.com/oauth2/v1/certs"
    client_x509_cert_url = "..."
    ```
    """
    gcs_bucket = st.text_input("GCS Bucket:", value="st-connection-test")

    # Gate the GCS calls behind a checkbox so the page still loads without credentials.
    if st.checkbox("Run the GCS code"):
        # NOTE(review): st.experimental_connection was deprecated in favor of
        # st.connection (Streamlit >= 1.28) — confirm the pinned version before migrating.
        # st.echo() executes the enclosed code AND displays its source in the app.
        with st.echo():
            conn = st.experimental_connection("gcs", type=FilesConnection)
            st.help(conn)

        # Seed the bucket with one file per format. Each file is created only when a
        # read raises FileNotFoundError (EAFP), so reruns don't rewrite existing files.
        with st.expander("Setup code"):
            with st.echo():
                text_file = f"{gcs_bucket}/test3.txt"
                csv_file = f"{gcs_bucket}/test3.csv"
                parquet_file = f"{gcs_bucket}/test3.parquet"
                try:
                    _ = conn.read(text_file, input_format='text')
                except FileNotFoundError:
                    with conn.open(text_file, "wt") as f:
                        f.write("This is a test")
                try:
                    _ = conn.read(csv_file, input_format='csv')
                except FileNotFoundError:
                    with conn.open(csv_file, "wt") as f:
                        df.to_csv(f, index=False)
                try:
                    _ = conn.read(parquet_file, input_format='parquet')
                except FileNotFoundError:
                    with conn.open(parquet_file, "wb") as f:
                        df.to_parquet(f)

        # Read each seeded file back through the connection, one format per section.
        "#### Text files"
        with st.echo():
            # "gcs://" is optional here, just included for effect
            st.write(conn.read(f"gcs://{gcs_bucket}/test3.txt", input_format='text'))

        "#### CSV Files"
        with st.echo():
            st.write(conn.read(f"gcs://{gcs_bucket}/test3.csv", input_format='csv'))

        "#### Parquet Files"
        with st.echo():
            st.write(conn.read(f"gcs://{gcs_bucket}/test3.parquet", input_format='parquet'))

        # conn.fs exposes the underlying fsspec filesystem for operations the
        # connection doesn't wrap directly, such as directory listings.
        "#### List operations"
        with st.echo():
            st.write(conn.fs.ls(f"gcs://{gcs_bucket}/"))