Feat: gcloud integration, fixes #18 #23

Open · wants to merge 6 commits into main
178 changes: 178 additions & 0 deletions integrations/gcloud/.gitignore
@@ -0,0 +1,178 @@
# Based on https://raw.githubusercontent.com/github/gitignore/main/Node.gitignore

# Logs

logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*

# Caches

.cache

# Diagnostic reports (https://nodejs.org/api/report.html)

report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json

# Runtime data

pids
*.pid
*.seed
*.pid.lock

# Directory for instrumented libs generated by jscoverage/JSCover

lib-cov

# Coverage directory used by tools like istanbul

coverage
*.lcov

# nyc test coverage

.nyc_output

# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)

.grunt

# Bower dependency directory (https://bower.io/)

bower_components

# node-waf configuration

.lock-wscript

# Compiled binary addons (https://nodejs.org/api/addons.html)

build/Release

# Dependency directories

node_modules/
jspm_packages/

# Snowpack dependency directory (https://snowpack.dev/)

web_modules/

# TypeScript cache

*.tsbuildinfo

# Optional npm cache directory

.npm

# Optional eslint cache

.eslintcache

# Optional stylelint cache

.stylelintcache

# Microbundle cache

.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/

# Optional REPL history

.node_repl_history

# Output of 'npm pack'

*.tgz

# Yarn Integrity file

.yarn-integrity

# dotenv environment variable files

.env
.env.development.local
.env.test.local
.env.production.local
.env.local

# parcel-bundler cache (https://parceljs.org/)

.parcel-cache

# Next.js build output

.next
out

# Nuxt.js build / generate output

.nuxt
dist

# Gatsby files

# Comment in the public line in if your project uses Gatsby and not Next.js

# https://nextjs.org/blog/next-9-1#public-directory-support

# public

# vuepress build output

.vuepress/dist

# vuepress v2.x temp and cache directory

.temp

# Docusaurus cache and generated files

.docusaurus

# Serverless directories

.serverless/

# FuseBox cache

.fusebox/

# DynamoDB Local files

.dynamodb/

# TernJS port file

.tern-port

# Stores VSCode versions used for testing VSCode extensions

.vscode-test

# yarn v2

.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*

# IntelliJ based IDEs
.idea

# Finder (MacOS) folder config
.DS_Store

# Credentials file:
creds.json
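
The test files in this diff import `resource` from `../resource.ts`, and the ignore list above excludes `creds.json`, but neither of those two files is shown in the diff. A minimal sketch of what `integrations/gcloud/resource.ts` could look like, assuming `creds.json` is a standard service-account JSON key downloaded from the Google Cloud console (the file name, location, and shape are assumptions, not something this PR specifies):

// integrations/gcloud/resource.ts (hypothetical sketch; not part of this diff)
// Re-exports the locally stored service-account key so the tests can pass it
// around as the Gcloud resource. Assumes creds.json sits next to this file and
// is kept out of version control via the .gitignore entry above.
import creds from './creds.json'

export const resource = creds as {
  type: string
  project_id: string
  private_key_id: string
  private_key: string
  client_email: string
  client_id: string
  auth_uri: string
  token_uri: string
  auth_provider_x509_cert_url: string
  client_x509_cert_url: string
  universe_domain: string
}
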
46 changes: 46 additions & 0 deletions integrations/gcloud/Bigquery_Insert_Rows/script.bun.test.ts
@@ -0,0 +1,46 @@
import { expect, test } from 'bun:test'
import { main } from './script.bun.ts'
import { BigQuery } from '@google-cloud/bigquery'
import { resource } from '../resource.ts'

// The dataset and table can also be created manually in the Google Cloud console.
// IMPORTANT NOTE: the streaming insert API is only available on billing-enabled (paid) projects.
// Otherwise the call fails with "error: Access Denied: BigQuery BigQuery: Streaming insert is not allowed in the free tier".
test('Bigquery Insert Rows', async () => {
  // Create dataset and table first
  const bigquery = new BigQuery({
    credentials: resource,
    projectId: resource.project_id
  })

  const datasetId = Math.random().toString(36).slice(2) + '_windmill_labs_dataset'
  // Specify the geographic location where the dataset should reside
  const datasetOptions = {
    location: 'US'
  }
  // Create a new dataset
  await bigquery.createDataset(datasetId, datasetOptions)

  const tableId = Math.random().toString(36).slice(2) + '_windmill_labs_table'
  const schema = 'firstName:string, lastName:string, organization:string'
  const tableOptions = {
    schema: schema,
    location: 'US'
  }
  // Create a new table in the dataset
  await bigquery.dataset(datasetId).createTable(tableId, tableOptions)

  const response = await main(resource, {
    datasetId: datasetId,
    tableId: tableId,
    rows: [
      { firstName: 'john', lastName: 'doe', organization: 'GCloud' },
      { firstName: 'abhishek', lastName: 'gupta', organization: 'Windmill_Labs_Community' }
    ]
  })

  expect(response).toBeDefined()

  // delete the dataset
  await bigquery.dataset(datasetId).delete({ force: true })
})
34 changes: 34 additions & 0 deletions integrations/gcloud/Bigquery_Insert_Rows/script.bun.ts
@@ -0,0 +1,34 @@
// IMPORTANT NOTE: the streaming insert API is only available on billing-enabled (paid) projects
import { BigQuery } from '@google-cloud/bigquery'

type Gcloud = {
  type: string
  project_id: string
  private_key_id: string
  private_key: string
  client_email: string
  client_id: string
  auth_uri: string
  token_uri: string
  auth_provider_x509_cert_url: string
  client_x509_cert_url: string
  universe_domain: string
}

export async function main(
  resource: Gcloud,
  data: {
    datasetId: string
    tableId: string
    rows: {
      [key: string]: any
    }[]
  }
) {
  const bigquery = new BigQuery({
    credentials: resource,
    projectId: resource.project_id
  })

  return await bigquery.dataset(data.datasetId).table(data.tableId).insert(data.rows)
}
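
As flagged in the comments above, the streaming API behind `table.insert()` is rejected on projects without billing enabled. A rough free-tier alternative is to run a batch load job instead; the sketch below stages the rows as newline-delimited JSON and calls `table.load()`. The helper name, the temp-file handling, and the trimmed-down credentials type are illustrative assumptions, not part of this PR:

// Hypothetical free-tier variant: batch-load the rows instead of streaming them.
import { BigQuery } from '@google-cloud/bigquery'
import { tmpdir } from 'node:os'
import { join } from 'node:path'
import { writeFile, rm } from 'node:fs/promises'

// Minimal slice of the service-account key needed here (assumed shape).
type GcloudCreds = { project_id: string; client_email: string; private_key: string }

export async function insertRowsViaLoadJob(
  resource: GcloudCreds,
  data: { datasetId: string; tableId: string; rows: Record<string, any>[] }
) {
  const bigquery = new BigQuery({ credentials: resource, projectId: resource.project_id })

  // Load jobs accept newline-delimited JSON, so stage the rows in a temp file.
  const tmpFile = join(tmpdir(), `rows-${Date.now()}.ndjson`)
  await writeFile(tmpFile, data.rows.map((row) => JSON.stringify(row)).join('\n'))

  try {
    // table.load() runs a load job rather than a streaming insert; load jobs
    // work without billing enabled, subject to the usual load-job quotas.
    return await bigquery
      .dataset(data.datasetId)
      .table(data.tableId)
      .load(tmpFile, { sourceFormat: 'NEWLINE_DELIMITED_JSON' })
  } finally {
    await rm(tmpFile, { force: true })
  }
}
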
4 changes: 4 additions & 0 deletions integrations/gcloud/Bigquery_Insert_Rows/script.json
@@ -0,0 +1,4 @@
{
  "summary": "Bigquery Insert Rows",
  "description": "Inserts rows into a BigQuery table. [See the docs](https://github.com/googleapis/nodejs-bigquery) and an example [here](https://github.com/googleapis/nodejs-bigquery/blob/main/samples/insertRowsAsStream.js)."
}
18 changes: 18 additions & 0 deletions integrations/gcloud/Create_Bucket/script.bun.test.ts
@@ -0,0 +1,18 @@
import { expect, test } from 'bun:test'
import { main } from './script.bun.ts'
import { Storage } from '@google-cloud/storage'
import { resource } from '../resource.ts'

test('Create Bucket', async () => {
  const bucketName = Math.random().toString(36).slice(2)
  const response = await main(resource, bucketName)

  expect(response).toBeDefined()
  expect(response[0].metadata.name).toBe(bucketName)

  const storage = new Storage({
    credentials: resource,
    projectId: resource.project_id
  })
  await storage.bucket(bucketName).delete()
})
24 changes: 24 additions & 0 deletions integrations/gcloud/Create_Bucket/script.bun.ts
@@ -0,0 +1,24 @@
import { Storage } from '@google-cloud/storage'

type Gcloud = {
  type: string
  project_id: string
  private_key_id: string
  private_key: string
  client_email: string
  client_id: string
  auth_uri: string
  token_uri: string
  auth_provider_x509_cert_url: string
  client_x509_cert_url: string
  universe_domain: string
}

export async function main(resource: Gcloud, bucketName: string) {
  const storage = new Storage({
    credentials: resource,
    projectId: resource.project_id
  })

  return await storage.createBucket(bucketName)
}
4 changes: 4 additions & 0 deletions integrations/gcloud/Create_Bucket/script.json
@@ -0,0 +1,4 @@
{
  "summary": "Create Bucket",
  "description": "Creates a bucket on Google Cloud Storage. [See the docs](https://googleapis.dev/nodejs/storage/latest/Bucket.html#create)."
}
23 changes: 23 additions & 0 deletions integrations/gcloud/Get_Bucket_Metadata/script.bun.test.ts
@@ -0,0 +1,23 @@
import { expect, test } from 'bun:test'
import { main } from './script.bun.ts'
import { main as createBucket } from '../Create_Bucket/script.bun.ts'
import { Storage } from '@google-cloud/storage'
import { resource } from '../resource.ts'

test('Get Bucket Metadata', async () => {
  // Create Bucket first
  const bucketName = Math.random().toString(36).slice(2)

  await createBucket(resource, bucketName)

  const response = await main(resource, bucketName)

  expect(response).toBeDefined()
  expect(response[0].name).toBe(bucketName)

  const storage = new Storage({
    credentials: resource,
    projectId: resource.project_id
  })
  await storage.bucket(bucketName).delete()
})
24 changes: 24 additions & 0 deletions integrations/gcloud/Get_Bucket_Metadata/script.bun.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
import { Storage } from '@google-cloud/storage'

type Gcloud = {
  type: string
  project_id: string
  private_key_id: string
  private_key: string
  client_email: string
  client_id: string
  auth_uri: string
  token_uri: string
  auth_provider_x509_cert_url: string
  client_x509_cert_url: string
  universe_domain: string
}

export async function main(resource: Gcloud, bucketName: string) {
  const storage = new Storage({
    credentials: resource,
    projectId: resource.project_id
  })

  return await storage.bucket(bucketName).getMetadata()
}
4 changes: 4 additions & 0 deletions integrations/gcloud/Get_Bucket_Metadata/script.json
@@ -0,0 +1,4 @@
{
  "summary": "Get Bucket Metadata",
  "description": "Gets Google Cloud Storage bucket metadata. [See the docs](https://googleapis.dev/nodejs/storage/latest/Bucket.html#getMetadata)."
}