🔧 use bigquery emulator for tests (#166)
* pull request target for bigquery

* emulate bigquery

* pr

* fmt

* rm auth code

* try inline options

* pass project first

* try to override entrypoint

* override entrypoint

* point to correct entrypoint

* try one line

* try w/o a service

* -d

* don't install storage for GHA

* rm extra line

* fix test
zschumacher committed Jul 30, 2023
1 parent 1e6912a commit e4da275
Showing 6 changed files with 55 additions and 45 deletions.
6 changes: 5 additions & 1 deletion .github/workflows/test-bigquery.yml
@@ -20,6 +20,10 @@ jobs:
- name: checkout
uses: actions/checkout@v1

- name: start bq emulator
run: |
docker run -d -p 9050:9050 -p 9060:9060 ghcr.io/goccy/bigquery-emulator:latest --project pydapper --dataset pydapper
- name: set up python
uses: actions/setup-python@v2
with:
@@ -41,7 +45,7 @@ jobs:

- name: install dependencies
if: steps.cache-poetry-deps.outputs.cache-hit != 'true'
run: poetry install -E google-cloud-bigquery -E google-cloud-bigquery-storage
run: poetry install -E google-cloud-bigquery

- name: test and coverage
env:
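The new workflow step boots the goccy/bigquery-emulator container before the tests run; the emulator serves the BigQuery HTTP API on port 9050 (with a gRPC endpoint on 9060), and the `--project pydapper --dataset pydapper` flags pre-create the project and dataset the tests expect. A minimal sketch, not part of the commit, of how to confirm the emulator is reachable from Python under those assumptions:

```python
# Sketch only: point an ordinary BigQuery client at the local emulator.
# Assumes the emulator from the workflow step is listening on localhost:9050
# with project "pydapper" and dataset "pydapper"; anonymous credentials work
# because the emulator performs no authentication.
from google.api_core.client_options import ClientOptions
from google.auth.credentials import AnonymousCredentials
from google.cloud.bigquery import Client

client = Client(
    project="pydapper",
    credentials=AnonymousCredentials(),
    client_options=ClientOptions(api_endpoint="http://localhost:9050"),
)
print([dataset.dataset_id for dataset in client.list_datasets()])  # expect ['pydapper']
```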
8 changes: 8 additions & 0 deletions docker-compose.yml
@@ -39,3 +39,11 @@ services:
APP_USER: "pydapper"
APP_USER_PASSWORD: "pydapper"
ORACLE_DATABASE: "pydapper"

bigquery:
image: ghcr.io/goccy/bigquery-emulator:latest
platform: "linux/amd64"
command: "--project=pydapper --dataset=pydapper"
ports:
- "9050:9050"
- "9060:9060"
2 changes: 1 addition & 1 deletion tests/databases/bigquery/owner.sql
@@ -1,4 +1,4 @@
CREATE TABLE IF NOT EXISTS {owner_table_name} (
id integer NOT NULL,
id int64 NOT NULL,
name string NOT NULL
);
4 changes: 2 additions & 2 deletions tests/databases/bigquery/task.sql
@@ -1,6 +1,6 @@
CREATE TABLE IF NOT EXISTS {task_table_name} (
id integer,
id int64,
description string NOT NULL,
due_date date NOT NULL,
owner_id integer NOT NULL
owner_id int64 NOT NULL
);
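Both DDL files swap `integer` for `int64`, the canonical GoogleSQL type name, which the emulator appears to require (real BigQuery also accepts `INTEGER` as an alias). For illustration, a rough sketch of how the test setup renders one of these templates; the fully qualified table name below is hypothetical, and the real fixtures append the Python version and a per-test uuid:

```python
from pathlib import Path

# Hypothetical name in project.dataset.table form; see the conftest changes
# below for how the real task_table_name fixture builds it.
table_name = "pydapper.pydapper.task_example"

ddl = (Path("tests/databases/bigquery") / "task.sql").read_text().format(
    task_table_name=table_name
)
# ddl now starts with:
# CREATE TABLE IF NOT EXISTS pydapper.pydapper.task_example (
#     id int64,
#     ...
```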
51 changes: 17 additions & 34 deletions tests/test_bigquery/conftest.py
@@ -1,36 +1,11 @@
import json
import os
import sys
import uuid
from pathlib import Path

import pytest

from pydapper.bigquery import GoogleBigqueryClientCommands

AUTH_DIR = Path(__file__).parent / "auth"
AUTH_FILE_PATH = AUTH_DIR / "key.json"

AUTH = {
"type": "service_account",
"project_id": "pydapper",
"private_key_id": "08c8a357ab549f6d34f1705512bdb00c2efaf68f",
"private_key": os.getenv("GOOGLE_PRIVATE_KEY", "DUMMY").replace("\\n", "\n"),
"client_email": "pydapper@pydapper.iam.gserviceaccount.com",
"client_id": "105936813038399443987",
"auth_uri": "https://accounts.google.com/o/oauth2/auth",
"token_uri": "https://oauth2.googleapis.com/token",
"auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
"client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/pydapper%40pydapper.iam.gserviceaccount.com",
}


@pytest.fixture(autouse=True, scope="session")
def write_auth_file():
AUTH_DIR.mkdir(exist_ok=True)
with open(AUTH_FILE_PATH, "w") as auth_file:
json.dump(AUTH, auth_file)


@pytest.fixture(scope="function")
def python_version():
@@ -46,32 +21,40 @@ def func_uuid():

@pytest.fixture(scope="function")
def task_table_name(python_version, func_uuid):
return f"pydapper.task_{python_version}_{func_uuid.hex}"
return f"pydapper.pydapper.task_{python_version}_{func_uuid.hex}"


@pytest.fixture(scope="function")
def owner_table_name(python_version, func_uuid):
return f"pydapper.owner_{python_version}_{func_uuid.hex}"
return f"pydapper.pydapper.owner_{python_version}_{func_uuid.hex}"


@pytest.fixture(scope="function")
def creds_as_env_var(monkeypatch):
monkeypatch.setenv("GOOGLE_APPLICATION_CREDENTIALS", str(AUTH_FILE_PATH))
@pytest.fixture
def client(monkeypatch):
from google.api_core.client_options import ClientOptions
from google.auth.credentials import AnonymousCredentials
from google.cloud.bigquery import Client

options = ClientOptions(api_endpoint="http://localhost:9050")

client = Client(client_options=options, credentials=AnonymousCredentials(), project="pydapper")
yield client


@pytest.fixture(scope="function")
def commands() -> GoogleBigqueryClientCommands:
def commands(client) -> GoogleBigqueryClientCommands:
from google.cloud.bigquery.dbapi import connect

with GoogleBigqueryClientCommands(connect()) as commands:
with GoogleBigqueryClientCommands(connect(client=client)) as commands:
yield commands


@pytest.fixture(scope="function")
def bigquery_setup(setup_sql_dir, creds_as_env_var, python_version, owner_table_name, task_table_name):
def bigquery_setup(client, setup_sql_dir, python_version, owner_table_name, task_table_name):
from google.cloud.bigquery.dbapi import connect

conn = connect()
conn = connect(client=client)

cursor = conn.cursor()
owner = (setup_sql_dir / "bigquery" / "owner.sql").read_text().format(owner_table_name=owner_table_name)
cursor.execute(owner)
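The rewritten conftest drops the service-account key entirely: the `client` fixture builds a `google.cloud.bigquery.Client` with `AnonymousCredentials` and an `api_endpoint` of `http://localhost:9050`, and both the dbapi connection behind `commands` and the one used by `bigquery_setup` are created from that client. Table names gain the `pydapper.pydapper.` prefix because fully qualified BigQuery names take the form project.dataset.table, and the emulator is started with a project and a dataset both named `pydapper`. A hypothetical test exercising the fixture directly (not part of the commit) could look like:

```python
import pytest

pytestmark = pytest.mark.bigquery


def test_emulator_roundtrip(client):
    # Runs entirely against the local emulator; no GCP credentials involved.
    rows = list(client.query("select 1 as one").result())
    assert rows[0]["one"] == 1
```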
29 changes: 22 additions & 7 deletions tests/test_bigquery/test_google_bigquery_client.py
@@ -1,9 +1,10 @@
import datetime

import pytest

import pydapper
from pydapper.bigquery import GoogleBigqueryClientCommands
from tests.test_suites.commands import ExecuteScalarTestSuite
from tests.test_suites.commands import ExecuteTestSuite
from tests.test_suites.commands import QueryFirstOrDefaultTestSuite
from tests.test_suites.commands import QueryFirstTestSuite
from tests.test_suites.commands import QueryMultipleTestSuite
@@ -14,22 +15,36 @@
pytestmark = pytest.mark.bigquery


def test_using(creds_as_env_var):
def test_using(client):
from google.cloud.bigquery.dbapi import connect

with pydapper.using(connect()) as commands:
with pydapper.using(connect(client=client)) as commands:
assert isinstance(commands, GoogleBigqueryClientCommands)


@pytest.mark.parametrize("driver", ["bigquery", "bigquery+google"])
def test_connect_from_env(creds_as_env_var, driver):
with pydapper.connect(f"{driver}:////") as commands:
def test_connect(driver, client):
with pydapper.connect(f"{driver}:////", client=client) as commands:
assert isinstance(commands, GoogleBigqueryClientCommands)


@pytest.mark.usefixtures("bigquery_setup")
class TestExecute(ExecuteTestSuite):
...
class TestExecute:
def test_single(self, commands, owner_table_name):
commands.execute(f"UPDATE {owner_table_name} SET name = ?new_name? WHERE id = 1", {"new_name": "Zachary"})
assert commands.execute_scalar(f"select name from {owner_table_name} where id = 1") == "Zachary"

def test_multiple(self, commands, task_table_name):
commands.execute(
f"INSERT INTO {task_table_name} (description, due_date, owner_id) "
"VALUES (?description?, ?due_date?, ?owner_id?)",
[
{"description": "new task", "due_date": datetime.date(2022, 1, 1), "owner_id": 1},
{"description": "another new task", "due_date": datetime.date(2022, 1, 1), "owner_id": 1},
],
)
# count seems to not work with the bigquery emulator?
assert len(commands.query(f"select * from {task_table_name}")) == 5


@pytest.mark.usefixtures("bigquery_setup")
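The same wiring works outside pytest for ad-hoc pydapper use against the emulator. A standalone sketch, not part of the committed code, assuming the emulator is running locally on port 9050:

```python
from google.api_core.client_options import ClientOptions
from google.auth.credentials import AnonymousCredentials
from google.cloud.bigquery import Client
from google.cloud.bigquery.dbapi import connect

import pydapper

# Mirrors the conftest client fixture: anonymous credentials plus the emulator endpoint.
client = Client(
    project="pydapper",
    credentials=AnonymousCredentials(),
    client_options=ClientOptions(api_endpoint="http://localhost:9050"),
)

with pydapper.using(connect(client=client)) as commands:
    # With the default model, rows come back as dicts.
    print(commands.query("select 1 as one"))
```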
