Skip to content
Permalink
Browse files
fix: add support for json data type (#593)
* fix: add support for json data type

* fix: skip json test for emulator

* refactor: move JsonObject data type to spanner_v1/types/datatypes.py

* refactor: remove duplicate import

* refactor: remove extra connection creation in test

* refactor: move data_types.py file to google/cloud/spanner_v1/

* fix: increased db version time to current time, to give db backup more time

* fix: undo database_version_time method definition.
  • Loading branch information
vi3k6i5 committed Oct 4, 2021
1 parent 97b2d6b commit bc5ddc3fb1eb7eff9a266fe3d1c3c8a4a6fd3763
@@ -21,6 +21,7 @@

import sqlparse
from google.cloud import spanner_v1 as spanner
from google.cloud.spanner_v1 import JsonObject

from .exceptions import Error, ProgrammingError
from .parser import parse_values
@@ -38,6 +39,7 @@
DateStr: spanner.param_types.DATE,
TimestampStr: spanner.param_types.TIMESTAMP,
decimal.Decimal: spanner.param_types.NUMERIC,
JsonObject: spanner.param_types.JSON,
}

SPANNER_RESERVED_KEYWORDS = {
@@ -58,6 +58,7 @@
from .types.type import StructType
from .types.type import Type
from .types.type import TypeCode
from .data_types import JsonObject

from google.cloud.spanner_v1 import param_types
from google.cloud.spanner_v1.client import Client
@@ -132,6 +133,8 @@
"TransactionSelector",
"Type",
"TypeCode",
# Custom spanner related data types
"JsonObject",
# google.cloud.spanner_v1.services
"SpannerClient",
)
@@ -17,6 +17,7 @@
import datetime
import decimal
import math
import json

import six

@@ -28,7 +29,7 @@
from google.cloud._helpers import _datetime_to_rfc3339
from google.cloud.spanner_v1 import TypeCode
from google.cloud.spanner_v1 import ExecuteSqlRequest

from google.cloud.spanner_v1 import JsonObject

# Validation error messages
NUMERIC_MAX_SCALE_ERR_MSG = (
@@ -166,6 +167,10 @@ def _make_value_pb(value):
if isinstance(value, decimal.Decimal):
_assert_numeric_precision_and_scale(value)
return Value(string_value=str(value))
if isinstance(value, JsonObject):
return Value(
string_value=json.dumps(value, sort_keys=True, separators=(",", ":"),)
)
raise ValueError("Unknown type: %s" % (value,))


@@ -0,0 +1,25 @@
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Custom data types for spanner."""


class JsonObject(dict):
    """A ``dict`` subclass that marks a value as Cloud Spanner JSON data.

    Wrapping a mapping (for example a Django ``JSONField`` value) in
    ``JsonObject`` lets the client tell JSON query parameters apart from
    ordinary ``dict`` parameters before a query is built, so the value can
    be bound with the Spanner ``JSON`` type.
    """
@@ -400,6 +400,11 @@ def test_instance_list_backups(
)
expire_time_1_stamp = expire_time_1.strftime("%Y-%m-%dT%H:%M:%S.%fZ")

# Backup tests are failing because of timeout. As a temporary fix
# we are increasing db version time to current time.
# Read more: https://github.com/googleapis/python-spanner/issues/496
database_version_time = datetime.datetime.now(datetime.timezone.utc)

backup1 = shared_instance.backup(
backup_id_1,
database=shared_database,
@@ -15,11 +15,11 @@
import hashlib
import pickle
import pkg_resources

import pytest

from google.cloud import spanner_v1
from google.cloud.spanner_dbapi.connection import connect, Connection
from google.cloud.spanner_v1 import JsonObject
from . import _helpers

DATABASE_NAME = "dbapi-txn"
@@ -328,6 +328,45 @@ def test_DDL_autocommit(shared_instance, dbapi_database):
conn.commit()


@pytest.mark.skipif(_helpers.USE_EMULATOR, reason="Emulator does not support json.")
def test_autocommit_with_json_data(shared_instance, dbapi_database):
    """Verify that, with autocommit enabled, DDL touching JSON columns is
    applied immediately and JSON values round-trip through the DB-API."""
    connection = Connection(shared_instance, dbapi_database)
    connection.autocommit = True

    cursor = connection.cursor()

    # With autocommit on, the CREATE TABLE DDL takes effect at once.
    cursor.execute(
        """
        CREATE TABLE JsonDetails (
            DataId INT64 NOT NULL,
            Details JSON,
        ) PRIMARY KEY (DataId)
        """
    )

    # Write one row whose Details column is a JSON value.
    cursor.execute(
        sql="INSERT INTO JsonDetails (DataId, Details) VALUES (%s, %s)",
        args=(123, JsonObject({"name": "Jakob", "age": "26"})),
    )

    # Fetch the row back.
    cursor.execute("""select * from JsonDetails;""")
    got_rows = cursor.fetchall()

    # The JSON value comes back serialized with sorted keys, no whitespace.
    assert len(got_rows) == 1
    assert got_rows[0][0] == 123
    assert got_rows[0][1] == '{"age":"26","name":"Jakob"}'

    # Clean up the table created by this test.
    cursor.execute("DROP TABLE JsonDetails")
    connection.commit()
    connection.close()


def test_DDL_commit(shared_instance, dbapi_database):
"""Check that DDLs in commit mode are executed on calling `commit()`."""
conn = Connection(shared_instance, dbapi_database)
@@ -16,6 +16,7 @@
import unittest

from google.cloud.spanner_v1 import param_types
from google.cloud.spanner_v1 import JsonObject


class TestParseUtils(unittest.TestCase):
@@ -333,9 +334,11 @@ def test_get_param_types(self):
import datetime
import decimal

from google.cloud.spanner_dbapi.parse_utils import DateStr
from google.cloud.spanner_dbapi.parse_utils import TimestampStr
from google.cloud.spanner_dbapi.parse_utils import get_param_types
from google.cloud.spanner_dbapi.parse_utils import (
DateStr,
TimestampStr,
get_param_types,
)

params = {
"a1": 10,
@@ -349,6 +352,7 @@ def test_get_param_types(self):
"i1": b"bytes",
"j1": None,
"k1": decimal.Decimal("3.194387483193242e+19"),
"l1": JsonObject({"key": "value"}),
}
want_types = {
"a1": param_types.INT64,
@@ -361,6 +365,7 @@ def test_get_param_types(self):
"h1": param_types.DATE,
"i1": param_types.BYTES,
"k1": param_types.NUMERIC,
"l1": param_types.JSON,
}
got_types = get_param_types(params)
self.assertEqual(got_types, want_types)

0 comments on commit bc5ddc3

Please sign in to comment.