diff --git a/.flake8 b/.flake8
index da87de0..9e634cc 100644
--- a/.flake8
+++ b/.flake8
@@ -1,4 +1,4 @@
[flake8]
-ignore = E203,E303,E402,E501,E722,W391,F401,W292,F811
+ignore = E203,E303,E402,E501,E722,W391,F401,W292,F811,E302
max-line-length = 150
max-complexity = 22
diff --git a/.github/workflows/python-ci.yml b/.github/workflows/python-ci.yml
new file mode 100644
index 0000000..71b18a4
--- /dev/null
+++ b/.github/workflows/python-ci.yml
@@ -0,0 +1,32 @@
+name: Python CI
+
+on:
+ workflow_dispatch:
+ push:
+ branches: [develop]
+ pull_request:
+ branches: [develop]
+
+jobs:
+ test:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.12
+ uses: actions/setup-python@v5
+ with:
+ python-version: '3.12'
+ cache: 'pip'
+
+ - name: Install dependencies
+ run: |
+ python -m pip install --upgrade pip
+ pip install -r dev_requirements.txt --no-cache-dir
+
+ - name: Create dummy poly directory for tests
+ run: mkdir -p polyapi/poly
+
+ - name: Run unit tests
+ run: python -m unittest discover -s tests -t . -v
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index 135534e..9ca9ad0 100644
--- a/.gitignore
+++ b/.gitignore
@@ -34,4 +34,5 @@ function_add_test.py
lib_test*.py
polyapi/poly
polyapi/vari
+polyapi/tabi
polyapi/schemas
diff --git a/README.md b/README.md
index 9c4551f..84835bd 100644
--- a/README.md
+++ b/README.md
@@ -70,24 +70,6 @@ def bar():
return "Hello World"
```
-## See Server Function Logs
-
-In order to see function logs, please first set `logsEnabled` to `true` in Canopy for the function.
-
-https://na1.polyapi.io/canopy/polyui/collections/server-functions
-
-Then in your code, get the poly logger and log with it like so:
-
-```python
-logger = logging.getLogger("poly")
-def bar():
- logger.warning("I AM THE LOG")
- return "Hello World"
-```
-
-Finally, click the "Show Logs" button to see your server function logs in Canopy!
-
-
## Complex Types In Server Functions
You can define arbitrarily complex argument and return types using TypedDicts.
diff --git a/dev_requirements.txt b/dev_requirements.txt
index 8f81da5..79a3290 100644
--- a/dev_requirements.txt
+++ b/dev_requirements.txt
@@ -1,3 +1,5 @@
-r requirements.txt
mock==5.2.0
-pytest
\ No newline at end of file
+pytest
+flask==3.0.3
+
diff --git a/polyapi/auth.py b/polyapi/auth.py
index 199cfef..3d6c325 100644
--- a/polyapi/auth.py
+++ b/polyapi/auth.py
@@ -1,5 +1,4 @@
from typing import List, Dict, Any, Tuple
-import uuid
from polyapi.typedefs import PropertySpecification
from polyapi.utils import parse_arguments, get_type_and_def
@@ -26,7 +25,8 @@ async def getToken(clientId: str, clientSecret: str, scopes: List[str], callback
Function ID: {function_id}
\"""
- eventsClientId = "{client_id}"
+ from polyapi.poly.client_id import client_id
+ eventsClientId = client_id
function_id = "{function_id}"
options = options or {{}}
@@ -165,7 +165,7 @@ def render_auth_function(
func_str = ""
if function_name == "getToken":
- func_str = GET_TOKEN_TEMPLATE.format(function_id=function_id, description=function_description, client_id=uuid.uuid4().hex)
+ func_str = GET_TOKEN_TEMPLATE.format(function_id=function_id, description=function_description)
elif function_name == "introspectToken":
func_str = INTROSPECT_TOKEN_TEMPLATE.format(function_id=function_id, description=function_description)
elif function_name == "refreshToken":
diff --git a/polyapi/deployables.py b/polyapi/deployables.py
index a3feb61..fc0a6f3 100644
--- a/polyapi/deployables.py
+++ b/polyapi/deployables.py
@@ -1,4 +1,6 @@
import os
+import string
+import random
import subprocess
import json
import hashlib
@@ -65,20 +67,21 @@ class SyncDeployment(TypedDict, total=False):
context: str
name: str
description: str
- type: str
+ type: DeployableTypes
fileRevision: str
file: str
types: DeployableFunctionTypes
- typeSchemas: Dict[str, any]
+ typeSchemas: Dict[str, Any]
dependencies: List[str]
- config: Dict[str, any]
+ config: Dict[str, Any]
instance: str
- id: Optional[str] = None
- deployed: Optional[str] = None
+ id: Optional[str]
+ deployed: Optional[str]
+
DeployableTypeEntries: List[Tuple[DeployableTypeNames, DeployableTypes]] = [
- ("PolyServerFunction", "server-function"),
- ("PolyClientFunction", "client-function"),
+ ("PolyServerFunction", "server-function"), # type: ignore
+ ("PolyClientFunction", "client-function"), # type: ignore
]
DeployableTypeToName: Dict[DeployableTypeNames, DeployableTypes] = {name: type for name, type in DeployableTypeEntries}
@@ -118,13 +121,13 @@ def get_all_deployable_files_windows(config: PolyDeployConfig) -> List[str]:
pattern = ' '.join(f"/C:\"polyConfig: {name}\"" for name in config["type_names"]) or '/C:"polyConfig"'
exclude_command = f" | findstr /V /I \"{exclude_pattern}\"" if exclude_pattern else ''
- search_command = f" | findstr /S /M /I /F:/ {pattern} *.*"
+ search_command = f" | findstr /M /I /F:/ {pattern}"
result = []
for dir_path in config["include_dirs"]:
if dir_path != '.':
include_pattern = " ".join(f"{dir_path}*.{f}" for f in config["include_files_or_extensions"]) or "*"
- dir_command = f"dir {include_pattern} /S /P /B > NUL"
+ dir_command = f"dir {include_pattern} /S /P /B"
full_command = f"{dir_command}{exclude_command}{search_command}"
try:
output = subprocess.check_output(full_command, shell=True, text=True)
@@ -175,7 +178,7 @@ def get_git_revision(branch_or_tag: str = "HEAD") -> str:
return check_output(["git", "rev-parse", "--short", branch_or_tag], text=True).strip()
except CalledProcessError:
# Return a random 7-character hash as a fallback
- return "".join(format(ord(c), 'x') for c in os.urandom(4))[:7]
+ return "".join([random.choice(string.ascii_letters + string.digits) for _ in range(7)])
def get_cache_deployments_revision() -> str:
"""Retrieve the cache deployments revision from a file."""
diff --git a/polyapi/execute.py b/polyapi/execute.py
index d066574..5d75048 100644
--- a/polyapi/execute.py
+++ b/polyapi/execute.py
@@ -1,9 +1,13 @@
from typing import Dict, Optional
import requests
+import os
+import logging
from requests import Response
from polyapi.config import get_api_key_and_url, get_mtls_config
from polyapi.exceptions import PolyApiException
+logger = logging.getLogger("poly")
+
def direct_execute(function_type, function_id, data) -> Response:
""" execute a specific function id/type
"""
@@ -13,7 +17,11 @@ def direct_execute(function_type, function_id, data) -> Response:
endpoint_info = requests.post(url, json=data, headers=headers)
if endpoint_info.status_code < 200 or endpoint_info.status_code >= 300:
- raise PolyApiException(f"{endpoint_info.status_code}: {endpoint_info.content.decode('utf-8', errors='ignore')}")
+ error_content = endpoint_info.content.decode("utf-8", errors="ignore")
+ if function_type == 'api' and os.getenv("LOGS_ENABLED"):
+ raise PolyApiException(f"Error executing api function with id: {function_id}. Status code: {endpoint_info.status_code}. Request data: {data}, Response: {error_content}")
+ elif function_type != 'api':
+ raise PolyApiException(f"{endpoint_info.status_code}: {error_content}")
endpoint_info_data = endpoint_info.json()
request_params = endpoint_info_data.copy()
@@ -38,9 +46,12 @@ def direct_execute(function_type, function_id, data) -> Response:
**request_params
)
- if resp.status_code < 200 or resp.status_code >= 300:
+ if (resp.status_code < 200 or resp.status_code >= 300):
error_content = resp.content.decode("utf-8", errors="ignore")
- raise PolyApiException(f"{resp.status_code}: {error_content}")
+ if function_type == 'api' and os.getenv("LOGS_ENABLED"):
+ logger.error(f"Error executing api function with id: {function_id}. Status code: {resp.status_code}. Request data: {data}, Response: {error_content}")
+ elif function_type != 'api':
+ raise PolyApiException(f"{resp.status_code}: {error_content}")
return resp
@@ -59,9 +70,12 @@ def execute(function_type, function_id, data) -> Response:
headers=headers,
)
- if resp.status_code < 200 or resp.status_code >= 300:
+    if resp.status_code < 200 or resp.status_code >= 300:
error_content = resp.content.decode("utf-8", errors="ignore")
- raise PolyApiException(f"{resp.status_code}: {error_content}")
+ if function_type == 'api' and os.getenv("LOGS_ENABLED"):
+ logger.error(f"Error executing api function with id: {function_id}. Status code: {resp.status_code}. Request data: {data}, Response: {error_content}")
+ elif function_type != 'api':
+ raise PolyApiException(f"{resp.status_code}: {error_content}")
return resp
diff --git a/polyapi/generate.py b/polyapi/generate.py
index c0cd9e0..16793fc 100644
--- a/polyapi/generate.py
+++ b/polyapi/generate.py
@@ -1,9 +1,12 @@
import json
import requests
import os
+import uuid
import shutil
import logging
import tempfile
+
+from copy import deepcopy
from typing import Any, List, Optional, Tuple, cast
from .auth import render_auth_function
@@ -11,11 +14,12 @@
from .poly_schemas import generate_schemas
from .webhook import render_webhook_handle
-from .typedefs import PropertySpecification, SchemaSpecDto, SpecificationDto, VariableSpecDto
+from .typedefs import PropertySpecification, SchemaSpecDto, SpecificationDto, VariableSpecDto, TableSpecDto
from .api import render_api_function
from .server import render_server_function
from .utils import add_import_to_init, get_auth_headers, init_the_init, print_green, to_func_namespace
from .variables import generate_variables
+from .poly_tables import generate_tables
from .config import get_api_key_and_url, get_direct_execute_config, get_cached_generate_args
SUPPORTED_FUNCTION_TYPES = {
@@ -26,7 +30,7 @@
"webhookHandle",
}
-SUPPORTED_TYPES = SUPPORTED_FUNCTION_TYPES | {"serverVariable", "schema", "snippet"}
+SUPPORTED_TYPES = SUPPORTED_FUNCTION_TYPES | {"serverVariable", "schema", "snippet", "table"}
X_POLY_REF_WARNING = '''"""
@@ -136,19 +140,22 @@ def parse_function_specs(
limit_ids: List[str] | None = None, # optional list of ids to limit to
) -> List[SpecificationDto]:
functions = []
- for spec in specs:
- if not spec:
+ for raw_spec in specs:
+ if not raw_spec:
continue
# For no_types mode, we might not have function data, but we still want to include the spec
# if it's a supported function type
- if spec["type"] not in SUPPORTED_FUNCTION_TYPES:
+ if raw_spec["type"] not in SUPPORTED_FUNCTION_TYPES:
continue
# Skip if we have a limit and this spec is not in it
- if limit_ids and spec.get("id") not in limit_ids:
+ if limit_ids and raw_spec.get("id") not in limit_ids:
continue
+ # Should really be fixed in specs api, but for now handle json strings in arg schemas
+ spec = normalize_args_schema(raw_spec)
+
# For customFunction, check language if we have function data
if spec["type"] == "customFunction":
if spec.get("language") and spec["language"] != "python":
@@ -190,16 +197,18 @@ def read_cached_specs() -> List[SpecificationDto]:
return json.loads(f.read())
-def get_variables() -> List[VariableSpecDto]:
- specs = read_cached_specs()
+def get_variables(specs: List[SpecificationDto]) -> List[VariableSpecDto]:
return [cast(VariableSpecDto, spec) for spec in specs if spec["type"] == "serverVariable"]
-def get_schemas() -> List[SchemaSpecDto]:
- specs = read_cached_specs()
+def get_schemas(specs: List[SpecificationDto]) -> List[SchemaSpecDto]:
return [cast(SchemaSpecDto, spec) for spec in specs if spec["type"] == "schema"]
+def get_tables(specs: List[SpecificationDto]) -> List[TableSpecDto]:
+ return [cast(TableSpecDto, spec) for spec in specs if spec["type"] == "table"]
+
+
def remove_old_library():
currdir = os.path.dirname(os.path.abspath(__file__))
path = os.path.join(currdir, "poly")
@@ -214,6 +223,10 @@ def remove_old_library():
if os.path.exists(path):
shutil.rmtree(path)
+ path = os.path.join(currdir, "tabi")
+ if os.path.exists(path):
+ shutil.rmtree(path)
+
def create_empty_schemas_module():
"""Create an empty schemas module for no-types mode so user code can still import from polyapi.schemas"""
@@ -272,6 +285,14 @@ def __class_getitem__(cls, item):
''')
+def _generate_client_id() -> None:
+ full_path = os.path.dirname(os.path.abspath(__file__))
+ full_path = os.path.join(full_path, "poly", "client_id.py")
+ with open(full_path, "w") as f:
+ f.write(f'client_id = "{uuid.uuid4().hex}"')
+
+
+
def generate_from_cache() -> None:
"""
Generate using cached values after non-explicit call.
@@ -286,6 +307,37 @@ def generate_from_cache() -> None:
)
+def _parse_arg_schema(value: Any) -> Any:
+ if isinstance(value, str):
+ text = value.strip()
+ if text and text[0] in "{[":
+ try:
+ return json.loads(text)
+ except json.JSONDecodeError:
+ logging.warning("Could not parse function argument schema (leaving as str): %s", text[:200])
+ return value
+
+
+def normalize_args_schema(
+ raw_spec: SpecificationDto
+) -> SpecificationDto:
+ spec = deepcopy(raw_spec)
+
+ function_block = spec.get("function")
+ if not isinstance(function_block, dict):
+ return spec
+ arguments_block = function_block.get("arguments")
+ if not isinstance(arguments_block, list):
+ return spec
+
+ for argument in arguments_block:
+ arg_type = argument.get("type")
+ if isinstance(arg_type, dict) and "schema" in arg_type:
+ arg_type["schema"] = _parse_arg_schema(arg_type["schema"])
+
+ return spec
+
+
def generate(contexts: Optional[List[str]] = None, names: Optional[List[str]] = None, function_ids: Optional[List[str]] = None, no_types: bool = False) -> None:
generate_msg = f"Generating Poly Python SDK for contexts ${contexts}..." if contexts else "Generating Poly Python SDK..."
print(generate_msg, end="", flush=True)
@@ -297,9 +349,11 @@ def generate(contexts: Optional[List[str]] = None, names: Optional[List[str]] =
limit_ids: List[str] = [] # useful for narrowing down generation to a single function to debug
functions = parse_function_specs(specs, limit_ids=limit_ids)
+ _generate_client_id()
+
# Only process schemas if no_types is False
if not no_types:
- schemas = get_schemas()
+ schemas = get_schemas(specs)
schema_index = build_schema_index(schemas)
if schemas:
schema_limit_ids: List[str] = [] # useful for narrowing down generation to a single function to debug
@@ -323,7 +377,11 @@ def generate(contexts: Optional[List[str]] = None, names: Optional[List[str]] =
)
exit()
- variables = get_variables()
+ tables = get_tables(specs)
+ if tables:
+ generate_tables(tables)
+
+ variables = get_variables(specs)
if variables:
generate_variables(variables)
@@ -335,14 +393,7 @@ def generate(contexts: Optional[List[str]] = None, names: Optional[List[str]] =
def clear() -> None:
- base = os.path.dirname(os.path.abspath(__file__))
- poly_path = os.path.join(base, "poly")
- if os.path.exists(poly_path):
- shutil.rmtree(poly_path)
-
- vari_path = os.path.join(base, "vari")
- if os.path.exists(vari_path):
- shutil.rmtree(vari_path)
+ remove_old_library()
print("Cleared!")
diff --git a/polyapi/poly_schemas.py b/polyapi/poly_schemas.py
index 30d5ab5..c370c77 100644
--- a/polyapi/poly_schemas.py
+++ b/polyapi/poly_schemas.py
@@ -121,7 +121,7 @@ def add_schema_file(
# Read current __init__.py content if it exists
init_content = ""
if os.path.exists(init_path):
- with open(init_path, "r") as f:
+ with open(init_path, "r", encoding='utf-8') as f:
init_content = f.read()
# Prepare new content to append to __init__.py
@@ -129,12 +129,12 @@ def add_schema_file(
# Use temporary files for atomic writes
# Write to __init__.py atomically
- with tempfile.NamedTemporaryFile(mode="w", delete=False, dir=full_path, suffix=".tmp") as temp_init:
+ with tempfile.NamedTemporaryFile(mode="w", delete=False, dir=full_path, suffix=".tmp", encoding='utf-8') as temp_init:
temp_init.write(new_init_content)
temp_init_path = temp_init.name
# Write to schema file atomically
- with tempfile.NamedTemporaryFile(mode="w", delete=False, dir=full_path, suffix=".tmp") as temp_schema:
+ with tempfile.NamedTemporaryFile(mode="w", delete=False, dir=full_path, suffix=".tmp", encoding='utf-8') as temp_schema:
temp_schema.write(schema_defs)
temp_schema_path = temp_schema.name
@@ -205,7 +205,7 @@ def create_schema(
def add_schema_to_init(full_path: str, spec: SchemaSpecDto):
init_the_init(full_path, code_imports="")
init_path = os.path.join(full_path, "__init__.py")
- with open(init_path, "a") as f:
+ with open(init_path, "a", encoding='utf-8') as f:
f.write(render_poly_schema(spec) + "\n\n")
diff --git a/polyapi/poly_tables.py b/polyapi/poly_tables.py
new file mode 100644
index 0000000..3b51913
--- /dev/null
+++ b/polyapi/poly_tables.py
@@ -0,0 +1,456 @@
+import os
+import requests
+from typing_extensions import NotRequired, TypedDict
+from typing import List, Union, Type, Dict, Any, Literal, Tuple, Optional, get_args, get_origin
+from polyapi.utils import add_import_to_init, init_the_init
+from polyapi.typedefs import TableSpecDto
+from polyapi.constants import JSONSCHEMA_TO_PYTHON_TYPE_MAP
+
+
+def scrub_keys(e: Exception) -> Dict[str, Any]:
+ """
+ Scrub the keys of an exception to remove sensitive information.
+ Returns a dictionary with the error message and type.
+ """
+ return {
+ "error": str(e),
+ "type": type(e).__name__,
+ "message": str(e),
+ "args": getattr(e, 'args', None)
+ }
+
+
+def execute_query(table_id, method, query):
+ from polyapi import polyCustom
+ from polyapi.poly.client_id import client_id
+ try:
+        url = f"/tables/{table_id}/{method}?clientId={client_id}"  # NOTE(review): relative URL — requests.post needs an absolute URL; confirm a base URL/session is applied
+        headers = {
+            'x-poly-execution-id': polyCustom.get('executionId')
+        }
+ response = requests.post(url, json=query, headers=headers)
+ response.raise_for_status()
+ return response.json()
+ except Exception as e:
+ return scrub_keys(e)
+
+
+def first_result(rsp):
+ if isinstance(rsp, dict) and isinstance(rsp.get('results'), list):
+ return rsp['results'][0] if rsp['results'] else None
+ return rsp
+
+
+_key_transform_map = {
+ "not_": "not",
+ "in": "in",
+ "starts_with": "startsWith",
+    "ends_with": "endsWith",
+ "not_in": "notIn",
+}
+
+
+def _transform_keys(obj: Any) -> Any:
+ if isinstance(obj, dict):
+ return {
+ _key_transform_map.get(k, k): _transform_keys(v)
+ for k, v in obj.items()
+ }
+
+ elif isinstance(obj, list):
+ return [_transform_keys(v) for v in obj]
+
+ else:
+ return obj
+
+
+def transform_query(query: dict) -> dict:
+    if query.get("where") or query.get("order_by"):
+        return {
+            **query,
+            "where": _transform_keys(query["where"]) if query.get("where") else None,
+            "orderBy": query["order_by"] if query.get("order_by") else None
+        }
+ }
+
+ return query
+
+
+TABI_TABLE_TEMPLATE = '''
+{table_name}Columns = Literal[{table_columns}]
+
+
+
+{table_row_classes}
+
+
+
+{table_row_subset_class}
+
+
+
+{table_where_class}
+
+
+
+class {table_name}SelectManyQuery(TypedDict):
+ where: NotRequired[{table_name}WhereFilter]
+ order_by: NotRequired[Dict[{table_name}Columns, SortOrder]]
+ limit: NotRequired[int]
+ offset: NotRequired[int]
+
+
+
+class {table_name}SelectOneQuery(TypedDict):
+ where: NotRequired[{table_name}WhereFilter]
+ order_by: NotRequired[Dict[{table_name}Columns, SortOrder]]
+
+
+
+class {table_name}InsertOneQuery(TypedDict):
+ data: {table_name}Subset
+
+
+
+class {table_name}InsertManyQuery(TypedDict):
+ data: List[{table_name}Subset]
+
+
+
+class {table_name}UpdateManyQuery(TypedDict):
+ where: NotRequired[{table_name}WhereFilter]
+ data: {table_name}Subset
+
+
+
+class {table_name}DeleteQuery(TypedDict):
+ where: NotRequired[{table_name}WhereFilter]
+
+
+
+class {table_name}QueryResults(TypedDict):
+ results: List[{table_name}Row]
+ pagination: None # Pagination not yet supported
+
+
+
+class {table_name}CountQuery(TypedDict):
+ where: NotRequired[{table_name}WhereFilter]
+
+
+
+class {table_name}:{table_description}
+ table_id = "{table_id}"
+
+ @overload
+ @staticmethod
+ def count(query: {table_name}CountQuery) -> PolyCountResult: ...
+ @overload
+ @staticmethod
+ def count(*, where: Optional[{table_name}WhereFilter]) -> PolyCountResult: ...
+
+ @staticmethod
+ def count(*args, **kwargs) -> PolyCountResult:
+ if args:
+ if len(args) != 1 or not isinstance(args[0], dict):
+ raise TypeError("Expected query as a single argument or as kwargs")
+ query = args[0]
+ else:
+ query = kwargs
+ return execute_query({table_name}.table_id, "count", transform_query(query))
+
+ @overload
+ @staticmethod
+ def select_many(query: {table_name}SelectManyQuery) -> {table_name}QueryResults: ...
+ @overload
+ @staticmethod
+ def select_many(*, where: Optional[{table_name}WhereFilter], order_by: Optional[Dict[{table_name}Columns, SortOrder]], limit: Optional[int], offset: Optional[int]) -> {table_name}QueryResults: ...
+
+ @staticmethod
+ def select_many(*args, **kwargs) -> {table_name}QueryResults:
+ if args:
+ if len(args) != 1 or not isinstance(args[0], dict):
+ raise TypeError("Expected query as a single argument or as kwargs")
+ query = args[0]
+ else:
+ query = kwargs
+ if query.get('limit') is None:
+ query['limit'] = 1000
+ if query['limit'] > 1000:
+ raise ValueError("Cannot select more than 1000 rows at a time.")
+ return execute_query({table_name}.table_id, "select", transform_query(query))
+
+ @overload
+ @staticmethod
+ def select_one(query: {table_name}SelectOneQuery) -> {table_name}Row: ...
+ @overload
+ @staticmethod
+ def select_one(*, where: Optional[{table_name}WhereFilter], order_by: Optional[Dict[{table_name}Columns, SortOrder]]) -> {table_name}Row: ...
+
+ @staticmethod
+ def select_one(*args, **kwargs) -> {table_name}Row:
+ if args:
+ if len(args) != 1 or not isinstance(args[0], dict):
+ raise TypeError("Expected query as a single argument or as kwargs")
+ query = args[0]
+ else:
+ query = kwargs
+ query['limit'] = 1
+ return first_result(execute_query({table_name}.table_id, "select", transform_query(query)))
+
+ @overload
+ @staticmethod
+ def insert_many(query: {table_name}InsertManyQuery) -> {table_name}QueryResults: ...
+ @overload
+ @staticmethod
+ def insert_many(*, data: List[{table_name}Subset]) -> {table_name}QueryResults: ...
+
+ @staticmethod
+ def insert_many(*args, **kwargs) -> {table_name}QueryResults:
+ if args:
+ if len(args) != 1 or not isinstance(args[0], dict):
+ raise TypeError("Expected query as a single argument or as kwargs")
+ query = args[0]
+ else:
+ query = kwargs
+ if len(query['data']) > 1000:
+ raise ValueError("Cannot insert more than 1000 rows at a time.")
+ return execute_query({table_name}.table_id, "insert", query)
+
+ @overload
+ @staticmethod
+ def insert_one(query: {table_name}InsertOneQuery) -> {table_name}Row: ...
+ @overload
+ @staticmethod
+ def insert_one(*, data: {table_name}Subset) -> {table_name}Row: ...
+
+ @staticmethod
+ def insert_one(*args, **kwargs) -> {table_name}Row:
+ if args:
+ if len(args) != 1 or not isinstance(args[0], dict):
+ raise TypeError("Expected query as a single argument or as kwargs")
+ query = args[0]
+ else:
+ query = kwargs
+ return first_result(execute_query({table_name}.table_id, "insert", {{ 'data': [query['data']] }}))
+
+ @overload
+ @staticmethod
+ def upsert_many(query: {table_name}InsertManyQuery) -> {table_name}QueryResults: ...
+ @overload
+ @staticmethod
+ def upsert_many(*, data: List[{table_name}Subset]) -> {table_name}QueryResults: ...
+
+ @staticmethod
+ def upsert_many(*args, **kwargs) -> {table_name}QueryResults:
+ if args:
+ if len(args) != 1 or not isinstance(args[0], dict):
+ raise TypeError("Expected query as a single argument or as kwargs")
+ query = args[0]
+ else:
+ query = kwargs
+        if len(query['data']) > 1000:
+ raise ValueError("Cannot upsert more than 1000 rows at a time.")
+ return execute_query({table_name}.table_id, "upsert", query)
+
+ @overload
+ @staticmethod
+ def upsert_one(query: {table_name}InsertOneQuery) -> {table_name}Row: ...
+ @overload
+ @staticmethod
+ def upsert_one(*, data: {table_name}Subset) -> {table_name}Row: ...
+
+ @staticmethod
+ def upsert_one(*args, **kwargs) -> {table_name}Row:
+ if args:
+ if len(args) != 1 or not isinstance(args[0], dict):
+ raise TypeError("Expected query as a single argument or as kwargs")
+ query = args[0]
+ else:
+ query = kwargs
+ return first_result(execute_query({table_name}.table_id, "upsert", {{ 'data': [query['data']] }}))
+
+ @overload
+ @staticmethod
+ def update_many(query: {table_name}UpdateManyQuery) -> {table_name}QueryResults: ...
+ @overload
+ @staticmethod
+ def update_many(*, where: Optional[{table_name}WhereFilter], data: {table_name}Subset) -> {table_name}QueryResults: ...
+
+ @staticmethod
+ def update_many(*args, **kwargs) -> {table_name}QueryResults:
+ if args:
+ if len(args) != 1 or not isinstance(args[0], dict):
+ raise TypeError("Expected query as a single argument or as kwargs")
+ query = args[0]
+ else:
+ query = kwargs
+ return execute_query({table_name}.table_id, "update", transform_query(query))
+
+ @overload
+ @staticmethod
+ def delete_many(query: {table_name}DeleteQuery) -> PolyDeleteResults: ...
+ @overload
+ @staticmethod
+ def delete_many(*, where: Optional[{table_name}WhereFilter]) -> PolyDeleteResults: ...
+
+ @staticmethod
+ def delete_many(*args, **kwargs) -> PolyDeleteResults:
+ if args:
+ if len(args) != 1 or not isinstance(args[0], dict):
+ raise TypeError("Expected query as a single argument or as kwargs")
+ query = args[0]
+ else:
+ query = kwargs
+ return execute_query({table_name}.table_id, "delete", query)
+'''
+
+
+def _get_column_type_str(name: str, schema: Dict[str, Any], is_required: bool) -> str:
+ result = ""
+
+ col_type = schema.get("type", "object")
+ if isinstance(col_type, list):
+ subtypes = [_get_column_type_str(name, { **schema, "type": t }, is_required) for t in col_type]
+ result = f"Union[{', '.join(subtypes)}]"
+ elif col_type == "array":
+ if isinstance(schema["items"], list):
+ subtypes = [_get_column_type_str(f"{name}{i}", s, True) for i, s in enumerate(schema["items"])]
+ result = f"Tuple[{', '.join(subtypes)}]"
+ elif isinstance(schema["items"], dict):
+ result = f"List[{_get_column_type_str(name, schema['items'], True)}]"
+ else:
+ result = "List[Any]"
+ elif col_type == "object":
+ if isinstance(schema.get("patternProperties"), dict):
+ # TODO: Handle multiple pattern properties
+ result = f"Dict[str, {_get_column_type_str(f'{name}_', schema['patternProperties'], True)}]"
+ elif isinstance(schema.get("properties"), dict) and len(schema["properties"].values()) > 0:
+ # TODO: Handle x-poly-refs
+ result = f'"{name}"'
+ else:
+ result = "Dict[str, Any]"
+ else:
+ result = JSONSCHEMA_TO_PYTHON_TYPE_MAP.get(schema["type"], "")
+
+ if result:
+ return result if is_required else f"Optional[{result}]"
+
+ return "Any"
+
+
+def _render_table_row_classes(table_name: str, schema: Dict[str, Any]) -> str:
+ from polyapi.schema import wrapped_generate_schema_types
+
+ output = wrapped_generate_schema_types(schema, f"{table_name}Row", "Dict")
+
+ return output[1].split("\n", 1)[1].strip()
+
+
+def _render_table_subset_class(table_name: str, columns: List[Tuple[str, Dict[str, Any]]], required: List[str]) -> str:
+ # Generate class which can match any subset of a table row
+ lines = [f"class {table_name}Subset(TypedDict):"]
+
+ for name, schema in columns:
+ type_str = _get_column_type_str(f"_{table_name}Row{name}", schema, name in required)
+ lines.append(f" {name}: NotRequired[{type_str}]")
+
+ return "\n".join(lines)
+
+
+def _render_table_where_class(table_name: str, columns: List[Tuple[str, Dict[str, Any]]], required: List[str]) -> str:
+ # Generate class for the 'where' part of the query
+ lines = [f"class {table_name}WhereFilter(TypedDict):"]
+
+ for name, schema in columns:
+ ftype_str = ""
+ type_str = _get_column_type_str(f"_{table_name}Row{name}", schema, True) # force required to avoid wrapping type in Optional[]
+ is_required = name in required
+ if type_str == "bool":
+ ftype_str = "BooleanFilter" if is_required else "NullableBooleanFilter"
+ elif type_str == "str":
+ ftype_str = "StringFilter" if is_required else "NullableStringFilter"
+ elif type_str in ["int", "float"]:
+ ftype_str = "NumberFilter" if is_required else "NullableNumberFilter"
+ elif is_required == False:
+ type_str = "None"
+ ftype_str = "NullableObjectFilter"
+
+ if ftype_str:
+ lines.append(f" {name}: NotRequired[Union[{type_str}, {ftype_str}]]")
+
+ lines.append(f' AND: NotRequired[Union["{table_name}WhereFilter", List["{table_name}WhereFilter"]]]')
+ lines.append(f' OR: NotRequired[List["{table_name}WhereFilter"]]')
+ lines.append(f' NOT: NotRequired[Union["{table_name}WhereFilter", List["{table_name}WhereFilter"]]]')
+
+ return "\n".join(lines)
+
+
+def _render_table(table: TableSpecDto) -> str:
+ columns = list(table["schema"]["properties"].items())
+ required_colunms = table["schema"].get("required", [])
+
+ table_columns = ",".join([ f'"{k}"' for k,_ in columns])
+ table_row_classes = _render_table_row_classes(table["name"], table["schema"])
+ table_row_subset_class = _render_table_subset_class(table["name"], columns, required_colunms)
+ table_where_class = _render_table_where_class(table["name"], columns, required_colunms)
+ if table.get("description", ""):
+ table_description = '\n """'
+ table_description += '\n '.join(table["description"].replace('"', "'").split("\n"))
+ table_description += '\n """'
+ else:
+ table_description = ""
+
+ return TABI_TABLE_TEMPLATE.format(
+ table_name=table["name"],
+ table_id=table["id"],
+ table_description=table_description,
+ table_columns=table_columns,
+ table_row_classes=table_row_classes,
+ table_row_subset_class=table_row_subset_class,
+ table_where_class=table_where_class,
+ )
+
+
+def generate_tables(tables: List[TableSpecDto]):
+ for table in tables:
+ _create_table(table)
+
+
+def _create_table(table: TableSpecDto) -> None:
+ folders = ["tabi"]
+ if table["context"]:
+ folders += table["context"].split(".")
+
+ # build up the full_path by adding all the folders
+ base_path = os.path.join(os.path.dirname(os.path.abspath(__file__)))
+ full_path = base_path
+
+ for idx, folder in enumerate(folders):
+ full_path = os.path.join(full_path, folder)
+ if not os.path.exists(full_path):
+ os.makedirs(full_path)
+ next = folders[idx + 1] if idx + 1 < len(folders) else None
+ if next:
+ add_import_to_init(full_path, next, "")
+
+ init_path = os.path.join(full_path, "__init__.py")
+
+ imports = "\n".join([
+        "from typing_extensions import NotRequired, Required, TypedDict",
+        "from typing import Union, List, Dict, Any, Literal, Optional, overload",
+ "from polyapi.poly_tables import execute_query, first_result, transform_query",
+ "from polyapi.typedefs import Table, PolyCountResult, PolyDeleteResults, SortOrder, StringFilter, NullableStringFilter, NumberFilter, NullableNumberFilter, BooleanFilter, NullableBooleanFilter, NullableObjectFilter",
+ ])
+ table_contents = _render_table(table)
+
+ file_contents = ""
+ if os.path.exists(init_path):
+ with open(init_path, "r") as f:
+ file_contents = f.read()
+
+ with open(init_path, "w") as f:
+ if not file_contents.startswith(imports):
+ f.write(imports + "\n\n\n")
+ if file_contents:
+ f.write(file_contents + "\n\n\n")
+ f.write(table_contents)
diff --git a/polyapi/prepare.py b/polyapi/prepare.py
index 565d139..b1580e2 100644
--- a/polyapi/prepare.py
+++ b/polyapi/prepare.py
@@ -138,11 +138,13 @@ def prepare_deployables(lazy: bool = False, disable_docs: bool = False, disable_
write_updated_deployable(deployable, disable_docs)
# Re-stage any updated staged files.
staged = subprocess.check_output('git diff --name-only --cached', shell=True, text=True, ).split('\n')
+ rootPath = subprocess.check_output('git rev-parse --show-toplevel', shell=True, text=True).replace('\n', '')
for deployable in dirty_deployables:
try:
- if deployable["file"] in staged:
- print(f'Staging {deployable["file"]}')
- subprocess.run(['git', 'add', deployable["file"]])
+ deployableName = deployable["file"].replace('\\', '/').replace(f"{rootPath}/", '')
+ if deployableName in staged:
+ print(f'Staging {deployableName}')
+ subprocess.run(['git', 'add', deployableName])
except:
print('Warning: File staging failed, check that all files are staged properly.')
diff --git a/polyapi/schema.py b/polyapi/schema.py
index 1523e7f..29ecbe3 100644
--- a/polyapi/schema.py
+++ b/polyapi/schema.py
@@ -93,7 +93,7 @@ def generate_schema_types(input_data: Dict, root=None):
with contextlib.redirect_stdout(None):
process_config(config, [tmp_input])
- with open(tmp_output) as f:
+ with open(tmp_output, encoding='utf-8') as f:
output = f.read()
output = clean_malformed_examples(output)
diff --git a/polyapi/sync.py b/polyapi/sync.py
index 9dcfac6..850538b 100644
--- a/polyapi/sync.py
+++ b/polyapi/sync.py
@@ -1,6 +1,7 @@
import os
from datetime import datetime
from typing import List, Dict
+from typing import cast
import requests
from polyapi.utils import get_auth_headers
@@ -23,19 +24,21 @@ def read_file(file_path: str) -> str:
return file.read()
def group_by(items: List[Dict], key: str) -> Dict[str, List[Dict]]:
- grouped = {}
+    grouped: Dict[str, List[Dict]] = {}
for item in items:
grouped.setdefault(item[key], []).append(item)
return grouped
def remove_deployable_function(deployable: SyncDeployment) -> bool:
api_key, _ = get_api_key_and_url()
+ if not api_key:
+ raise Exception("Missing api key!")
headers = get_auth_headers(api_key)
url = f'{deployable["instance"]}/functions/{deployable["type"].replace("-function", "")}/{deployable["id"]}'
response = requests.get(url, headers=headers)
if response.status_code != 200:
return False
- requests.delete(url, headers)
+ requests.delete(url, headers=headers)
return True
def remove_deployable(deployable: SyncDeployment) -> bool:
@@ -47,6 +50,8 @@ def remove_deployable(deployable: SyncDeployment) -> bool:
def sync_function_and_get_id(deployable: SyncDeployment, code: str) -> str:
api_key, _ = get_api_key_and_url()
+ if not api_key:
+        raise Exception("Missing api key!")
headers = get_auth_headers(api_key)
url = f'{deployable["instance"]}/functions/{deployable["type"].replace("-function", "")}'
payload = {
@@ -129,15 +134,15 @@ def sync_deployables(dry_run: bool, instance: str | None = None):
else:
sync_deployment = { **deployable, "instance": instance }
if git_revision == deployable['gitRevision']:
- deployment = sync_deployable(sync_deployment)
+ deployment = sync_deployable(cast(SyncDeployment, sync_deployment))
if previous_deployment:
previous_deployment.update(deployment)
else:
deployable['deployments'].insert(0, deployment)
else:
- found = remove_deployable(sync_deployment)
+ found = remove_deployable(cast(SyncDeployment, sync_deployment))
action = 'NOT FOUND' if not found else action
- remove_index = all_deployables.index(deployable)
+ remove_index = all_deployables.index(cast(DeployableRecord, deployable))
to_remove.append(all_deployables.pop(remove_index))
print(f"{'Would sync' if dry_run else 'Synced'} {deployable['type'].replace('-', ' ')} {deployable['context']}.{deployable['name']}: {'TO BE ' if dry_run else ''}{action}")
diff --git a/polyapi/typedefs.py b/polyapi/typedefs.py
index b887103..7dac1bc 100644
--- a/polyapi/typedefs.py
+++ b/polyapi/typedefs.py
@@ -11,7 +11,7 @@ class PropertySpecification(TypedDict):
class PropertyType(TypedDict):
- kind: Literal['void', 'primitive', 'array', 'object', 'function', 'plain']
+ kind: Literal['void', 'primitive', 'array', 'object', 'function', 'plain', 'any']
spec: NotRequired[Dict]
name: NotRequired[str]
type: NotRequired[str]
@@ -35,7 +35,7 @@ class SpecificationDto(TypedDict):
description: str
# function is none (or function key not present) if this is actually VariableSpecDto
function: NotRequired[FunctionSpecification | None]
- type: Literal['apiFunction', 'customFunction', 'serverFunction', 'authFunction', 'webhookHandle', 'serverVariable']
+ type: Literal['apiFunction', 'customFunction', 'serverFunction', 'authFunction', 'webhookHandle', 'serverVariable', 'table']
code: NotRequired[str]
language: str
@@ -72,6 +72,17 @@ class SchemaSpecDto(TypedDict):
# TODO add more
+class TableSpecDto(TypedDict):
+ id: str
+ context: str
+ name: str
+ contextName: str
+ description: str
+ type: Literal['table']
+ schema: Dict[Any, Any]
+ unresolvedPolySchemaRefs: List
+
+
Visibility = Union[Literal['PUBLIC'], Literal['TENANT'], Literal['ENVIRONMENT']]
@@ -91,3 +102,99 @@ class PolyServerFunction(PolyDeployable):
class PolyClientFunction(PolyDeployable):
logs_enabled: NotRequired[bool]
visibility: NotRequired[Visibility]
+
+
+class Table(TypedDict):
+ id: str
+ createdAt: str
+ updatedAt: str
+
+
+class PolyCountResult(TypedDict):
+ count: int
+
+
+class PolyDeleteResults(TypedDict):
+ deleted: int
+
+
+
+QueryMode = Literal["default", "insensitive"]
+
+
+SortOrder = Literal["asc", "desc"]
+
+# Using functional form because of use of reserved keywords
+StringFilter = TypedDict("StringFilter", {
+ "equals": NotRequired[str],
+ "in": NotRequired[List[str]],
+ "not_in": NotRequired[List[str]],
+ "lt": NotRequired[str],
+ "lte": NotRequired[str],
+ "gt": NotRequired[str],
+ "gte": NotRequired[str],
+ "contains": NotRequired[str],
+ "starts_with": NotRequired[str],
+ "ends_with": NotRequired[str],
+ "mode": NotRequired[QueryMode],
+ "not": NotRequired[Union[str, "StringFilter"]],
+})
+
+# Using functional form because of use of reserved keywords
+NullableStringFilter = TypedDict("NullableStringFilter", {
+ "equals": NotRequired[Union[str, None]],
+ "in": NotRequired[List[str]],
+ "not_in": NotRequired[List[str]],
+ "lt": NotRequired[str],
+ "lte": NotRequired[str],
+ "gt": NotRequired[str],
+ "gte": NotRequired[str],
+ "contains": NotRequired[str],
+ "starts_with": NotRequired[str],
+ "ends_with": NotRequired[str],
+ "mode": NotRequired[QueryMode],
+ "not": NotRequired[Union[str, None, "NullableStringFilter"]],
+})
+
+# Using functional form because of use of reserved keywords
+NumberFilter = TypedDict("NumberFilter", {
+ "equals": NotRequired[Union[int, float]],
+ "in": NotRequired[List[Union[int, float]]],
+ "not_in": NotRequired[List[Union[int, float]]],
+ "lt": NotRequired[Union[int, float]],
+ "lte": NotRequired[Union[int, float]],
+ "gt": NotRequired[Union[int, float]],
+ "gte": NotRequired[Union[int, float]],
+ "not": NotRequired[Union[int, float, "NumberFilter"]],
+})
+
+# Using functional form because of use of reserved keywords
+NullableNumberFilter = TypedDict("NullableNumberFilter", {
+ "equals": NotRequired[Union[int, float, None]],
+ "in": NotRequired[List[Union[int, float]]],
+ "not_in": NotRequired[List[Union[int, float]]],
+ "lt": NotRequired[Union[int, float]],
+ "lte": NotRequired[Union[int, float]],
+ "gt": NotRequired[Union[int, float]],
+ "gte": NotRequired[Union[int, float]],
+ "not": NotRequired[Union[int, float, None, "NullableNumberFilter"]],
+})
+
+
+# Using functional form because of use of reserved keywords
+BooleanFilter = TypedDict("BooleanFilter", {
+ "equals": NotRequired[bool],
+ "not": NotRequired[Union[bool, "BooleanFilter"]],
+})
+
+# Using functional form because of use of reserved keywords
+NullableBooleanFilter = TypedDict("NullableBooleanFilter", {
+ "equals": NotRequired[Union[bool, None]],
+ "not": NotRequired[Union[bool, None, "NullableBooleanFilter"]],
+})
+
+# Using functional form because of use of reserved keywords
+NullableObjectFilter = TypedDict("NullableObjectFilter", {
+ "equals": NotRequired[None],
+ "not": NotRequired[Union[None, "NullableObjectFilter"]],
+})
diff --git a/polyapi/utils.py b/polyapi/utils.py
index 3b61f5e..4c803c0 100644
--- a/polyapi/utils.py
+++ b/polyapi/utils.py
@@ -1,9 +1,8 @@
import keyword
import re
import os
-import uuid
from urllib.parse import urlparse
-from typing import Tuple, List
+from typing import Tuple, List, Optional
from colorama import Fore, Style
from polyapi.constants import BASIC_PYTHON_TYPES
from polyapi.typedefs import PropertySpecification, PropertyType
@@ -20,15 +19,17 @@
CODE_IMPORTS = "from typing import List, Dict, Any, Optional, Callable\nfrom typing_extensions import TypedDict, NotRequired\nimport logging\nimport requests\nimport socketio # type: ignore\nfrom polyapi.config import get_api_key_and_url, get_direct_execute_config\nfrom polyapi.execute import execute, execute_post, variable_get, variable_update, direct_execute\n\n"
-def init_the_init(full_path: str, code_imports="") -> None:
+def init_the_init(full_path: str, code_imports: Optional[str] = None) -> None:
init_path = os.path.join(full_path, "__init__.py")
if not os.path.exists(init_path):
- code_imports = code_imports or CODE_IMPORTS
+ if code_imports is None:
+ code_imports = CODE_IMPORTS
with open(init_path, "w") as f:
f.write(code_imports)
-def add_import_to_init(full_path: str, next: str, code_imports="") -> None:
+def add_import_to_init(full_path: str, next: str, code_imports: Optional[str] = None) -> None:
     init_the_init(full_path, code_imports=code_imports)
init_path = os.path.join(full_path, "__init__.py")
diff --git a/polyapi/variables.py b/polyapi/variables.py
index 76975cc..1fb915d 100644
--- a/polyapi/variables.py
+++ b/polyapi/variables.py
@@ -19,10 +19,7 @@ def get() -> {variable_type}:
TEMPLATE = """
-import uuid
-
-
-client_id = uuid.uuid4().hex
+from polyapi.poly.client_id import client_id
class {variable_name}:{get_method}
diff --git a/polyapi/webhook.py b/polyapi/webhook.py
index 2f11707..8d68186 100644
--- a/polyapi/webhook.py
+++ b/polyapi/webhook.py
@@ -1,7 +1,6 @@
import asyncio
import socketio # type: ignore
from socketio.exceptions import ConnectionError # type: ignore
-import uuid
import logging
from typing import Any, Dict, List, Tuple
@@ -33,6 +32,7 @@ async def {function_name}(
Function ID: {function_id}
\"""
from polyapi.webhook import client, active_handlers
+ from polyapi.poly.client_id import client_id
print("Starting webhook handler for {function_path}...")
@@ -40,7 +40,7 @@ async def {function_name}(
raise Exception("Client not initialized. Abort!")
options = options or {{}}
- eventsClientId = "{client_id}"
+ eventsClientId = client_id
function_id = "{function_id}"
api_key, base_url = get_api_key_and_url()
@@ -131,7 +131,6 @@ def render_webhook_handle(
func_str = WEBHOOK_TEMPLATE.format(
description=function_description,
- client_id=uuid.uuid4().hex,
function_id=function_id,
function_name=function_name,
function_args=function_args,
diff --git a/pyproject.toml b/pyproject.toml
index 53041fb..18031a6 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -3,7 +3,7 @@ requires = ["setuptools>=61.2", "wheel"]
[project]
name = "polyapi-python"
-version = "0.3.8"
+version = "0.3.9"
description = "The Python Client for PolyAPI, the IPaaS by Developers for Developers"
authors = [{ name = "Dan Fellin", email = "dan@polyapi.io" }]
dependencies = [
@@ -11,10 +11,10 @@ dependencies = [
"typing_extensions>=4.12.2",
"jsonschema-gentypes==2.6.0",
"pydantic>=2.8.0",
- "stdlib_list==0.10.0",
+ "stdlib_list>=0.10.0",
"colorama==0.4.4",
"python-socketio[asyncio_client]==5.11.1",
- "truststore==0.8.0",
+ "truststore>=0.8.0",
]
readme = "README.md"
license = { file = "LICENSE" }
diff --git a/requirements.txt b/requirements.txt
index d34defb..b5eb3c4 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -2,7 +2,7 @@ requests>=2.32.3
typing_extensions>=4.10.0
jsonschema-gentypes==2.10.0
pydantic>=2.8.0
-stdlib_list==0.10.0
+stdlib_list>=0.10.0
colorama==0.4.4
python-socketio[asyncio_client]==5.11.1
-truststore==0.8.0
\ No newline at end of file
+truststore>=0.8.0
\ No newline at end of file
diff --git a/tests/test_schema.py b/tests/test_schema.py
index 223ec39..ae23ce3 100644
--- a/tests/test_schema.py
+++ b/tests/test_schema.py
@@ -1,5 +1,5 @@
import unittest
-from polyapi.schema import clean_malformed_examples, wrapped_generate_schema_types
+from polyapi.schema import clean_malformed_examples, wrapped_generate_schema_types, generate_schema_types
SCHEMA = {
"$schema": "http://json-schema.org/draft-06/schema#",
@@ -10,6 +10,14 @@
"definitions": {},
}
+CHARACTER_SCHEMA = {
+ "$schema": "http://json-schema.org/draft-06/schema#",
+ "type": "object",
+ "properties": {"CHARACTER_SCHEMA_NAME": {"description": "This is — “bad”, right?", "type": "string"}},
+ "additionalProperties": False,
+ "definitions": {},
+}
+
APALEO_MALFORMED_EXAMPLE = 'from typing import List, TypedDict, Union\nfrom typing_extensions import Required\n\n\n# Body.\n# \n# example: {\n "from": "2024-04-21",\n "to": "2024-04-24",\n "grossDailyRate": {\n "amount": 160.0,\n "currency": "EUR"\n },\n "timeSlices": [\n {\n "blockedUnits": 3\n },\n {\n "blockedUnits": 0\n },\n {\n "blockedUnits": 7\n }\n ]\n}\n# x-readme-ref-name: ReplaceBlockModel\nBody = TypedDict(\'Body\', {\n # Start date and time from which the inventory will be blockedSpecify either a pure date or a date and time (without fractional second part) in UTC or with UTC offset as defined in ISO8601:2004\n # \n # Required property\n \'from\': Required[str],\n # End date and time until which the inventory will be blocked. Cannot be more than 5 years after the start date.Specify either a pure date or a date and time (without fractional second part) in UTC or with UTC offset as defined in ISO8601:2004\n # \n # Required property\n \'to\': Required[str],\n # x-readme-ref-name: MonetaryValueModel\n # \n # Required property\n \'grossDailyRate\': Required["_BodygrossDailyRate"],\n # The list of time slices\n # \n # Required property\n \'timeSlices\': Required[List["_BodytimeSlicesitem"]],\n}, total=False)\n\n\nclass _BodygrossDailyRate(TypedDict, total=False):\n """ x-readme-ref-name: MonetaryValueModel """\n\n amount: Required[Union[int, float]]\n """\n format: double\n\n Required property\n """\n\n currency: Required[str]\n """ Required property """\n\n\n\nclass _BodytimeSlicesitem(TypedDict, total=False):\n """ x-readme-ref-name: CreateBlockTimeSliceModel """\n\n blockedUnits: Required[Union[int, float]]\n """\n Number of units blocked for the time slice\n\n format: int32\n\n Required property\n """\n\n'
@@ -23,4 +31,10 @@ def test_fix_titles(self):
def test_clean_malformed_examples(self):
output = clean_malformed_examples(APALEO_MALFORMED_EXAMPLE)
- self.assertNotIn("# example: {", output)
\ No newline at end of file
+ self.assertNotIn("# example: {", output)
+
+ def test_character_encoding(self):
+ output = generate_schema_types(CHARACTER_SCHEMA, "Dict")
+ expected = 'from typing import TypedDict\n\n\nclass Dict(TypedDict, total=False):\n CHARACTER_SCHEMA_NAME: str\n """ This is — “bad”, right? """\n\n'
+ self.assertEqual(output, expected)
+
\ No newline at end of file
diff --git a/tests/test_tabi.py b/tests/test_tabi.py
new file mode 100644
index 0000000..2bd21d3
--- /dev/null
+++ b/tests/test_tabi.py
@@ -0,0 +1,621 @@
+import unittest
+from polyapi.poly_tables import _render_table
+
+
+TABLE_SPEC_SIMPLE = {
+ "type": "table",
+ "id": "123456789",
+ "name": "MyTable",
+ "context": "some.context.here",
+ "contextName": "some.context.here.MyTable",
+ "description": "This table stores:\n - User name\n - User age\n - If user is active on the platform",
+ "schema": {
+ "$schema": "http://json-schema.org/draft-06/schema#",
+ "type": "object",
+ "properties": {
+ "id": { "type": "string" },
+ "createdAt": { "type": "string" },
+ "updatedAt": { "type": "string" },
+ "name": { "type": "string" },
+ "age": { "type": "integer" },
+ "active": { "type": "boolean" },
+ "optional": { "type": "object" }
+ },
+ "required": [
+ "id",
+ "createdAt",
+ "updatedAt",
+ "name",
+ "age",
+ "active"
+ ],
+ "additionalProperties": False,
+ }
+}
+
+EXPECTED_SIMPLE = '''
+MyTableColumns = Literal["id","createdAt","updatedAt","name","age","active","optional"]
+
+
+
+class MyTableRow(TypedDict, total=False):
+ id: Required[str]
+ """ Required property """
+
+ createdAt: Required[str]
+ """ Required property """
+
+ updatedAt: Required[str]
+ """ Required property """
+
+ name: Required[str]
+ """ Required property """
+
+ age: Required[int]
+ """ Required property """
+
+ active: Required[bool]
+ """ Required property """
+
+ optional: dict[str, Any]
+
+
+
+class MyTableSubset(TypedDict):
+ id: NotRequired[str]
+ createdAt: NotRequired[str]
+ updatedAt: NotRequired[str]
+ name: NotRequired[str]
+ age: NotRequired[int]
+ active: NotRequired[bool]
+ optional: NotRequired[Optional[Dict[str, Any]]]
+
+
+
+class MyTableWhereFilter(TypedDict):
+ id: NotRequired[Union[str, StringFilter]]
+ createdAt: NotRequired[Union[str, StringFilter]]
+ updatedAt: NotRequired[Union[str, StringFilter]]
+ name: NotRequired[Union[str, StringFilter]]
+ age: NotRequired[Union[int, NumberFilter]]
+ active: NotRequired[Union[bool, BooleanFilter]]
+ optional: NotRequired[Union[None, NullableObjectFilter]]
+ AND: NotRequired[Union["MyTableWhereFilter", List["MyTableWhereFilter"]]]
+ OR: NotRequired[List["MyTableWhereFilter"]]
+ NOT: NotRequired[Union["MyTableWhereFilter", List["MyTableWhereFilter"]]]
+
+
+
+class MyTableSelectManyQuery(TypedDict):
+ where: NotRequired[MyTableWhereFilter]
+ order_by: NotRequired[Dict[MyTableColumns, SortOrder]]
+ limit: NotRequired[int]
+ offset: NotRequired[int]
+
+
+
+class MyTableSelectOneQuery(TypedDict):
+ where: NotRequired[MyTableWhereFilter]
+ order_by: NotRequired[Dict[MyTableColumns, SortOrder]]
+
+
+
+class MyTableInsertOneQuery(TypedDict):
+ data: MyTableSubset
+
+
+
+class MyTableInsertManyQuery(TypedDict):
+ data: List[MyTableSubset]
+
+
+
+class MyTableUpdateManyQuery(TypedDict):
+ where: NotRequired[MyTableWhereFilter]
+ data: MyTableSubset
+
+
+
+class MyTableDeleteQuery(TypedDict):
+ where: NotRequired[MyTableWhereFilter]
+
+
+
+class MyTableQueryResults(TypedDict):
+ results: List[MyTableRow]
+ pagination: None # Pagination not yet supported
+
+
+
+class MyTableCountQuery(TypedDict):
+ where: NotRequired[MyTableWhereFilter]
+
+
+
+class MyTable:
+ """This table stores:
+ - User name
+ - User age
+ - If user is active on the platform
+ """
+ table_id = "123456789"
+
+ @overload
+ @staticmethod
+ def count(query: MyTableCountQuery) -> PolyCountResult: ...
+ @overload
+ @staticmethod
+ def count(*, where: Optional[MyTableWhereFilter]) -> PolyCountResult: ...
+
+ @staticmethod
+ def count(*args, **kwargs) -> PolyCountResult:
+ if args:
+ if len(args) != 1 or not isinstance(args[0], dict):
+ raise TypeError("Expected query as a single argument or as kwargs")
+ query = args[0]
+ else:
+ query = kwargs
+ return execute_query(MyTable.table_id, "count", transform_query(query))
+
+ @overload
+ @staticmethod
+ def select_many(query: MyTableSelectManyQuery) -> MyTableQueryResults: ...
+ @overload
+ @staticmethod
+ def select_many(*, where: Optional[MyTableWhereFilter], order_by: Optional[Dict[MyTableColumns, SortOrder]], limit: Optional[int], offset: Optional[int]) -> MyTableQueryResults: ...
+
+ @staticmethod
+ def select_many(*args, **kwargs) -> MyTableQueryResults:
+ if args:
+ if len(args) != 1 or not isinstance(args[0], dict):
+ raise TypeError("Expected query as a single argument or as kwargs")
+ query = args[0]
+ else:
+ query = kwargs
+ if query.get('limit') is None:
+ query['limit'] = 1000
+ if query['limit'] > 1000:
+ raise ValueError("Cannot select more than 1000 rows at a time.")
+ return execute_query(MyTable.table_id, "select", transform_query(query))
+
+ @overload
+ @staticmethod
+ def select_one(query: MyTableSelectOneQuery) -> MyTableRow: ...
+ @overload
+ @staticmethod
+ def select_one(*, where: Optional[MyTableWhereFilter], order_by: Optional[Dict[MyTableColumns, SortOrder]]) -> MyTableRow: ...
+
+ @staticmethod
+ def select_one(*args, **kwargs) -> MyTableRow:
+ if args:
+ if len(args) != 1 or not isinstance(args[0], dict):
+ raise TypeError("Expected query as a single argument or as kwargs")
+ query = args[0]
+ else:
+ query = kwargs
+ query['limit'] = 1
+ return first_result(execute_query(MyTable.table_id, "select", transform_query(query)))
+
+ @overload
+ @staticmethod
+ def insert_many(query: MyTableInsertManyQuery) -> MyTableQueryResults: ...
+ @overload
+ @staticmethod
+ def insert_many(*, data: List[MyTableSubset]) -> MyTableQueryResults: ...
+
+ @staticmethod
+ def insert_many(*args, **kwargs) -> MyTableQueryResults:
+ if args:
+ if len(args) != 1 or not isinstance(args[0], dict):
+ raise TypeError("Expected query as a single argument or as kwargs")
+ query = args[0]
+ else:
+ query = kwargs
+ if len(query['data']) > 1000:
+ raise ValueError("Cannot insert more than 1000 rows at a time.")
+ return execute_query(MyTable.table_id, "insert", query)
+
+ @overload
+ @staticmethod
+ def insert_one(query: MyTableInsertOneQuery) -> MyTableRow: ...
+ @overload
+ @staticmethod
+ def insert_one(*, data: MyTableSubset) -> MyTableRow: ...
+
+ @staticmethod
+ def insert_one(*args, **kwargs) -> MyTableRow:
+ if args:
+ if len(args) != 1 or not isinstance(args[0], dict):
+ raise TypeError("Expected query as a single argument or as kwargs")
+ query = args[0]
+ else:
+ query = kwargs
+ return first_result(execute_query(MyTable.table_id, "insert", { 'data': [query['data']] }))
+
+ @overload
+ @staticmethod
+ def upsert_many(query: MyTableInsertManyQuery) -> MyTableQueryResults: ...
+ @overload
+ @staticmethod
+ def upsert_many(*, data: List[MyTableSubset]) -> MyTableQueryResults: ...
+
+ @staticmethod
+ def upsert_many(*args, **kwargs) -> MyTableQueryResults:
+ if args:
+ if len(args) != 1 or not isinstance(args[0], dict):
+ raise TypeError("Expected query as a single argument or as kwargs")
+ query = args[0]
+ else:
+ query = kwargs
+        if len(query['data']) > 1000:
+ raise ValueError("Cannot upsert more than 1000 rows at a time.")
+ return execute_query(MyTable.table_id, "upsert", query)
+
+ @overload
+ @staticmethod
+ def upsert_one(query: MyTableInsertOneQuery) -> MyTableRow: ...
+ @overload
+ @staticmethod
+ def upsert_one(*, data: MyTableSubset) -> MyTableRow: ...
+
+ @staticmethod
+ def upsert_one(*args, **kwargs) -> MyTableRow:
+ if args:
+ if len(args) != 1 or not isinstance(args[0], dict):
+ raise TypeError("Expected query as a single argument or as kwargs")
+ query = args[0]
+ else:
+ query = kwargs
+ return first_result(execute_query(MyTable.table_id, "upsert", { 'data': [query['data']] }))
+
+ @overload
+ @staticmethod
+ def update_many(query: MyTableUpdateManyQuery) -> MyTableQueryResults: ...
+ @overload
+ @staticmethod
+ def update_many(*, where: Optional[MyTableWhereFilter], data: MyTableSubset) -> MyTableQueryResults: ...
+
+ @staticmethod
+ def update_many(*args, **kwargs) -> MyTableQueryResults:
+ if args:
+ if len(args) != 1 or not isinstance(args[0], dict):
+ raise TypeError("Expected query as a single argument or as kwargs")
+ query = args[0]
+ else:
+ query = kwargs
+ return execute_query(MyTable.table_id, "update", transform_query(query))
+
+ @overload
+ @staticmethod
+ def delete_many(query: MyTableDeleteQuery) -> PolyDeleteResults: ...
+ @overload
+ @staticmethod
+ def delete_many(*, where: Optional[MyTableWhereFilter]) -> PolyDeleteResults: ...
+
+ @staticmethod
+ def delete_many(*args, **kwargs) -> PolyDeleteResults:
+ if args:
+ if len(args) != 1 or not isinstance(args[0], dict):
+ raise TypeError("Expected query as a single argument or as kwargs")
+ query = args[0]
+ else:
+ query = kwargs
+ return execute_query(MyTable.table_id, "delete", query)
+'''
+
+TABLE_SPEC_COMPLEX = {
+ "type": "table",
+ "id": "123456789",
+ "name": "MyTable",
+ "context": "some.context.here",
+ "contextName": "some.context.here.MyTable",
+ "schema": {
+ "$schema": "http://json-schema.org/draft-06/schema#",
+ "type": "object",
+ "properties": {
+ "id": { "type": "string" },
+ "createdAt": { "type": "string" },
+ "updatedAt": { "type": "string" },
+ "data": {
+ "type": "object",
+ "properties": {
+ "foo": { "type": "string" },
+ "nested": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties": { "name": { "type": "string" } },
+ "required": ["name"]
+ }
+ },
+ "other": { "x-poly-ref": { "path": "some.other.Schema" }}
+ }
+ }
+ },
+ "required": [
+ "id",
+ "createdAt",
+ "updatedAt",
+ "data"
+ ],
+ "additionalProperties": False,
+ }
+}
+
+EXPECTED_COMPLEX = '''
+MyTableColumns = Literal["id","createdAt","updatedAt","data"]
+
+
+
+class MyTableRow(TypedDict, total=False):
+ id: Required[str]
+ """ Required property """
+
+ createdAt: Required[str]
+ """ Required property """
+
+ updatedAt: Required[str]
+ """ Required property """
+
+ data: Required["_MyTableRowdata"]
+ """ Required property """
+
+
+
+class _MyTableRowdata(TypedDict, total=False):
+ foo: str
+ nested: list["_MyTableRowdatanesteditem"]
+ other: str | int | float | dict[str, Any] | list[Any] | bool | None
+ """
+ x-poly-ref:
+ path: some.other.Schema
+ """
+
+
+
+class _MyTableRowdatanesteditem(TypedDict, total=False):
+ name: Required[str]
+ """ Required property """
+
+
+
+class MyTableSubset(TypedDict):
+ id: NotRequired[str]
+ createdAt: NotRequired[str]
+ updatedAt: NotRequired[str]
+ data: NotRequired["_MyTableRowdata"]
+
+
+
+class MyTableWhereFilter(TypedDict):
+ id: NotRequired[Union[str, StringFilter]]
+ createdAt: NotRequired[Union[str, StringFilter]]
+ updatedAt: NotRequired[Union[str, StringFilter]]
+ AND: NotRequired[Union["MyTableWhereFilter", List["MyTableWhereFilter"]]]
+ OR: NotRequired[List["MyTableWhereFilter"]]
+ NOT: NotRequired[Union["MyTableWhereFilter", List["MyTableWhereFilter"]]]
+
+
+
+class MyTableSelectManyQuery(TypedDict):
+ where: NotRequired[MyTableWhereFilter]
+ order_by: NotRequired[Dict[MyTableColumns, SortOrder]]
+ limit: NotRequired[int]
+ offset: NotRequired[int]
+
+
+
+class MyTableSelectOneQuery(TypedDict):
+ where: NotRequired[MyTableWhereFilter]
+ order_by: NotRequired[Dict[MyTableColumns, SortOrder]]
+
+
+
+class MyTableInsertOneQuery(TypedDict):
+ data: MyTableSubset
+
+
+
+class MyTableInsertManyQuery(TypedDict):
+ data: List[MyTableSubset]
+
+
+
+class MyTableUpdateManyQuery(TypedDict):
+ where: NotRequired[MyTableWhereFilter]
+ data: MyTableSubset
+
+
+
+class MyTableDeleteQuery(TypedDict):
+ where: NotRequired[MyTableWhereFilter]
+
+
+
+class MyTableQueryResults(TypedDict):
+ results: List[MyTableRow]
+ pagination: None # Pagination not yet supported
+
+
+
+class MyTableCountQuery(TypedDict):
+ where: NotRequired[MyTableWhereFilter]
+
+
+
+class MyTable:
+ table_id = "123456789"
+
+ @overload
+ @staticmethod
+ def count(query: MyTableCountQuery) -> PolyCountResult: ...
+ @overload
+ @staticmethod
+ def count(*, where: Optional[MyTableWhereFilter]) -> PolyCountResult: ...
+
+ @staticmethod
+ def count(*args, **kwargs) -> PolyCountResult:
+ if args:
+ if len(args) != 1 or not isinstance(args[0], dict):
+ raise TypeError("Expected query as a single argument or as kwargs")
+ query = args[0]
+ else:
+ query = kwargs
+ return execute_query(MyTable.table_id, "count", transform_query(query))
+
+ @overload
+ @staticmethod
+ def select_many(query: MyTableSelectManyQuery) -> MyTableQueryResults: ...
+ @overload
+ @staticmethod
+ def select_many(*, where: Optional[MyTableWhereFilter], order_by: Optional[Dict[MyTableColumns, SortOrder]], limit: Optional[int], offset: Optional[int]) -> MyTableQueryResults: ...
+
+ @staticmethod
+ def select_many(*args, **kwargs) -> MyTableQueryResults:
+ if args:
+ if len(args) != 1 or not isinstance(args[0], dict):
+ raise TypeError("Expected query as a single argument or as kwargs")
+ query = args[0]
+ else:
+ query = kwargs
+ if query.get('limit') is None:
+ query['limit'] = 1000
+ if query['limit'] > 1000:
+ raise ValueError("Cannot select more than 1000 rows at a time.")
+ return execute_query(MyTable.table_id, "select", transform_query(query))
+
+ @overload
+ @staticmethod
+ def select_one(query: MyTableSelectOneQuery) -> MyTableRow: ...
+ @overload
+ @staticmethod
+ def select_one(*, where: Optional[MyTableWhereFilter], order_by: Optional[Dict[MyTableColumns, SortOrder]]) -> MyTableRow: ...
+
+ @staticmethod
+ def select_one(*args, **kwargs) -> MyTableRow:
+ if args:
+ if len(args) != 1 or not isinstance(args[0], dict):
+ raise TypeError("Expected query as a single argument or as kwargs")
+ query = args[0]
+ else:
+ query = kwargs
+ query['limit'] = 1
+ return first_result(execute_query(MyTable.table_id, "select", transform_query(query)))
+
+ @overload
+ @staticmethod
+ def insert_many(query: MyTableInsertManyQuery) -> MyTableQueryResults: ...
+ @overload
+ @staticmethod
+ def insert_many(*, data: List[MyTableSubset]) -> MyTableQueryResults: ...
+
+ @staticmethod
+ def insert_many(*args, **kwargs) -> MyTableQueryResults:
+ if args:
+ if len(args) != 1 or not isinstance(args[0], dict):
+ raise TypeError("Expected query as a single argument or as kwargs")
+ query = args[0]
+ else:
+ query = kwargs
+ if len(query['data']) > 1000:
+ raise ValueError("Cannot insert more than 1000 rows at a time.")
+ return execute_query(MyTable.table_id, "insert", query)
+
+ @overload
+ @staticmethod
+ def insert_one(query: MyTableInsertOneQuery) -> MyTableRow: ...
+ @overload
+ @staticmethod
+ def insert_one(*, data: MyTableSubset) -> MyTableRow: ...
+
+ @staticmethod
+ def insert_one(*args, **kwargs) -> MyTableRow:
+ if args:
+ if len(args) != 1 or not isinstance(args[0], dict):
+ raise TypeError("Expected query as a single argument or as kwargs")
+ query = args[0]
+ else:
+ query = kwargs
+ return first_result(execute_query(MyTable.table_id, "insert", { 'data': [query['data']] }))
+
+ @overload
+ @staticmethod
+ def upsert_many(query: MyTableInsertManyQuery) -> MyTableQueryResults: ...
+ @overload
+ @staticmethod
+ def upsert_many(*, data: List[MyTableSubset]) -> MyTableQueryResults: ...
+
+ @staticmethod
+ def upsert_many(*args, **kwargs) -> MyTableQueryResults:
+ if args:
+ if len(args) != 1 or not isinstance(args[0], dict):
+ raise TypeError("Expected query as a single argument or as kwargs")
+ query = args[0]
+ else:
+ query = kwargs
+        if len(query['data']) > 1000:
+ raise ValueError("Cannot upsert more than 1000 rows at a time.")
+ return execute_query(MyTable.table_id, "upsert", query)
+
+ @overload
+ @staticmethod
+ def upsert_one(query: MyTableInsertOneQuery) -> MyTableRow: ...
+ @overload
+ @staticmethod
+ def upsert_one(*, data: MyTableSubset) -> MyTableRow: ...
+
+ @staticmethod
+ def upsert_one(*args, **kwargs) -> MyTableRow:
+ if args:
+ if len(args) != 1 or not isinstance(args[0], dict):
+ raise TypeError("Expected query as a single argument or as kwargs")
+ query = args[0]
+ else:
+ query = kwargs
+ return first_result(execute_query(MyTable.table_id, "upsert", { 'data': [query['data']] }))
+
+ @overload
+ @staticmethod
+ def update_many(query: MyTableUpdateManyQuery) -> MyTableQueryResults: ...
+ @overload
+ @staticmethod
+ def update_many(*, where: Optional[MyTableWhereFilter], data: MyTableSubset) -> MyTableQueryResults: ...
+
+ @staticmethod
+ def update_many(*args, **kwargs) -> MyTableQueryResults:
+ if args:
+ if len(args) != 1 or not isinstance(args[0], dict):
+ raise TypeError("Expected query as a single argument or as kwargs")
+ query = args[0]
+ else:
+ query = kwargs
+ return execute_query(MyTable.table_id, "update", transform_query(query))
+
+ @overload
+ @staticmethod
+ def delete_many(query: MyTableDeleteQuery) -> PolyDeleteResults: ...
+ @overload
+ @staticmethod
+ def delete_many(*, where: Optional[MyTableWhereFilter]) -> PolyDeleteResults: ...
+
+ @staticmethod
+ def delete_many(*args, **kwargs) -> PolyDeleteResults:
+ if args:
+ if len(args) != 1 or not isinstance(args[0], dict):
+ raise TypeError("Expected query as a single argument or as kwargs")
+ query = args[0]
+ else:
+ query = kwargs
+ return execute_query(MyTable.table_id, "delete", query)
+'''
+
+class T(unittest.TestCase):
+ def test_render_simple(self):
+ self.maxDiff = 20000
+ output = _render_table(TABLE_SPEC_SIMPLE)
+ self.assertEqual(output, EXPECTED_SIMPLE)
+
+ def test_render_complex(self):
+ self.maxDiff = 20000
+ output = _render_table(TABLE_SPEC_COMPLEX)
+ self.assertEqual(output, EXPECTED_COMPLEX)
\ No newline at end of file