diff --git a/docs/examples/usage/usage_sql_files_1.py b/docs/examples/usage/usage_sql_files_1.py new file mode 100644 index 00000000..c40a2440 --- /dev/null +++ b/docs/examples/usage/usage_sql_files_1.py @@ -0,0 +1,47 @@ +from pathlib import Path + +from sqlspec import SQLFileLoader + +__all__ = ("create_loader", "test_loader_loads_queries") + + +def create_loader(tmp_path: Path) -> tuple[SQLFileLoader, list[str]]: + sql_dir = tmp_path / "sql" + sql_dir.mkdir() + + sql_file_1 = sql_dir / "queries" / "users.sql" + sql_file_1.parent.mkdir() + sql_file_1.write_text(""" + -- name: get_user_by_id + SELECT * FROM users WHERE id = :user_id; + -- name: list_active_users + SELECT * FROM users WHERE active = 1; + -- name: create_user + INSERT INTO users (name, email) VALUES (:name, :email); + """) + # start-example + from sqlspec.loader import SQLFileLoader + + # Create loader + loader = SQLFileLoader() + + # Load SQL files + loader.load_sql(sql_file_1) + + # Or load from a directory + loader.load_sql(sql_dir) + + # List available queries + queries = loader.list_queries() + print(queries) # ['get_user_by_id', 'list_active_users', 'create_user', ...] 
+ # end-example + return loader, queries + + +def test_loader_loads_queries(tmp_path: Path) -> None: + + loader, queries = create_loader(tmp_path) + # Dummy asserts for doc example + assert hasattr(loader, "load_sql") + assert hasattr(loader, "list_queries") + assert isinstance(queries, list) diff --git a/docs/examples/usage/usage_sql_files_10.py b/docs/examples/usage/usage_sql_files_10.py new file mode 100644 index 00000000..92b02f81 --- /dev/null +++ b/docs/examples/usage/usage_sql_files_10.py @@ -0,0 +1,29 @@ +from pathlib import Path + +__all__ = ("test_integration_with_sqlspec",) + + +def test_integration_with_sqlspec(tmp_path: Path) -> None: + tmp_sql_dir = tmp_path / "sql" + tmp_sql_dir.mkdir() + sql_file = tmp_sql_dir / "queries.sql" + sql_file.write_text(""" + -- name: get_user_by_id + SELECT * FROM users WHERE id = :user_id; + """) + # start-example + from sqlspec import SQLSpec + from sqlspec.loader import SQLFileLoader + + # Create loader + loader = SQLFileLoader() + loader.load_sql(tmp_path / "sql/") + + # Create SQLSpec with loader + spec = SQLSpec(loader=loader) + + # Access loader via SQLSpec + user_query = spec._sql_loader.get_sql("get_user_by_id") + # end-example + # Dummy asserts for doc example + assert user_query is not None diff --git a/docs/examples/usage/usage_sql_files_11.py b/docs/examples/usage/usage_sql_files_11.py new file mode 100644 index 00000000..bcc5722e --- /dev/null +++ b/docs/examples/usage/usage_sql_files_11.py @@ -0,0 +1,38 @@ +from pathlib import Path + +from docs.examples.usage.usage_sql_files_1 import create_loader +from sqlspec import SQLSpec +from sqlspec.adapters.sqlite import SqliteConfig + +__all__ = ("test_type_safe_query_execution",) + + +def test_type_safe_query_execution(tmp_path: Path) -> None: + loader, _queries = create_loader(tmp_path) + # start-example + + from pydantic import BaseModel + + class User(BaseModel): + id: int + username: str + email: str + + # Load and execute with type safety + query = 
loader.get_sql("get_user_by_id") + + spec = SQLSpec(loader=loader) + config = SqliteConfig(pool_config={"database": ":memory:"}) + + with spec.provide_session(config) as session: + session.execute("""CREATE TABLE users ( id INTEGER PRIMARY KEY, username TEXT, email TEXT)""") + session.execute( + """ INSERT INTO users (id, username, email) VALUES (1, 'alice', 'alice@example.com'), (2, 'bob', 'bob@example.com');""" + ) + user: User = session.select_one(query, user_id=1, schema_type=User) + # end-example + # Dummy asserts for doc example + assert user.id == 1 + assert user.username == "alice" + assert user.email == "alice@example.com" + assert query is not None diff --git a/docs/examples/usage/usage_sql_files_12.py b/docs/examples/usage/usage_sql_files_12.py new file mode 100644 index 00000000..a46c66c3 --- /dev/null +++ b/docs/examples/usage/usage_sql_files_12.py @@ -0,0 +1,54 @@ +from pathlib import Path + +__all__ = ("test_user_management_example",) + + +def test_user_management_example(tmp_path: Path) -> None: + user_sql_path = tmp_path / "sql" + user_sql_path.mkdir(parents=True, exist_ok=True) + user_sql_file = user_sql_path / "users.sql" + user_sql_file.write_text( + """-- name: create_user + INSERT INTO users (username, email, password_hash) VALUES (:username, :email, :password_hash) RETURNING id, username, email; + -- name: get_user + SELECT id, username, email FROM users WHERE id = :user_id; + -- name: list_users + SELECT id, username, email FROM users WHERE (:status IS NULL OR active = :status) LIMIT :limit OFFSET :offset; + """ + ) + # start-example + # Python code + from sqlspec import SQLSpec + from sqlspec.adapters.sqlite import SqliteConfig + from sqlspec.loader import SQLFileLoader + + loader = SQLFileLoader() + loader.load_sql(tmp_path / "sql/users.sql") + + spec = SQLSpec() + config = SqliteConfig() + spec.add_config(config) + + with spec.provide_session(config) as session: + session.execute( + """CREATE TABLE users ( id INTEGER PRIMARY KEY, username 
TEXT, email TEXT, password_hash TEXT, active BOOLEAN DEFAULT 1)""" + ) + # Create user + create_query = loader.get_sql("create_user") + result = session.execute( + create_query, username="irma", email="irma@example.com", password_hash="hashed_password" + ) + user = result.one() + user_id = user["id"] + + # Get user + get_query = loader.get_sql("get_user") + user = session.execute(get_query, user_id=user_id).one() + + # List users + list_query = loader.get_sql("list_users") + session.execute(list_query, status=True, limit=10, offset=0).data + # end-example + # Dummy asserts for doc example + assert hasattr(loader, "load_sql") + assert hasattr(spec, "add_config") diff --git a/docs/examples/usage/usage_sql_files_13.py b/docs/examples/usage/usage_sql_files_13.py new file mode 100644 index 00000000..5884a5cd --- /dev/null +++ b/docs/examples/usage/usage_sql_files_13.py @@ -0,0 +1,64 @@ +from pathlib import Path + +__all__ = ("test_analytics_queries_example",) + + +def test_analytics_queries_example(tmp_path: Path) -> None: + from docs.examples.usage.usage_sql_files_1 import create_loader + from sqlspec import SQLSpec + from sqlspec.adapters.sqlite import SqliteConfig + + loader, _queries = create_loader(tmp_path) + sql_analytics_path = tmp_path / "sql" + sql_analytics_path.mkdir(parents=True, exist_ok=True) + sql_analytics_file = sql_analytics_path / "analytics.sql" + sql_analytics_file.write_text( + """-- name: daily_sales + SELECT order_date, SUM(total_amount) AS total_sales + FROM orders + WHERE order_date BETWEEN :start_date AND :end_date + GROUP BY order_date; + -- name: top_products + SELECT product_id, SUM(quantity) AS total_sold + FROM order_items + WHERE order_date >= :start_date + GROUP BY product_id + ORDER BY total_sold DESC + LIMIT :limit; + """ + ) + # start-example + import datetime + + # Load analytics queries + loader.load_sql(tmp_path / "sql/analytics.sql") + + # Run daily sales report + sales_query = loader.get_sql("daily_sales") + config = 
SqliteConfig() + spec = SQLSpec() + spec.add_config(config) + with spec.provide_session(config) as session: + session.execute("""CREATE TABLE orders ( order_id INTEGER PRIMARY KEY, order_date DATE, total_amount REAL);""") + session.execute(""" + CREATE TABLE order_items ( order_item_id INTEGER PRIMARY KEY, order_id INTEGER, product_id INTEGER, quantity INTEGER, order_date DATE);""") + + # Insert sample data + session.execute(""" + INSERT INTO orders (order_id, order_date, total_amount) VALUES + (1, '2025-01-05', 150.00), + (2, '2025-01-15', 200.00), + (3, '2025-01-20', 250.00); + """) + session.execute(""" + INSERT INTO order_items (order_item_id, order_id, product_id, quantity, order_date) VALUES + (1, 1, 101, 2, '2025-01-05'), + (2, 2, 102, 3, '2025-01-15'), + (3, 3, 101, 1, '2025-01-20'); + """) + session.execute(sales_query, start_date=datetime.date(2025, 1, 1), end_date=datetime.date(2025, 2, 1)).data + + # Top products + products_query = loader.get_sql("top_products") + session.execute(products_query, start_date=datetime.date(2025, 1, 1), limit=10).data + # end-example diff --git a/docs/examples/usage/usage_sql_files_14.py b/docs/examples/usage/usage_sql_files_14.py new file mode 100644 index 00000000..df16ff2b --- /dev/null +++ b/docs/examples/usage/usage_sql_files_14.py @@ -0,0 +1,79 @@ +from pathlib import Path + +from pytest_databases.docker.postgres import PostgresService + +from sqlspec import SQLFileLoader +from sqlspec.adapters.asyncpg import AsyncpgConfig +from sqlspec.adapters.sqlite import SqliteConfig + +__all__ = ("test_multi_database_setup_example",) + + +async def test_multi_database_setup_example(tmp_path: Path, postgres_service: PostgresService) -> None: + user_sql_path_pg = tmp_path / "sql" / "postgres" + user_sql_path_pg.mkdir(parents=True, exist_ok=True) + user_sql_file_pg = user_sql_path_pg / "users.sql" + user_sql_file_pg.write_text( + """-- name: upsert_user + INSERT INTO users_sf1 (id, username, email) VALUES (:id, :username, :email) + 
ON CONFLICT (id) DO UPDATE SET username = EXCLUDED.username, email = EXCLUDED.email; + """ + ) + user_sql_path_sqlite = tmp_path / "sql" / "sqlite" + user_sql_path_sqlite.mkdir(parents=True, exist_ok=True) + user_sql_file_sqlite = user_sql_path_sqlite / "users.sql" + user_sql_file_sqlite.write_text( + """-- name: get_user + SELECT id, username, email FROM users_sf1 WHERE id = :user_id; + """ + ) + shared_sql_path = tmp_path / "sql" / "shared" + shared_sql_path.mkdir(parents=True, exist_ok=True) + shared_sql_file = shared_sql_path / "common.sql" + shared_sql_file.write_text( + """-- name: delete_user + DELETE FROM users_sf1 WHERE id = :user_id; + """ + ) + params = {"id": 1, "username": "john_doe", "email": "jd@example.com"} + + # start-example + # Different SQL files for different databases + loader = SQLFileLoader() + loader.load_sql(tmp_path / "sql/postgres/", tmp_path / "sql/sqlite/", tmp_path / "sql/shared/") + + # Queries automatically select correct dialect + pg_query = loader.get_sql("upsert_user") # Uses Postgres ON CONFLICT + sqlite_query = loader.get_sql("get_user") # Uses shared query + + from sqlspec import SQLSpec + + spec = SQLSpec() + postgres_config = AsyncpgConfig( + pool_config={ + "user": postgres_service.user, + "password": postgres_service.password, + "host": postgres_service.host, + "port": postgres_service.port, + "database": postgres_service.database, + } + ) + sqlite_config = SqliteConfig() + # Execute on appropriate database + async with spec.provide_session(postgres_config) as pg_session: + await pg_session.execute("""CREATE TABLE users_sf1 ( id INTEGER PRIMARY KEY, username TEXT, email TEXT)""") + await pg_session.execute( + """ INSERT INTO users_sf1 (id, username, email) VALUES (1, 'old_name', 'old@example.com');""" + ) + + await pg_session.execute(pg_query, **params) + + with spec.provide_session(sqlite_config) as sqlite_session: + sqlite_session.execute("""CREATE TABLE users_sf1 ( id INTEGER PRIMARY KEY, username TEXT, email TEXT)""") 
+ sqlite_session.execute( + """ INSERT INTO users_sf1 (id, username, email) VALUES (1, 'john_doe', 'jd@example.com');""" + ) + sqlite_session.execute(sqlite_query, user_id=1) + # end-example + # Dummy asserts for doc example + assert hasattr(loader, "load_sql") diff --git a/docs/examples/usage/usage_sql_files_15.py b/docs/examples/usage/usage_sql_files_15.py new file mode 100644 index 00000000..6047ca72 --- /dev/null +++ b/docs/examples/usage/usage_sql_files_15.py @@ -0,0 +1,20 @@ +from pathlib import Path + +from docs.examples.usage.usage_sql_files_1 import create_loader +from sqlspec.exceptions import SQLFileNotFoundError + +__all__ = ("test_query_not_found",) + + +def test_query_not_found(tmp_path: Path) -> None: + loader, _queries = create_loader(tmp_path) + # start-example + try: + loader.get_sql("nonexistent_query") + except SQLFileNotFoundError: + print("Query not found. Available queries:") + print(loader.list_queries()) + # end-example + # Dummy asserts for doc example + assert hasattr(loader, "get_sql") + assert hasattr(loader, "list_queries") diff --git a/docs/examples/usage/usage_sql_files_16.py b/docs/examples/usage/usage_sql_files_16.py new file mode 100644 index 00000000..a4e84aad --- /dev/null +++ b/docs/examples/usage/usage_sql_files_16.py @@ -0,0 +1,21 @@ +from pathlib import Path + +from docs.examples.usage.usage_sql_files_1 import create_loader + +__all__ = ("test_file_load_errors",) + + +def test_file_load_errors(tmp_path: Path) -> None: + loader, _queries = create_loader(tmp_path) + # start-example + from sqlspec.exceptions import SQLFileNotFoundError, SQLFileParseError + + try: + loader.load_sql("sql/queries.sql") + except SQLFileNotFoundError as e: + print(f"File not found: {e}") + except SQLFileParseError as e: + print(f"Failed to parse SQL file: {e}") + # end-example + # Dummy asserts for doc example + assert hasattr(loader, "load_sql") diff --git a/docs/examples/usage/usage_sql_files_17.py b/docs/examples/usage/usage_sql_files_17.py new 
file mode 100644 index 00000000..42cd3013 --- /dev/null +++ b/docs/examples/usage/usage_sql_files_17.py @@ -0,0 +1,22 @@ +from pathlib import Path + +from docs.examples.usage.usage_sql_files_1 import create_loader + +__all__ = ("test_debugging_loaded_queries",) + + +def test_debugging_loaded_queries(tmp_path: Path) -> None: + loader, _queries = create_loader(tmp_path) + # start-example + # Print query SQL + query = loader.get_sql("create_user") + print(f"SQL: {query}") + print(f"Parameters: {query.parameters}") + + # Inspect file metadata + file_info = loader.get_file_for_query("create_user") + print(f"Loaded from: {file_info.path}") + # end-example + # Dummy asserts for doc example + assert hasattr(loader, "get_sql") + assert query is not None diff --git a/docs/examples/usage/usage_sql_files_2.py b/docs/examples/usage/usage_sql_files_2.py new file mode 100644 index 00000000..e2a3e838 --- /dev/null +++ b/docs/examples/usage/usage_sql_files_2.py @@ -0,0 +1,23 @@ +from pathlib import Path + +from docs.examples.usage.usage_sql_files_1 import create_loader + +__all__ = ("test_using_loaded_queries",) + + +def test_using_loaded_queries(tmp_path: Path) -> None: + loader, _queries = create_loader(tmp_path) + # start-example + from sqlspec import SQLSpec + from sqlspec.adapters.sqlite import SqliteConfig + + # Set up database + spec = SQLSpec() + config = SqliteConfig() + spec.add_config(config) + + # Get SQL with parameters + user_query = loader.get_sql("get_user_by_id") + # end-example + # Dummy asserts for doc example + assert user_query is not None # In real usage, loader must be defined diff --git a/docs/examples/usage/usage_sql_files_3.py b/docs/examples/usage/usage_sql_files_3.py new file mode 100644 index 00000000..d5f00ebd --- /dev/null +++ b/docs/examples/usage/usage_sql_files_3.py @@ -0,0 +1,25 @@ +from pathlib import Path + +from docs.examples.usage.usage_sql_files_1 import create_loader + +__all__ = ("test_add_queries_programmatically",) + + +def 
test_add_queries_programmatically(tmp_path: Path) -> None: + loader, _ = create_loader(tmp_path) + # start-example + # Add a query at runtime + loader.add_named_sql("health_check", "SELECT 'OK' as status, CURRENT_TIMESTAMP as timestamp") + + # Add with dialect + loader.add_named_sql("postgres_version", "SELECT version()", dialect="postgres") + + # Use the added query + health_sql = loader.get_sql("health_check") + # end-example + # Dummy asserts for doc example + assert health_sql is not None + assert "SELECT 'OK' as status" in health_sql.sql + postgres_sql = loader.get_sql("postgres_version") + assert postgres_sql is not None + assert "SELECT version()" in postgres_sql.sql diff --git a/docs/examples/usage/usage_sql_files_4.py b/docs/examples/usage/usage_sql_files_4.py new file mode 100644 index 00000000..575b7ff7 --- /dev/null +++ b/docs/examples/usage/usage_sql_files_4.py @@ -0,0 +1,26 @@ +from pathlib import Path + +from docs.examples.usage.usage_sql_files_1 import create_loader + +__all__ = ("test_query_metadata",) + + +def test_query_metadata(tmp_path: Path) -> None: + loader, _ = create_loader(tmp_path) + # start-example + # Get file info for a query + file_info = loader.get_file_for_query("get_user_by_id") + if file_info: + print(f"Query from: {file_info.path}") + print(f"Checksum: {file_info.checksum}") + print(f"Loaded at: {file_info.loaded_at}") + + # Get all queries from a specific file + loader.get_file(tmp_path / "sql/queries/users.sql") + # if file_obj: + # print(f"Contains {len(file_obj.queries)} queries") + # for query in file_obj.queries: + # print(f" - {query.name}") + # end-example + # Dummy asserts for doc example + assert hasattr(file_info, "path") diff --git a/docs/examples/usage/usage_sql_files_5.py b/docs/examples/usage/usage_sql_files_5.py new file mode 100644 index 00000000..80254ded --- /dev/null +++ b/docs/examples/usage/usage_sql_files_5.py @@ -0,0 +1,25 @@ +from pathlib import Path + +from docs.examples.usage.usage_sql_files_1 import 
create_loader + +__all__ = ("test_caching_behavior",) + + +def test_caching_behavior(tmp_path: Path) -> None: + loader, _queries = create_loader(tmp_path) + # start-example + # First load - reads from disk + loader.load_sql(tmp_path / "sql/queries/users.sql") + + # Second load - uses cache (file already loaded) + loader.load_sql(tmp_path / "sql/queries/users.sql") + + # Clear cache + loader.clear_cache() + + # Force reload from disk + loader.load_sql(tmp_path / "sql/queries/users.sql") + # end-example + # Dummy asserts for doc example + assert hasattr(loader, "load_sql") + assert hasattr(loader, "clear_cache") diff --git a/docs/examples/usage/usage_sql_files_6.py b/docs/examples/usage/usage_sql_files_6.py new file mode 100644 index 00000000..8ac87b67 --- /dev/null +++ b/docs/examples/usage/usage_sql_files_6.py @@ -0,0 +1,16 @@ +from sqlspec.loader import SQLFileLoader + +__all__ = ("test_loading_directories_with_mixed_files",) + + +def test_loading_directories_with_mixed_files() -> None: + # start-example + loader = SQLFileLoader() + loader.load_sql("migrations/") # Only loads queries.sql + + # Check what was loaded + queries = loader.list_queries() # Only returns named queries + # end-example + # Dummy asserts for doc example + assert hasattr(loader, "load_sql") + assert isinstance(queries, list) diff --git a/docs/examples/usage/usage_sql_files_7.py b/docs/examples/usage/usage_sql_files_7.py new file mode 100644 index 00000000..c07ca115 --- /dev/null +++ b/docs/examples/usage/usage_sql_files_7.py @@ -0,0 +1,24 @@ +from pathlib import Path + +from docs.examples.usage.usage_sql_files_1 import create_loader + +__all__ = ("test_local_files_loading",) + + +def test_local_files_loading(tmp_path: Path) -> None: + loader, _queries = create_loader(tmp_path) + + # start-example + from pathlib import Path + + # Load from Path object + loader.load_sql(Path(tmp_path / "sql/queries/users.sql")) + + # Load from string path + loader.load_sql(tmp_path / "sql/queries/users.sql") + 
+ # Load directory + loader.load_sql(tmp_path / "sql/") + # end-example + # Dummy asserts for doc example + assert hasattr(loader, "load_sql") diff --git a/docs/examples/usage/usage_sql_files_8.py b/docs/examples/usage/usage_sql_files_8.py new file mode 100644 index 00000000..cb378c7c --- /dev/null +++ b/docs/examples/usage/usage_sql_files_8.py @@ -0,0 +1,27 @@ +from pathlib import Path + +from docs.examples.usage.usage_sql_files_1 import create_loader + +__all__ = ("test_file_uris_loading",) + + +def test_file_uris_loading(tmp_path: Path) -> None: + loader, _ = create_loader(tmp_path) + # copy the sql directory to an absolute path for testing + absolute_sql_path = tmp_path / "absolute_sql" + absolute_sql_path.mkdir() + absolute_sql_file = absolute_sql_path / "queries.sql" + absolute_sql_file.write_text(""" + -- name: another_query + SELECT 1; + """) + # start-example + # Load from file:// URI + # change the path below to an absolute path on your system + loader.load_sql(f"file://{absolute_sql_file.resolve()}") + + # Load from relative file URI + loader.load_sql(f"file://{absolute_sql_path}/queries.sql") + # end-example + # Dummy asserts for doc example + assert hasattr(loader, "load_sql") diff --git a/docs/examples/usage/usage_sql_files_9.py b/docs/examples/usage/usage_sql_files_9.py new file mode 100644 index 00000000..fed8fa85 --- /dev/null +++ b/docs/examples/usage/usage_sql_files_9.py @@ -0,0 +1,27 @@ +from pathlib import Path + +import pytest + +from docs.examples.usage.usage_sql_files_1 import create_loader + +__all__ = ("test_cloud_storage_loading",) + + +@pytest.mark.skip(reason="need to find a way to mock cloud storage") +def test_cloud_storage_loading(tmp_path: Path) -> None: + loader, _queries = create_loader(tmp_path) + # start-example + # S3 + loader.load_sql("s3://my-bucket/sql/users.sql") + + # Google Cloud Storage + loader.load_sql("gs://my-bucket/sql/users.sql") + + # Azure Blob Storage + loader.load_sql("az://my-container/sql/users.sql") + + # 
HTTP/HTTPS + loader.load_sql("https://example.com/queries/users.sql") + # end-example + # Dummy asserts for doc example + assert hasattr(loader, "load_sql") diff --git a/docs/usage/sql_files.rst b/docs/usage/sql_files.rst index dcbf5334..1daf54d5 100644 --- a/docs/usage/sql_files.rst +++ b/docs/usage/sql_files.rst @@ -67,43 +67,24 @@ Create a SQL file with named queries using ``-- name:`` comments: Loading SQL Files ^^^^^^^^^^^^^^^^^ -.. code-block:: python +.. literalinclude:: ../examples/usage/usage_sql_files_1.py + :language: python + :start-after: # start-example + :end-before: # end-example + :caption: `Loading SQL files with SQLFileLoader` + :dedent: 2 - from sqlspec.loader import SQLFileLoader - - # Create loader - loader = SQLFileLoader() - - # Load SQL files - loader.load_sql("sql/users.sql", "sql/products.sql", "sql/orders.sql") - - # Or load from a directory - loader.load_sql("sql/") - - # List available queries - queries = loader.list_queries() - print(queries) # ['get_user_by_id', 'list_active_users', 'create_user', ...] Using Loaded Queries ^^^^^^^^^^^^^^^^^^^^ -.. code-block:: python - - from sqlspec import SQLSpec - from sqlspec.adapters.sqlite import SqliteConfig - - # Set up database - spec = SQLSpec() - config = SqliteConfig() - spec.add_config(config) +.. literalinclude:: ../examples/usage/usage_sql_files_2.py + :language: python + :start-after: # start-example + :end-before: # end-example + :caption: `Using loaded queries from SQLFileLoader` + :dedent: 2 - # Get SQL with parameters - user_query = loader.get_sql("get_user_by_id", user_id=123) - - # Execute with session - with spec.provide_session(config) as session: - result = session.execute(user_query) - user = result.one() Query Naming Conventions ------------------------ @@ -194,63 +175,39 @@ Advanced Features Adding Queries Programmatically ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -.. 
code-block:: python - - # Add a query at runtime - loader.add_named_sql( - "health_check", - "SELECT 'OK' as status, CURRENT_TIMESTAMP as timestamp" - ) - - # Add with dialect - loader.add_named_sql( - "postgres_version", - "SELECT version()", - dialect="postgres" - ) +.. literalinclude:: ../examples/usage/usage_sql_files_3.py + :language: python + :start-after: # start-example + :end-before: # end-example + :dedent: 2 + :caption: `Adding queries programmatically to SQLFileLoader` - # Use the added query - health_sql = loader.get_sql("health_check") Query Metadata ^^^^^^^^^^^^^^ Get information about loaded queries: -.. code-block:: python +.. literalinclude:: ../examples/usage/usage_sql_files_4.py + :language: python + :start-after: # start-example + :end-before: # end-example + :dedent: 2 + :caption: `Retrieving query metadata from SQLFileLoader` - # Get file info for a query - file_info = loader.get_file_for_query("get_user_by_id") - if file_info: - print(f"Query from: {file_info.path}") - print(f"Checksum: {file_info.checksum}") - print(f"Loaded at: {file_info.loaded_at}") - - # Get all queries from a specific file - file_obj = loader.get_file("sql/users.sql") - if file_obj: - print(f"Contains {len(file_obj.queries)} queries") - for query in file_obj.queries: - print(f" - {query.name}") Caching Behavior ^^^^^^^^^^^^^^^^ The loader implements intelligent caching with 12x+ performance improvements: -.. code-block:: python - - # First load - reads from disk - loader.load_sql("sql/users.sql") - - # Second load - uses cache (file already loaded) - loader.load_sql("sql/users.sql") +.. 
literalinclude:: ../examples/usage/usage_sql_files_5.py + :language: python + :start-after: # start-example + :end-before: # end-example + :dedent: 2 + :caption: `Using caching with SQLFileLoader` - # Clear cache - loader.clear_cache() - - # Force reload from disk - loader.load_sql("sql/users.sql") **Cache Features** @@ -271,13 +228,12 @@ The loader gracefully handles directories containing both named query files and queries.sql # Named queries → loaded seed-data.sql # Raw DML (no -- name:) → skipped -.. code-block:: python - - loader = SQLFileLoader() - loader.load_sql("migrations/") # Only loads queries.sql - - # Check what was loaded - queries = loader.list_queries() # Only returns named queries +.. literalinclude:: ../examples/usage/usage_sql_files_6.py + :language: python + :start-after: # start-example + :end-before: # end-example + :dedent: 2 + :caption: `Loading a directory with mixed SQL files using SQLFileLoader` **How it works:** @@ -308,48 +264,37 @@ The loader supports multiple storage backends for loading SQL files. Local Files ^^^^^^^^^^^ -.. code-block:: python +.. literalinclude:: ../examples/usage/usage_sql_files_7.py + :language: python + :start-after: # start-example + :end-before: # end-example + :dedent: 2 + :caption: `Loading SQL files from local filesystem using SQLFileLoader` - from pathlib import Path - - # Load from Path object - loader.load_sql(Path("sql/users.sql")) - - # Load from string path - loader.load_sql("sql/users.sql") - - # Load directory - loader.load_sql("sql/") File URIs ^^^^^^^^^ -.. code-block:: python +.. 
literalinclude:: ../examples/usage/usage_sql_files_8.py + :language: python + :start-after: # start-example + :end-before: # end-example + :dedent: 2 + :caption: `Loading SQL files from file URIs using SQLFileLoader` - # Load from file:// URI - loader.load_sql("file:///absolute/path/to/queries.sql") - - # Load from relative file URI - loader.load_sql("file://sql/users.sql") Cloud Storage (with fsspec) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ When ``fsspec`` is installed, load from cloud storage: -.. code-block:: python - - # S3 - loader.load_sql("s3://my-bucket/sql/users.sql") +.. literalinclude:: ../examples/usage/usage_sql_files_9.py + :language: python + :start-after: # start-example + :end-before: # end-example + :dedent: 2 + :caption: `Loading SQL files from cloud storage using SQLFileLoader` - # Google Cloud Storage - loader.load_sql("gs://my-bucket/sql/users.sql") - - # Azure Blob Storage - loader.load_sql("az://my-container/sql/users.sql") - - # HTTP/HTTPS - loader.load_sql("https://example.com/queries/users.sql") Integration with SQLSpec ------------------------- @@ -359,43 +304,26 @@ Loader with SQLSpec Instance Create a SQLSpec instance with an integrated loader: -.. code-block:: python - - from sqlspec import SQLSpec - from sqlspec.loader import SQLFileLoader +.. literalinclude:: ../examples/usage/usage_sql_files_10.py + :language: python + :start-after: # start-example + :end-before: # end-example + :dedent: 2 + :caption: `Creating a SQLSpec instance with SQLFileLoader` - # Create loader - loader = SQLFileLoader() - loader.load_sql("sql/") - - # Create SQLSpec with loader - spec = SQLSpec(loader=loader) - - # Access loader via SQLSpec - user_query = spec.loader.get_sql("get_user_by_id", user_id=1) Type-Safe Query Execution ^^^^^^^^^^^^^^^^^^^^^^^^^^ Combine loaded queries with schema mapping: -.. code-block:: python - - from pydantic import BaseModel - from datetime import datetime +.. 
literalinclude:: ../examples/usage/usage_sql_files_11.py + :language: python + :start-after: # start-example + :end-before: # end-example + :dedent: 2 + :caption: `Executing type-safe queries from SQLFileLoader with SQLSpec` - class User(BaseModel): - id: int - username: str - email: str - created_at: datetime - - # Load and execute with type safety - query = loader.get_sql("get_user_by_id", user_id=1) - - with spec.provide_session(config) as session: - result = session.execute(query, schema_type=User) - user: User = result.one() # Fully typed! Practical Examples ------------------ @@ -435,39 +363,13 @@ Example 1: User Management -- name: delete_user DELETE FROM users WHERE id = :user_id; -.. code-block:: python - - # Python code - from sqlspec import SQLSpec - from sqlspec.loader import SQLFileLoader - from sqlspec.adapters.sqlite import SqliteConfig - - loader = SQLFileLoader() - loader.load_sql("sql/users.sql") - - spec = SQLSpec() - config = SqliteConfig() - spec.add_config(config) +.. literalinclude:: ../examples/usage/usage_sql_files_12.py + :language: python + :start-after: # start-example + :end-before: # end-example + :dedent: 2 + :caption: `Using user management queries from SQLFileLoader` - with spec.provide_session(config) as session: - # Create user - create_query = loader.get_sql( - "create_user", - username="alice", - email="alice@example.com", - password_hash="hashed_password" - ) - result = session.execute(create_query) - user = result.one() - user_id = user['id'] - - # Get user - get_query = loader.get_sql("get_user", user_id=user_id) - user = session.execute(get_query).one() - - # List users - list_query = loader.get_sql("list_users", status=True, limit=10, offset=0) - users = session.execute(list_query).data Example 2: Analytics Queries ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -529,44 +431,24 @@ Example 2: Analytics Queries WHERE co.total_spent > :min_spent ORDER BY co.total_spent DESC; -.. 
code-block:: python - - import datetime - - # Load analytics queries - loader.load_sql("sql/analytics.sql") - - # Run daily sales report - sales_query = loader.get_sql( - "daily_sales", - start_date=datetime.date(2025, 1, 1), - end_date=datetime.date(2025, 2, 1) - ) - sales = session.execute(sales_query).data +.. literalinclude:: ../examples/usage/usage_sql_files_13.py + :language: python + :start-after: # start-example + :end-before: # end-example + :dedent: 2 + :caption: `Using analytics queries from SQLFileLoader` - # Top products - products_query = loader.get_sql("top_products", start_date=datetime.date(2025, 1, 1), limit=10) - top_products = session.execute(products_query).data Example 3: Multi-Database Setup ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -.. code-block:: python +.. literalinclude:: ../examples/usage/usage_sql_files_14.py + :language: python + :start-after: # start-example + :end-before: # end-example + :dedent: 2 + :caption: `Using SQLFileLoader with multiple database dialects` - # Different SQL files for different databases - loader = SQLFileLoader() - loader.load_sql("sql/postgres/", "sql/sqlite/", "sql/shared/") - - # Queries automatically select correct dialect - pg_query = loader.get_sql("upsert_user") # Uses Postgres ON CONFLICT - sqlite_query = loader.get_sql("get_user") # Uses shared query - - # Execute on appropriate database - async with spec.provide_session(postgres_config) as pg_session: - await pg_session.execute(pg_query, **params) - - with spec.provide_session(sqlite_config) as sqlite_session: - sqlite_session.execute(sqlite_query, user_id=1) Best Practices -------------- @@ -641,41 +523,35 @@ Troubleshooting Query Not Found ^^^^^^^^^^^^^^^ -.. code-block:: python +.. 
literalinclude:: ../examples/usage/usage_sql_files_15.py + :language: python + :start-after: # start-example + :end-before: # end-example + :dedent: 2 + :caption: `Handling query not found errors with SQLFileLoader` - try: - query = loader.get_sql("nonexistent_query") - except KeyError: - print("Query not found. Available queries:") - print(loader.list_queries()) File Load Errors ^^^^^^^^^^^^^^^^ -.. code-block:: python +.. literalinclude:: ../examples/usage/usage_sql_files_16.py + :language: python + :start-after: # start-example + :end-before: # end-example + :dedent: 2 + :caption: `Handling file load errors with SQLFileLoader` - from sqlspec.exceptions import SQLFileNotFoundError, SQLFileParseError - - try: - loader.load_sql("sql/queries.sql") - except SQLFileNotFoundError as e: - print(f"File not found: {e}") - except SQLFileParseError as e: - print(f"Failed to parse SQL file: {e}") Debugging Loaded Queries ^^^^^^^^^^^^^^^^^^^^^^^^^ -.. code-block:: python - - # Print query SQL - query = loader.get_sql("get_user", user_id=1) - print(f"SQL: {query}") - print(f"Parameters: {query.parameters}") +.. literalinclude:: ../examples/usage/usage_sql_files_17.py + :language: python + :start-after: # start-example + :end-before: # end-example + :dedent: 2 + :caption: `Debugging loaded queries with SQLFileLoader` - # Inspect file metadata - file_info = loader.get_file_for_query("get_user") - print(f"Loaded from: {file_info.path}") Next Steps ----------