Connecting to an in-memory SQLite database for testing -- table not found #10512
-
First Check
Commit to Help
Example Code# conftest.py
#
import pytest
import sqlite3
from sqlmodel import create_engine
from starlette.testclient import TestClient
from app import database
from app.auth import settings
from app.main import app
@pytest.fixture
def ro_client(in_memory_db):
client_instance = TestClient(app)
client_instance.headers["X-API-KEY"] = settings.ro_api_key
return client_instance
@pytest.fixture
def rw_client(in_memory_db):
client_instance = TestClient(app)
client_instance.headers["X-API-KEY"] = settings.rw_api_key
return client_instance
@pytest.fixture(autouse=True, scope="session")
def in_memory_db():
# Create a new in-memory database connection
memory_conn = sqlite3.connect(":memory:")
# Create a connection to the on-disk production database
production_conn = sqlite3.connect(f"{database.DB_PATH}{database.DB_DATABASE}")
# Use the .backup method to copy the entire on-disk database into the in-memory database
production_conn.backup(memory_conn)
# For debugging: Print the tables in the in-memory database
# memory_cursor = memory_conn.cursor()
# memory_cursor.execute("SELECT name FROM sqlite_master WHERE type='table';")
# tables = memory_cursor.fetchall()
# print("\nTables in in-memory database:", tables)
# Update the app's engine to use the in-memory SQLite database for the duration of the tests
test_engine = create_engine(
"sqlite:///:memory:", connect_args={"check_same_thread": False}, echo=True
)
test_engine.raw_connection().connection = memory_conn
database.engine = test_engine
yield # Yield control to the tests
# Cleanup
memory_conn.close()
production_conn.close()
#
# database.py
#
from sqlmodel import Session, create_engine
from app.config import get_settings
settings = get_settings()
DB_PATH = settings.db_path
DB_DATABASE = settings.db_name
DATABASE_URL = f"sqlite:///{DB_PATH}{DB_DATABASE}"
connect_args = {"check_same_thread": False}
# Allow for DATABASE_URL to be overridden (e.g., for testing)
def get_engine(database_url: str = DATABASE_URL, echo: bool = False):
return create_engine(database_url, echo=echo, connect_args=connect_args)
# Default engine
engine = get_engine(DATABASE_URL, echo=(not settings.production))
def get_session():
with Session(engine) as session:
yield sessionDescriptionI'm using SQLite for production, and for testing purpose I'd like to copy my production database into an in-memory SQLite database. I have confirmed that the
If both databases, the production one and the in-memory one, are the same then I'm not sure why the apikey table is not found. (I have confirmed that it exists in both databases.) Is it a problem with creating the engine? Operating SystemmacOS Operating System DetailsRunning inside Docker via Docker Desktop FastAPI Version0.104.0 Pydantic Version1.10.13 Python Version3.12.0 Additional ContextNo response |
Beta Was this translation helpful? Give feedback.
Replies: 2 comments 2 replies
-
|
I think I have the same problem. This works fine: import sqlmodel
import pprint
class Item(sqlmodel.SQLModel, table=True):
id: int | None = sqlmodel.Field(primary_key=True, default=None)
name: str
engine = sqlmodel.create_engine(
url='sqlite:///',
# echo=True,
connect_args={'check_same_thread': False})
sqlmodel.SQLModel.metadata.create_all(engine)
with sqlmodel.Session(engine) as db:
pprint.pprint(db.exec(sqlmodel.select(Item)).all())
db.add(Item(name='test1'))
db.add(Item(name='test2'))
db.commit()
pprint.pprint(db.exec(sqlmodel.select(Item)).all())This has a problem: import sqlmodel
import sqlmodel.ext.asyncio.session
import pprint
import fastapi
import typing
import uvicorn
class Item(sqlmodel.SQLModel, table=True):
id: int | None = sqlmodel.Field(primary_key=True, default=None)
name: str
engine = sqlmodel.create_engine(
url='sqlite:///',
# echo=True,
connect_args={'check_same_thread': False})
sqlmodel.SQLModel.metadata.create_all(engine)
with sqlmodel.Session(engine) as db:
pprint.pprint(db.exec(sqlmodel.select(Item)).all())
db.add(Item(name='test1'))
db.add(Item(name='test2'))
db.commit()
pprint.pprint(db.exec(sqlmodel.select(Item)).all())
app = fastapi.FastAPI()
def get_db() -> typing.Generator:
with sqlmodel.Session(engine) as db:
yield db
@app.get('/')
def f(db: sqlmodel.Session = fastapi.Depends(get_db)) -> list[Item]:
return db.exec(sqlmodel.select(Item)).all()
if __name__ == '__main__':
uvicorn.run(app)The error (boring parts elided): I'm testing on Debian 12, so the libraries are relatively old: Fundamentally, I think the problem is that the in-memory is thread-local or session-local, i.e. the fastapi routes see a different in-memory database to the non-fastapi code. I'll go re-read the sqlmodel and fastapi docs, because I feel like I saw the fix for this documented in there somewhere... |
Beta Was this translation helpful? Give feedback.
-
|
You have two different in-memory databases. and are two different databases. And you copy data only to first of them, but trying to use the second. From the SQLite documentation:
|
Beta Was this translation helpful? Give feedback.
You have two different in-memory databases.
Every time you connect to in-memory databese, it creates new separate database. So,
and
are two different databases. And you copy data only to first of them, but trying to use the second.
From the SQLite documentation: