I have started to develop a larger application that needs persistence.
Currently I have a Database class that contains the necessary methods to manage table creation/dropping; it also retrieves sessions and creates the engine.
class Database:
    """
    Database class that manages the database setup and sessions.

    Builds the SQLAlchemy engine from the application config and exposes
    a session factory plus schema create/drop helpers.
    """

    def __init__(self):
        # The original try/except blocks only re-raised the caught exception,
        # which is a no-op; they have been removed. ValueError is a subclass
        # of Exception, so callers that caught Exception keep working.
        database_url = config.get("database_url")
        if not database_url:
            raise ValueError("database_url not found.")
        self.engine = create_engine(
            database_url,
            pool_size=config.get("pool_size"),  # Max number of connections
            pool_timeout=config.get("pool_timeout"),  # Time until a connection fails
        )
        self.Session = sessionmaker(bind=self.engine)

    def get_session(self):
        """Return a new Session bound to this engine."""
        return self.Session()

    def create_tables(self):
        """Create all tables registered on Base.metadata."""
        Base.metadata.create_all(bind=self.engine)

    def drop_tables(self):
        """Drop all tables registered on Base.metadata."""
        Base.metadata.drop_all(bind=self.engine)
Although I don’t know if this is the best Python way to do this, I have a few questions related to the use of this class.
Inside my router.py
I have a dependency injection inside a POST endpoint. This dependency is defined as:
def get_db():
return Database()
So this basically creates a new Database() instance for each request — is this correct? What are the best practices for doing this?
What is the best way to initialize the database inside my main.py
?
Thanks!
2
A better approach would be to implement a database client with a session factory inside and use it like this:
class DatabaseClient:
    """Async database client.

    Owns the async engine and a session factory scoped to the current
    asyncio task, so concurrent requests never share a session.
    """

    def __init__(self):
        """Create the async engine and the task-scoped session factory."""
        async_engine = create_async_engine(
            self.build_url(),  # NOTE(review): build_url is defined elsewhere — confirm
            isolation_level=CONFIG.DB_ISOLATION_LEVEL,
            echo=CONFIG.DB_ECHO,
            pool_recycle=CONFIG.DB_POOL_RECYCLE,
            pool_size=CONFIG.DB_POOL_SIZE,
            max_overflow=CONFIG.DB_MAX_OVERFLOW,
        )
        self.session_factory = async_scoped_session(
            sessionmaker(
                autocommit=False,
                autoflush=False,
                bind=async_engine,
                class_=AsyncSession,
                expire_on_commit=False,
            ),
            scopefunc=current_task,  # one session per asyncio task
        )

    @asynccontextmanager
    async def get_session(self) -> AsyncIterator[AsyncSession]:
        """Create a new async session and always close it afterwards."""
        # Fix: the decorated generator function yields sessions, so its
        # correct annotation is AsyncIterator[AsyncSession]; the
        # @asynccontextmanager wrapper is what produces the context manager.
        session = self.session_factory()
        try:
            yield session
        finally:
            await session.close()
class ItemRepository:
    """Repository providing read access to items."""

    def __init__(self, db: DatabaseClient):
        """Store the database client used to open sessions."""
        self.db = db

    async def get_item(self, item_id: UUID4) -> Item:
        """Fetch a single item by its id."""
        async with self.db.get_session() as session:
            result = await session.execute(...)
            return result.scalar()
1
I think the best way is to create the Database instance in the lifespan handler and store it in the application state. The get_db_session dependency can then get the Database instance from request.state and create a session.
from contextlib import asynccontextmanager
from typing import Annotated, cast
import fastapi
from fastapi.datastructures import State
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, Session
# In-memory SQLite URL so this example runs without any external service.
config = {
    "database_url": "sqlite://",
}
# The Database class's code here is included only to make this example runnable.
# This part of the code is copied from the question.
class Database:
    """Owns the engine and hands out sessions (runnable copy of the
    question's class, minus the no-op try/except wrappers that only
    re-raised the caught exception)."""

    def __init__(self):
        """Create the engine and session factory from the module config."""
        database_url = config.get("database_url")
        if not database_url:
            # ValueError is a subclass of Exception, so callers catching
            # Exception keep working.
            raise ValueError("database_url not found.")
        self.engine = create_engine(
            database_url,
            # pool_size / pool_timeout are omitted here; presumably the
            # in-memory SQLite pool does not accept them — confirm.
        )
        self.Session = sessionmaker(bind=self.engine)

    def get_session(self):
        """Return a new Session bound to this engine."""
        return self.Session()
class MyState(State):
    """Typed view of the application state so `state.db` type-checks."""

    # Set by the lifespan handler's yielded mapping.
    db: Database
@asynccontextmanager
async def lifespan(app: fastapi.FastAPI):
    """Create the shared Database once at startup and expose it via app state."""
    yield {"db": Database()}
# Register the lifespan handler so it runs on application startup/shutdown.
app = fastapi.FastAPI(lifespan=lifespan)
def get_db_session(request: fastapi.Request):
    """Dependency: yield a session from the app-wide Database on request.state."""
    my_state = cast(MyState, request.state)
    session = my_state.db.get_session()
    try:
        yield session
    finally:
        # Equivalent to the Session context-manager protocol: close on exit.
        session.close()
@app.get("/")
def index(session: Annotated[Session, fastapi.Depends(get_db_session)]):
    # Example endpoint: the injected session is request-scoped via the
    # get_db_session dependency above.
    pass
2