Skip to content

Commit 4962be7

Browse files
authored
Merge pull request #2 from ArielMAJ/release/0.2.0
Release/0.2.0
2 parents 84f2452 + 71cec2a commit 4962be7

21 files changed

Lines changed: 1249 additions & 275 deletions

.env.example

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,3 +2,12 @@ APPLICATION_HOST=127.0.0.1
22
APPLICATION_PORT=3000
33

44
APPLICATION_ROOT=
5+
6+
POSTGRES_USER=postgres
7+
POSTGRES_PASSWORD=postgres
8+
POSTGRES_HOST=localhost
9+
POSTGRES_DATABASE=postgres
10+
POSTGRES_PORT=5432
11+
12+
POSTGRES_ECHO=true
13+
DATABASE_ENABLE_CONNECTION_POOLING=true

.pre-commit-config.yaml

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -3,11 +3,8 @@ repos:
33
rev: "1.7.0"
44
hooks:
55
- id: poetry-check
6-
args: [--directory=backend]
76
- id: poetry-lock
8-
args: [--directory=backend]
97
- id: poetry-install
10-
args: [--directory=backend]
118
- repo: https://github.com/pre-commit/pre-commit-hooks
129
rev: v4.5.0
1310
hooks:

Makefile

Lines changed: 24 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,6 @@ ifneq ("$(wildcard .env)","")
33
export
44
endif
55

6-
76
.PHONY: run
87
run: ## Run the project.
98
poetry run python -m api
@@ -19,6 +18,30 @@ install: ## Install Python requirements.
1918
test: ## Run tests.
2019
ENVIRONMENT=test poetry run pytest --cov
2120

21+
.PHONY: up-database
22+
up-database: ## Start database container.
23+
docker compose up -d postgres --force-recreate
24+
25+
.PHONY: down
26+
down: ## Stop all containers.
27+
docker compose down
28+
29+
.PHONY: migrate
30+
migrate: ## Run database migrations.
31+
poetry run alembic upgrade head
32+
33+
.PHONY: revision
34+
revision: ## Create a new database migration.
35+
poetry run alembic revision --autogenerate -m "$(MESSAGE)"
36+
37+
.PHONY: docker-rm
38+
docker-rm: ## Remove all containers.
39+
docker rm -f $$(docker ps -a -q)
40+
41+
.PHONY: docker-rmi
42+
docker-rmi: ## Remove all images.
43+
docker rmi -f $$(docker images -q)
44+
2245
.PHONY: pre-commit
2346
pre-commit: ## Run pre-commit checks.
2447
poetry run pre-commit run --config ./.pre-commit-config.yaml

alembic.ini

Lines changed: 117 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,117 @@
1+
# A generic, single database configuration.
2+
3+
[alembic]
4+
# path to migration scripts
5+
# Use forward slashes (/) also on windows to provide an os agnostic path
6+
script_location = api/database/alembic
7+
8+
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
9+
# Uncomment the line below if you want the files to be prepended with date and time
10+
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
11+
# for all available tokens
12+
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
13+
14+
# sys.path path, will be prepended to sys.path if present.
15+
# defaults to the current working directory.
16+
prepend_sys_path = .
17+
18+
# timezone to use when rendering the date within the migration file
19+
# as well as the filename.
20+
# If specified, requires Python >= 3.9 or the backports.zoneinfo library.
21+
# Any required deps can be installed by adding `alembic[tz]` to the pip requirements
22+
# string value is passed to ZoneInfo()
23+
# leave blank for localtime
24+
# timezone =
25+
26+
# max length of characters to apply to the "slug" field
27+
# truncate_slug_length = 40
28+
29+
# set to 'true' to run the environment during
30+
# the 'revision' command, regardless of autogenerate
31+
# revision_environment = false
32+
33+
# set to 'true' to allow .pyc and .pyo files without
34+
# a source .py file to be detected as revisions in the
35+
# versions/ directory
36+
# sourceless = false
37+
38+
# version location specification; This defaults
39+
# to alembic/versions. When using multiple version
40+
# directories, initial revisions must be specified with --version-path.
41+
# The path separator used here should be the separator specified by "version_path_separator" below.
42+
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions
43+
44+
# version path separator; As mentioned above, this is the character used to split
45+
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
46+
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
47+
# Valid values for version_path_separator are:
48+
#
49+
# version_path_separator = :
50+
# version_path_separator = ;
51+
# version_path_separator = space
52+
version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
53+
54+
# set to 'true' to search source files recursively
55+
# in each "version_locations" directory
56+
# new in Alembic version 1.10
57+
# recursive_version_locations = false
58+
59+
# the output encoding used when revision files
60+
# are written from script.py.mako
61+
# output_encoding = utf-8
62+
63+
64+
# sqlalchemy.url = postgresql+asyncpg://postgres:postgres@localhost:5432/postgres
65+
66+
67+
[post_write_hooks]
68+
# post_write_hooks defines scripts or Python functions that are run
69+
# on newly generated revision scripts. See the documentation for further
70+
# detail and examples
71+
72+
# format using "black" - use the console_scripts runner, against the "black" entrypoint
73+
# hooks = black
74+
# black.type = console_scripts
75+
# black.entrypoint = black
76+
# black.options = -l 79 REVISION_SCRIPT_FILENAME
77+
78+
# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
79+
# hooks = ruff
80+
# ruff.type = exec
81+
# ruff.executable = %(here)s/.venv/bin/ruff
82+
# ruff.options = --fix REVISION_SCRIPT_FILENAME
83+
84+
# Logging configuration
85+
[loggers]
86+
keys = root,sqlalchemy,alembic
87+
88+
[handlers]
89+
keys = console
90+
91+
[formatters]
92+
keys = generic
93+
94+
[logger_root]
95+
level = WARN
96+
handlers = console
97+
qualname =
98+
99+
[logger_sqlalchemy]
100+
level = WARN
101+
handlers =
102+
qualname = sqlalchemy.engine
103+
104+
[logger_alembic]
105+
level = INFO
106+
handlers =
107+
qualname = alembic
108+
109+
[handler_console]
110+
class = StreamHandler
111+
args = (sys.stderr,)
112+
level = NOTSET
113+
formatter = generic
114+
115+
[formatter_generic]
116+
format = %(levelname)-5.5s [%(name)s] %(message)s
117+
datefmt = %H:%M:%S

api/app.py

Lines changed: 11 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,12 @@
11
from pathlib import Path
22

3+
from api.config import Config
34
from api.entrypoints.router import api_router
45
from fastapi import FastAPI
56
from fastapi.middleware.cors import CORSMiddleware
6-
from fastapi.responses import JSONResponse
7+
from fastapi_async_sqlalchemy import SQLAlchemyMiddleware
8+
9+
# from fastapi.responses import JSONResponse
710
from fastapi_cache import FastAPICache
811
from fastapi_cache.backends.inmemory import InMemoryBackend
912

@@ -20,7 +23,7 @@ def get_app() -> FastAPI:
2023
"""
2124
_app = FastAPI(
2225
title="fastapi-backend-template",
23-
default_response_class=JSONResponse,
26+
# default_response_class=JSONResponse,
2427
)
2528

2629
_app.add_middleware(
@@ -30,6 +33,12 @@ def get_app() -> FastAPI:
3033
allow_methods=["*"],
3134
allow_headers=["*"],
3235
)
36+
_app.add_middleware(
37+
SQLAlchemyMiddleware,
38+
db_url=Config.DATABASE.DB_URL,
39+
engine_args=Config.DATABASE.ENGINE_ARGS,
40+
commit_on_exit=True,
41+
)
3342
_app.include_router(router=api_router)
3443

3544
FastAPICache.init(InMemoryBackend(), prefix="fastapi-cache")

api/config.py

Lines changed: 55 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,59 @@
44

55
import os
66

7+
from sqlalchemy import URL, AsyncAdaptedQueuePool, NullPool
8+
9+
10+
class DatabaseConfig:
    """Database connection settings, sourced from environment variables.

    Every value has a development-friendly default so the application can
    start against a local Postgres container without extra configuration.
    """

    DATABASE_DRIVERNAME = os.getenv("DATABASE_DRIVERNAME", "postgresql+asyncpg")
    POSTGRES_USER = os.getenv("POSTGRES_USER", "postgres")
    POSTGRES_PASSWORD = os.getenv("POSTGRES_PASSWORD", "postgres")
    POSTGRES_HOST = os.getenv("POSTGRES_HOST", "localhost")
    POSTGRES_PORT = os.getenv("POSTGRES_PORT", "5432")
    POSTGRES_DATABASE = os.getenv("POSTGRES_DATABASE", "postgres")

    # Plain-string DSN (used where a string URL is required, e.g. Alembic).
    POSTGRES_URI = (
        f"{DATABASE_DRIVERNAME}:"
        f"//{POSTGRES_USER}:{POSTGRES_PASSWORD}"
        f"@{POSTGRES_HOST}:{POSTGRES_PORT}/{POSTGRES_DATABASE}"
    )

    # Structured URL for SQLAlchemy. URL.create() is the documented
    # constructor (calling URL(...) directly is not public API) and it
    # validates/normalizes the fields; `port` must be an int here.
    DB_URL = URL.create(
        drivername=DATABASE_DRIVERNAME,
        username=POSTGRES_USER,
        password=POSTGRES_PASSWORD,
        host=POSTGRES_HOST,
        port=int(POSTGRES_PORT),
        database=POSTGRES_DATABASE,
        query={},
    )

    # Echo emitted SQL statements to the log when POSTGRES_ECHO=true.
    POSTGRES_ECHO = os.getenv("POSTGRES_ECHO", "false").lower() == "true"
    # Parse as a boolean, matching POSTGRES_ECHO above. The previous
    # `os.getenv("POOL_PRE_PING", True)` returned the *raw string* whenever
    # the variable was set, so POOL_PRE_PING=false was truthy.
    POOL_PRE_PING = os.getenv("POOL_PRE_PING", "true").lower() == "true"
    POOL_SIZE = int(os.getenv("POOL_SIZE", 5))
    MAX_OVERFLOW = int(os.getenv("MAX_OVERFLOW", 10))

    DATABASE_ENABLE_CONNECTION_POOLING = (
        os.getenv("DATABASE_ENABLE_CONNECTION_POOLING", "true").lower() == "true"
    )

    # Pooling keyword arguments for the async engine: a real queue pool when
    # pooling is enabled, otherwise NullPool (no connection reuse).
    _POOLING_ARGS = (
        {
            "poolclass": AsyncAdaptedQueuePool,
            "pool_size": POOL_SIZE,
            "max_overflow": MAX_OVERFLOW,
        }
        if DATABASE_ENABLE_CONNECTION_POOLING
        else {
            "poolclass": NullPool,
        }
    )

    # Keyword arguments handed to the async engine factory.
    ENGINE_ARGS = {
        "echo": POSTGRES_ECHO,
        "pool_pre_ping": POOL_PRE_PING,
        **_POOLING_ARGS,
    }
59+
760

861
class Config:
962
"""Base configuration."""
@@ -20,6 +73,8 @@ class Config:
2073

2174
APPLICATION_ROOT = os.getenv("APPLICATION_ROOT", "")
2275

76+
DATABASE: DatabaseConfig = DatabaseConfig()
77+
2378

2479
class TestConfig(Config):
2580
"""Test configuration."""

api/database/__init__.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
from api.database.models import users # noqa

api/database/alembic/README

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
Generic single-database configuration with an async dbapi.

api/database/alembic/env.py

Lines changed: 92 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,92 @@
1+
import asyncio
2+
from logging.config import fileConfig
3+
4+
from alembic import context
5+
from api.config import Config
6+
from api.database.models.model_base import ModelBase
7+
from sqlalchemy import pool
8+
from sqlalchemy.engine import Connection
9+
from sqlalchemy.ext.asyncio import async_engine_from_config
10+
11+
# Alembic's Config object, exposing the values of the .ini file in use.
config = context.config

# Point Alembic at the application's database instead of a hard-coded URL.
config.set_main_option("sqlalchemy.url", Config.DATABASE.POSTGRES_URI)


# Configure Python logging from the ini file, when one was given.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# Metadata that 'autogenerate' diffs new migrations against.
target_metadata = ModelBase.metadata

# Other values from the config, defined by the needs of env.py, can be
# read via config.get_main_option("...") as needed.
33+
34+
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    The context is configured with just a URL — no Engine and therefore
    no DBAPI is needed. Each context.execute() call emits its statement
    to the script output instead of executing it against a database.
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()
56+
57+
58+
def do_run_migrations(connection: Connection) -> None:
    """Bind the migration context to *connection* and execute migrations."""
    context.configure(target_metadata=target_metadata, connection=connection)

    with context.begin_transaction():
        context.run_migrations()
63+
64+
65+
async def run_async_migrations() -> None:
    """Create an async Engine and run the migrations over one connection.

    The engine is built from the [alembic] ini section (prefix
    "sqlalchemy.") with pooling disabled, since a migration run only ever
    needs a single short-lived connection.
    """
    connectable = async_engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    try:
        async with connectable.connect() as connection:
            await connection.run_sync(do_run_migrations)
    finally:
        # Dispose even when a migration fails, so no connections leak.
        await connectable.dispose()
81+
82+
83+
def run_migrations_online() -> None:
    """Run migrations in 'online' mode by driving the async engine."""
    asyncio.run(run_async_migrations())


# Entry point: Alembic imports this module, and the mode picks the runner.
(run_migrations_offline if context.is_offline_mode() else run_migrations_online)()

0 commit comments

Comments
 (0)