Skip to content

Commit 88602e4

Browse files
authored
Merge pull request #463 from DataIntegrationGroup/feature/356-alembic-migration-tests
feat: add integration tests for Alembic migrations
2 parents cba8d9e + bfea68b commit 88602e4

3 files changed

Lines changed: 484 additions & 1 deletion

File tree

admin/views/chemistry_sampleinfo.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@
2929
"""
3030

3131
from starlette.requests import Request
32-
from starlette_admin import HasOne
32+
from starlette_admin.fields import HasOne
3333

3434
from admin.views.base import OcotilloModelView
3535

Lines changed: 373 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,373 @@
1+
# ===============================================================================
2+
# Copyright 2026
3+
#
4+
# Licensed under the Apache License, Version 2.0 (the "License");
5+
# you may not use this file except in compliance with the License.
6+
# You may obtain a copy of the License at
7+
#
8+
# http://www.apache.org/licenses/LICENSE-2.0
9+
#
10+
# Unless required by applicable law or agreed to in writing, software
11+
# distributed under the License is distributed on an "AS IS" BASIS,
12+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13+
# See the License for the specific language governing permissions and
14+
# limitations under the License.
15+
# ===============================================================================
16+
"""
17+
Integration tests for Alembic migrations.
18+
19+
Tests that:
20+
1. Migrations run successfully (upgrade head)
21+
2. Expected tables and columns exist after migration
22+
3. Migration history is consistent
23+
4. Downgrade paths work (optional, selected migrations)
24+
25+
These tests ensure CI catches migration errors before merge and that
26+
schema drift between models and migrations is detected.
27+
28+
Related: GitHub Issue #356
29+
"""
30+
31+
import os
32+
33+
import pytest
34+
from alembic import command
35+
from alembic.config import Config
36+
from alembic.script import ScriptDirectory
37+
from sqlalchemy import inspect, text
38+
39+
from db.engine import engine, session_ctx
40+
41+
42+
def _alembic_config() -> Config:
    """Build an Alembic ``Config`` anchored at the project root.

    The project root is resolved as three directory levels above this
    test module, and ``script_location`` is forced to the project's
    ``alembic`` directory so tests do not depend on the CWD.
    """
    project_root = os.path.dirname(
        os.path.dirname(os.path.dirname(__file__))
    )
    config = Config(os.path.join(project_root, "alembic.ini"))
    config.set_main_option(
        "script_location", os.path.join(project_root, "alembic")
    )
    return config
48+
49+
50+
# =============================================================================
51+
# Migration History Tests
52+
# =============================================================================
53+
54+
55+
class TestMigrationHistory:
    """Consistency checks for the Alembic migration script chain."""

    def test_migrations_have_no_multiple_heads(self):
        """
        Migration history should have a single head (no branching).

        Multiple heads indicate parallel migrations that need to be merged.
        """
        script = ScriptDirectory.from_config(_alembic_config())
        heads = script.get_heads()

        assert len(heads) == 1, (
            f"Multiple migration heads detected: {heads}. "
            "Run 'alembic merge heads' to resolve."
        )

    def test_all_migrations_have_down_revision(self):
        """
        All migrations except the first should have a down_revision.

        This ensures the migration chain is unbroken.
        """
        script = ScriptDirectory.from_config(_alembic_config())

        # A revision with down_revision=None is a base; only the first one
        # encountered while walking head -> base is legitimate.
        bases = [
            rev.revision
            for rev in script.walk_revisions()
            if rev.down_revision is None
        ]
        revisions_without_down = bases[1:]

        assert (
            not revisions_without_down
        ), f"Migrations missing down_revision (besides base): {revisions_without_down}"

    def test_current_revision_matches_head(self):
        """
        Database should be at the latest migration head.

        This verifies that test setup ran migrations successfully.
        """
        script = ScriptDirectory.from_config(_alembic_config())
        head = script.get_current_head()

        # Alembic stamps the applied revision into the alembic_version table.
        with engine.connect() as conn:
            current = conn.execute(
                text("SELECT version_num FROM alembic_version")
            ).scalar()

        assert current == head, (
            f"Database at revision {current}, expected head {head}. "
            "Run 'alembic upgrade head'."
        )
113+
114+
115+
# =============================================================================
116+
# Schema Verification Tests
117+
# =============================================================================
118+
119+
120+
class TestSchemaAfterMigration:
    """Tests that verify expected schema exists after migrations."""

    @pytest.fixture(autouse=True)
    def inspector(self):
        """Attach a SQLAlchemy inspector for schema introspection."""
        self._inspector = inspect(engine)
        yield
        self._inspector = None

    def _table_names(self):
        # Snapshot the table names once per assertion for O(1) membership tests.
        return set(self._inspector.get_table_names())

    def _column_names(self, table_name):
        # Column names of *table_name* as a set.
        return {col["name"] for col in self._inspector.get_columns(table_name)}

    def test_core_tables_exist(self):
        """Core application tables should exist after migration."""
        expected_tables = [
            "location",
            "thing",
            "observation",
            "sample",
            "sensor",
            "contact",
            "field_event",
            "field_activity",
            "group",
            "asset",
            "parameter",
            "lexicon_term",
            "lexicon_category",
        ]

        present = self._table_names()
        missing = [name for name in expected_tables if name not in present]
        assert not missing, f"Missing core tables: {missing}"

    def test_legacy_nma_tables_exist(self):
        """Legacy NMA tables should exist for data migration support."""
        expected_nma_tables = [
            "NMA_Chemistry_SampleInfo",
            "NMA_MajorChemistry",
            "NMA_MinorTraceChemistry",
            "NMA_FieldParameters",
            "NMA_HydraulicsData",
            "NMA_Stratigraphy",
            "NMA_Radionuclides",
            "NMA_AssociatedData",
            "NMA_WeatherData",
        ]

        present = self._table_names()
        missing = [name for name in expected_nma_tables if name not in present]
        assert not missing, f"Missing NMA legacy tables: {missing}"

    def test_thing_table_has_required_columns(self):
        """Thing table should have all required columns."""
        required_columns = [
            "id",
            "name",
            "thing_type",
            "release_status",
            "created_at",
            "nma_pk_welldata",
            "nma_pk_location",
        ]

        present = self._column_names("thing")
        missing = [name for name in required_columns if name not in present]
        assert not missing, f"Thing table missing columns: {missing}"

    def test_location_table_has_geometry_column(self):
        """Location table should have PostGIS geometry column."""
        present = self._column_names("location")
        assert "point" in present, "Location table missing 'point' geometry column"

    def test_observation_table_has_required_columns(self):
        """Observation table should have all required columns."""
        required_columns = [
            "id",
            "observation_datetime",
            "value",
            "unit",
            "sample_id",
            "release_status",
        ]

        present = self._column_names("observation")
        missing = [name for name in required_columns if name not in present]
        assert not missing, f"Observation table missing columns: {missing}"

    def test_alembic_version_table_exists(self):
        """Alembic version tracking table should exist."""
        assert "alembic_version" in self._table_names(), "alembic_version table missing"

    def test_postgis_extension_enabled(self):
        """PostGIS extension should be enabled."""
        with session_ctx() as session:
            postgis = session.execute(
                text("SELECT extname FROM pg_extension WHERE extname = 'postgis'")
            ).scalar()

        assert postgis == "postgis", "PostGIS extension not enabled"
225+
226+
227+
# =============================================================================
228+
# Foreign Key Integrity Tests
229+
# =============================================================================
230+
231+
232+
class TestForeignKeyIntegrity:
    """Tests that verify FK relationships are properly defined."""

    @pytest.fixture(autouse=True)
    def inspector(self):
        """Attach a SQLAlchemy inspector for schema introspection."""
        self._inspector = inspect(engine)
        yield
        self._inspector = None

    def _referred_tables(self, table_name):
        # Set of tables referenced by *table_name*'s foreign keys.
        return {
            fk["referred_table"]
            for fk in self._inspector.get_foreign_keys(table_name)
        }

    def test_observation_has_sample_fk(self):
        """Observation should have FK to Sample."""
        assert "sample" in self._referred_tables(
            "observation"
        ), "Observation missing FK to sample"

    def test_sample_has_field_activity_fk(self):
        """Sample should have FK to FieldActivity."""
        assert "field_activity" in self._referred_tables(
            "sample"
        ), "Sample missing FK to field_activity"

    def test_field_activity_has_field_event_fk(self):
        """FieldActivity should have FK to FieldEvent."""
        assert "field_event" in self._referred_tables(
            "field_activity"
        ), "FieldActivity missing FK to field_event"

    def test_field_event_has_thing_fk(self):
        """FieldEvent should have FK to Thing."""
        assert "thing" in self._referred_tables(
            "field_event"
        ), "FieldEvent missing FK to thing"

    def test_nma_chemistry_has_thing_fk(self):
        """NMA_Chemistry_SampleInfo should have FK to Thing."""
        assert "thing" in self._referred_tables(
            "NMA_Chemistry_SampleInfo"
        ), "NMA_Chemistry_SampleInfo missing FK to thing"
276+
277+
278+
# =============================================================================
279+
# Index Tests
280+
# =============================================================================
281+
282+
283+
class TestIndexes:
    """Tests that verify important indexes exist."""

    @pytest.fixture(autouse=True)
    def inspector(self):
        """Attach a SQLAlchemy inspector for schema introspection."""
        self._inspector = inspect(engine)
        yield
        self._inspector = None

    def test_location_has_spatial_index(self):
        """Location table should have spatial index on point column."""
        indexes = self._inspector.get_indexes("location")

        # Spatial indexes may be named differently, so accept either an
        # index covering the point column or a name hinting point/gist.
        indexed_columns = [
            col for idx in indexes for col in idx.get("column_names", [])
        ]
        name_suggests_spatial = any(
            "point" in str(idx.get("name", "")).lower()
            or "gist" in str(idx.get("name", "")).lower()
            for idx in indexes
        )
        has_point_index = "point" in indexed_columns or name_suggests_spatial

        # Fall back to pg_indexes: PostGIS spatial indexes are GIST-based.
        if not has_point_index:
            with session_ctx() as session:
                gist_rows = session.execute(
                    text(
                        "SELECT indexname FROM pg_indexes "
                        "WHERE tablename = 'location' "
                        "AND indexdef LIKE '%gist%'"
                    )
                ).fetchall()
                has_point_index = len(gist_rows) > 0

        assert has_point_index, "Location table missing spatial index on point"
322+
323+
324+
# =============================================================================
325+
# Downgrade Tests (Selective)
326+
# =============================================================================
327+
328+
329+
class TestMigrationDowngrade:
    """
    Tests for migration downgrade capability.

    Note: These tests are more expensive as they modify schema.
    Only test critical migrations.
    """

    @pytest.mark.skip(reason="Downgrade tests modify schema - run manually")
    def test_can_downgrade_one_revision(self):
        """
        Should be able to downgrade one revision and upgrade back.

        This is a destructive test - skipped by default.
        """

        def current_db_revision():
            # Read the revision stamp Alembic keeps in the database.
            with engine.connect() as conn:
                return conn.execute(
                    text("SELECT version_num FROM alembic_version")
                ).scalar()

        config = _alembic_config()
        script = ScriptDirectory.from_config(config)
        head = script.get_current_head()

        # Determine the revision immediately before head.
        down = script.get_revision(head).down_revision
        if down is None:
            pytest.skip("Cannot downgrade from base revision")
        previous = down[0] if isinstance(down, tuple) else down

        # Step one revision back and confirm the stamp moved.
        command.downgrade(config, previous)
        assert current_db_revision() == previous

        # Restore to head and confirm the round trip succeeded.
        command.upgrade(config, "head")
        assert current_db_revision() == head

0 commit comments

Comments
 (0)