Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
81 changes: 75 additions & 6 deletions .github/workflows/tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -11,10 +11,9 @@ permissions:
contents: read

jobs:
run-tests:
unit-tests:
runs-on: ubuntu-latest

# Set shared env vars ONCE here for all steps
env:
MODE: development
POSTGRES_HOST: localhost
Expand Down Expand Up @@ -56,12 +55,21 @@ jobs:
uses: astral-sh/setup-uv@v5
with:
enable-cache: true
cache-dependency-glob: uv.lock

- name: Set up Python
id: setup-python
uses: actions/setup-python@v6.2.0
with:
python-version-file: "pyproject.toml"

- name: Cache project virtualenv
id: cache-venv
uses: actions/cache@v4
with:
path: .venv
key: venv-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ hashFiles('uv.lock') }}

- name: Install the project
run: uv sync --locked --all-extras --dev

Expand All @@ -76,13 +84,74 @@ jobs:
- name: Run tests
run: uv run pytest -vv --durations=20 --cov --cov-report=xml --junitxml=junit.xml --ignore=tests/transfers

- name: Run BDD tests
run: |
uv run behave tests/features --tags="@backend and @production and not @skip" --no-capture

- name: Upload results to Codecov
uses: codecov/codecov-action@v5
with:
report_type: test_results
token: ${{ secrets.CODECOV_TOKEN }}

bdd-tests:
runs-on: ubuntu-latest

env:
MODE: development
POSTGRES_HOST: localhost
POSTGRES_PORT: 5432
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
POSTGRES_DB: ocotilloapi_test
DB_DRIVER: postgres
BASE_URL: http://localhost:8000
SESSION_SECRET_KEY: supersecretkeyforunittests
AUTHENTIK_DISABLE_AUTHENTICATION: 1

services:
postgis:
image: postgis/postgis:17-3.5
env:
POSTGRES_PASSWORD: postgres
POSTGRES_PORT: 5432
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 5432:5432

steps:
- name: Check out source repository
uses: actions/checkout@v6.0.2

- name: Install uv
uses: astral-sh/setup-uv@v5
with:
enable-cache: true
cache-dependency-glob: uv.lock

- name: Set up Python
id: setup-python
uses: actions/setup-python@v6.2.0
with:
python-version-file: "pyproject.toml"

- name: Cache project virtualenv
id: cache-venv
uses: actions/cache@v4
with:
path: .venv
key: venv-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ hashFiles('uv.lock') }}

- name: Install the project
run: uv sync --locked --all-extras --dev

- name: Show Alembic heads
run: uv run alembic heads

- name: Create test database
run: |
PGPASSWORD=postgres psql -h localhost -p 5432 -U postgres -c "CREATE DATABASE ocotilloapi_test"
PGPASSWORD=postgres psql -h localhost -p 5432 -U postgres -d ocotilloapi_test -c "CREATE EXTENSION IF NOT EXISTS postgis"

- name: Run BDD tests
run: uv run behave tests/features --tags="@backend and @production and not @skip" --no-capture
7 changes: 6 additions & 1 deletion core/lexicon.json
Original file line number Diff line number Diff line change
Expand Up @@ -2260,7 +2260,12 @@
"categories": ["status_value"],
"term": "Open",
"definition": "The well is open."
},
},
{
"categories": ["status_value"],
"term": "Open (unequipped)",
"definition": "The well is open and unequipped."
},
{
"categories": ["status_value"],
"term": "Closed",
Expand Down
43 changes: 24 additions & 19 deletions tests/features/environment.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,9 +13,13 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
import os
import random
from datetime import datetime, timedelta

from alembic import command
from alembic.config import Config
from core.initializers import init_lexicon, init_parameter
from db import (
Location,
Thing,
Expand All @@ -40,15 +44,14 @@
ThingAquiferAssociation,
GeologicFormation,
ThingGeologicFormationAssociation,
Base,
Asset,
Contact,
Sample,
Base,
)
from db.engine import session_ctx
from services.util import get_bool_env
from db.initialization import recreate_public_schema, sync_search_vector_triggers
from sqlalchemy import select
from transfers.transfer import _drop_and_rebuild_db


def add_context_object_container(name):
Expand Down Expand Up @@ -499,24 +502,26 @@ def add_geologic_formation(context, session, formation_code, well):
return formation


def before_all(context):
context.objects = {}
def _alembic_config() -> Config:
    """Build an Alembic ``Config`` rooted at the repository top level.

    The repository root is three directory levels above this file; both
    ``alembic.ini`` and the migration-script directory are resolved
    relative to it so the config works regardless of the CWD.
    """
    repo_root = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
    config = Config(os.path.join(repo_root, "alembic.ini"))
    config.set_main_option("script_location", os.path.join(repo_root, "alembic"))
    return config

rebuild_raw = get_bool_env("DROP_AND_REBUILD_DB")
rebuild = rebuild_raw if isinstance(rebuild_raw, bool) else False
erase_data = False
if rebuild:
_drop_and_rebuild_db()
elif erase_data:
with session_ctx() as session:
for table in reversed(Base.metadata.sorted_tables):
if table.name in ("alembic_version", "parameter"):
continue
elif table.name.startswith("lexicon"):
continue

session.execute(table.delete())
session.commit()
def _initialize_test_schema() -> None:
    """Rebuild the test database schema from scratch for the BDD suite.

    Order matters here: the public schema is dropped and recreated first,
    Alembic migrations are then applied up to ``head``, and only afterwards
    are the search-vector triggers and seed data installed on the freshly
    migrated schema.
    """
    # Wipe the public schema so migrations run against a clean database.
    with session_ctx() as session:
        recreate_public_schema(session)
    # Apply every Alembic migration to the empty schema.
    command.upgrade(_alembic_config(), "head")
    with session_ctx() as session:
        # NOTE(review): presumably the triggers are not created by the
        # migrations themselves and must be synced explicitly — confirm.
        sync_search_vector_triggers(session)
    # Seed reference data the tests rely on.
    init_lexicon()
    init_parameter()


def before_all(context):
context.objects = {}
_initialize_test_schema()

with session_ctx() as session:

Expand Down
33 changes: 33 additions & 0 deletions tests/test_transfer_legacy_dates.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,10 +24,12 @@
import datetime
from unittest.mock import patch

import numpy as np
import pandas as pd
import pytest

from db import Sample
from transfers.well_transfer import _normalize_completion_date
from transfers.util import make_location
from transfers.waterlevels_transfer import WaterLevelTransferer

Expand Down Expand Up @@ -207,6 +209,37 @@ def test_make_observation_maps_data_quality():
assert observation.nma_data_quality == "Mapped Quality"


def test_normalize_completion_date_drops_time_from_datetime():
    """A datetime with a time component normalizes to its date, no parse failure."""
    result, failed = _normalize_completion_date(
        datetime.datetime(2024, 7, 3, 14, 15, 16)
    )
    assert result == datetime.date(2024, 7, 3)
    assert failed is False


def test_normalize_completion_date_drops_time_from_timestamp_and_string():
    """Pandas Timestamps and timestamp strings both normalize to the same date."""
    expected = datetime.date(2021, 5, 6)
    for raw in (pd.Timestamp("2021-05-06 23:59:00"), "2021-05-06 23:59:00.000"):
        result, failed = _normalize_completion_date(raw)
        assert result == expected
        assert failed is False


def test_normalize_completion_date_handles_numpy_datetime64():
    """A numpy datetime64 input is accepted and truncated to a date."""
    result, failed = _normalize_completion_date(np.datetime64("2020-01-02T03:04:05"))
    assert result == datetime.date(2020, 1, 2)
    assert failed is False


def test_normalize_completion_date_invalid_returns_none_and_parse_failed():
    """An unparseable value yields ``None`` with the parse-failed flag set."""
    result, failed = _normalize_completion_date("not-a-date")
    assert result is None
    assert failed is True


def test_get_dt_utc_respects_time_datum():
transfer = WaterLevelTransferer.__new__(WaterLevelTransferer)
transfer.errors = []
Expand Down
47 changes: 47 additions & 0 deletions transfers/util.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,38 @@
}


# Statically defined recording intervals, keyed by monitoring-point ID.
# Each value is a (count, unit) pair — e.g. (15, "minute") means a reading
# every 15 minutes. Looked up by _get_defined_recording_interval() before
# any interval is estimated from the measurement data itself.
DEFINED_RECORDING_INTERVALS = {
    "SA-0174": (1, "hour"),
    "SO-0140": (15, "minute"),
    "SO-0145": (15, "minute"),
    "SO-0146": (15, "minute"),
    "SO-0148": (15, "minute"),
    "SO-0160": (15, "minute"),
    "SO-0163": (15, "minute"),
    "SO-0165": (15, "minute"),
    "SO-0166": (15, "minute"),
    "SO-0175": (15, "minute"),
    "SO-0177": (15, "minute"),
    "SO-0189": (15, "minute"),
    "SO-0191": (15, "minute"),
    "SO-0194": (15, "minute"),
    "SO-0200": (15, "minute"),
    "SO-0204": (15, "minute"),
    "SO-0224": (15, "minute"),
    "SO-0238": (15, "minute"),
    "SO-0247": (15, "minute"),
    "SO-0249": (15, "minute"),
    "SO-0261": (15, "minute"),
    "SM-0055": (6, "hour"),
    "SM-0259": (12, "hour"),
    "HS-038": (12, "hour"),
    "EB-220": (12, "hour"),
    "SO-0144": (15, "minute"),
    "SO-0142": (15, "minute"),
    "SO-0190": (15, "minute"),
}


class MeasuringPointEstimator:
def __init__(self):
df = read_csv("WaterLevels")
Expand Down Expand Up @@ -123,6 +155,12 @@ def estimate_measuring_point_height(
return mphs, mph_descs, start_dates, end_dates


def _get_defined_recording_interval(pointid: str) -> tuple[int, str] | None:
    """Return the statically defined (count, unit) recording interval for a point.

    Returns ``None`` when ``pointid`` has no hard-coded interval, letting the
    caller fall back to estimating one from the measurement data.
    """
    # dict.get already yields None for missing keys, avoiding the redundant
    # membership test + second lookup of the original `in` / `[]` pair.
    return DEFINED_RECORDING_INTERVALS.get(pointid)


class SensorParameterEstimator:
def __init__(self, sensor_type: str):
if sensor_type == "Pressure Transducer":
Expand Down Expand Up @@ -156,7 +194,16 @@ def estimate_recording_interval(
installation_date: datetime = None,
removal_date: datetime = None,
) -> tuple[int | None, str | None, str | None]:
"""
return estimated recording interval, unit, and error message if applicable
"""
point_id = record.PointID

# get statically defined recording interval provided by Ethan
ri = _get_defined_recording_interval(point_id)
if ri is not None:
return ri[0], ri[1], None

cdf = self._get_values(point_id)
if len(cdf) == 0:
return None, None, f"No measurements found for PointID: {point_id}"
Expand Down
Loading