diff --git a/api/.envexample b/api/.envexample index 1117d106..da8708a2 100644 --- a/api/.envexample +++ b/api/.envexample @@ -4,5 +4,8 @@ POSTGRES_PASSWORD= POSTGRES_HOST= POSTGRES_PORT= POSTGRES_DB= +JWT_SECRET_KEY= +JWT_ALGORITHM=HS256 +ACCESS_TOKEN_EXPIRE_HOURS=8 SETUP_DB=1 -POPULATE_DB=1 \ No newline at end of file +POPULATE_DB=1 diff --git a/api/auth/__init__.py b/api/auth/__init__.py new file mode 100644 index 00000000..9855b8c1 --- /dev/null +++ b/api/auth/__init__.py @@ -0,0 +1,15 @@ +from .dependencies import ScopedUser +from .session_tracking import ( + LAST_SEEN_UPDATE_INTERVAL, + create_user_session, + mark_session_signed_out, + touch_user_session, +) + +__all__ = [ + "ScopedUser", + "LAST_SEEN_UPDATE_INTERVAL", + "create_user_session", + "mark_session_signed_out", + "touch_user_session", +] diff --git a/api/auth/dependencies.py b/api/auth/dependencies.py new file mode 100644 index 00000000..a0848c83 --- /dev/null +++ b/api/auth/dependencies.py @@ -0,0 +1,13 @@ +from enum import Enum + +from api.security import scoped_user + + +class ScopedUser(Enum): + Read = scoped_user(["read"]) + Admin = scoped_user(["admin"]) + OSE = scoped_user(["ose"]) + ActivityWrite = scoped_user(["activities:write"]) + WellMeasurementWrite = scoped_user(["well_measurement:write"]) + MeterWrite = scoped_user(["meters:write"]) + WellWrite = scoped_user(["well:write"]) diff --git a/api/auth/session_tracking.py b/api/auth/session_tracking.py new file mode 100644 index 00000000..92bbf8b6 --- /dev/null +++ b/api/auth/session_tracking.py @@ -0,0 +1,223 @@ +from __future__ import annotations + +from datetime import datetime, timedelta +from typing import Optional +from uuid import uuid4 + +from fastapi import Request +from sqlalchemy.orm import Session + +from api.models.user import SignOutReasonTypeLU, UserSessions, Users + +LAST_SEEN_UPDATE_INTERVAL = timedelta(minutes=5) + + +def normalize_header_value(value: Optional[str]) -> Optional[str]: + if value is None: + return None + + 
normalized = value.strip() + return normalized or None + + +def extract_client_ip(request: Request) -> Optional[str]: + forwarded_for = normalize_header_value(request.headers.get("x-forwarded-for")) + if forwarded_for: + return forwarded_for.split(",")[0].strip() + + real_ip = normalize_header_value(request.headers.get("x-real-ip")) + if real_ip: + return real_ip + + if request.client: + return request.client.host + + return None + + +def parse_browser(user_agent: Optional[str]) -> Optional[str]: + if not user_agent: + return None + + browser_patterns = [ + ("Edg/", "Microsoft Edge"), + ("OPR/", "Opera"), + ("Opera", "Opera"), + ("SamsungBrowser/", "Samsung Internet"), + ("CriOS/", "Chrome (iOS)"), + ("Chrome/", "Chrome"), + ("Chromium/", "Chromium"), + ("FxiOS/", "Firefox (iOS)"), + ("Firefox/", "Firefox"), + ("Version/", "Safari"), + ("MSIE ", "Internet Explorer"), + ("Trident/", "Internet Explorer"), + ] + + for token, browser_name in browser_patterns: + if token in user_agent: + return browser_name + + return "Unknown Browser" + + +def parse_operating_system(user_agent: Optional[str]) -> Optional[str]: + if not user_agent: + return None + + os_patterns = [ + ("Windows NT", "Windows"), + ("Android", "Android"), + ("iPhone", "iOS"), + ("iPad", "iPadOS"), + ("Mac OS X", "macOS"), + ("CrOS", "ChromeOS"), + ("Linux", "Linux"), + ] + + for token, os_name in os_patterns: + if token in user_agent: + return os_name + + return "Unknown OS" + + +def parse_device_type(user_agent: Optional[str]) -> Optional[str]: + if not user_agent: + return None + + lowered_user_agent = user_agent.lower() + if "ipad" in lowered_user_agent or "tablet" in lowered_user_agent: + return "Tablet" + if "mobile" in lowered_user_agent or "iphone" in lowered_user_agent: + return "Mobile" + + return "Desktop" + + +def build_device_label( + browser: Optional[str], operating_system: Optional[str], device_type: Optional[str] +) -> Optional[str]: + if browser and operating_system: + return f"{browser} 
on {operating_system}" + if browser and device_type: + return f"{browser} ({device_type})" + return browser or operating_system or device_type + + +def create_user_session(db: Session, user: Users, request: Request) -> UserSessions: + user_agent = normalize_header_value(request.headers.get("user-agent")) + browser = normalize_header_value(request.headers.get("x-browser")) or parse_browser( + user_agent + ) + operating_system = normalize_header_value( + request.headers.get("x-operating-system") + ) or parse_operating_system(user_agent) + device_type = normalize_header_value( + request.headers.get("x-device-type") + ) or parse_device_type(user_agent) + device_label = normalize_header_value( + request.headers.get("x-device-label") + ) or build_device_label(browser, operating_system, device_type) + fingerprint_hash = normalize_header_value( + request.headers.get("x-device-fingerprint") + ) + + session = UserSessions( + user_id=user.id, + session_identifier=str(uuid4()), + ip_address=extract_client_ip(request), + user_agent=user_agent, + device_label=device_label, + device_type=device_type, + browser=browser, + operating_system=operating_system, + fingerprint_hash=fingerprint_hash, + signed_in_at=datetime.utcnow(), + last_seen_at=datetime.utcnow(), + is_active=True, + ) + + db.add(session) + db.flush() + + return session + + +def get_sign_out_reason( + db: Session, reason_name: Optional[str] +) -> Optional[SignOutReasonTypeLU]: + normalized_reason_name = normalize_header_value(reason_name) or "unknown" + sign_out_reason = ( + db.query(SignOutReasonTypeLU) + .filter(SignOutReasonTypeLU.name == normalized_reason_name) + .first() + ) + + if sign_out_reason: + return sign_out_reason + + return ( + db.query(SignOutReasonTypeLU) + .filter(SignOutReasonTypeLU.name == "unknown") + .first() + ) + + +def mark_session_signed_out( + db: Session, + session_identifier: str, + reason_name: Optional[str], + fingerprint_hash: Optional[str] = None, +) -> Optional[UserSessions]: + session 
= ( + db.query(UserSessions) + .filter(UserSessions.session_identifier == session_identifier) + .first() + ) + if not session: + return None + + if ( + fingerprint_hash + and session.fingerprint_hash + and session.fingerprint_hash != fingerprint_hash + ): + return None + + if session.signed_out_at is not None: + return session + + sign_out_reason = get_sign_out_reason(db, reason_name) + + session.signed_out_at = datetime.utcnow() + session.last_seen_at = session.signed_out_at + session.is_active = False + session.sign_out_reason_type_id = sign_out_reason.id if sign_out_reason else None + db.add(session) + + return session + + +def touch_user_session(db: Session, session_identifier: Optional[str]) -> None: + if not session_identifier: + return + + session = ( + db.query(UserSessions) + .filter( + UserSessions.session_identifier == session_identifier, + UserSessions.is_active.is_(True), + ) + .first() + ) + if not session: + return + + now = datetime.utcnow() + if session.last_seen_at and now - session.last_seen_at < LAST_SEEN_UPDATE_INTERVAL: + return + + session.last_seen_at = now + db.add(session) + db.commit() diff --git a/api/config.py b/api/config.py index a5011828..95e643ce 100644 --- a/api/config.py +++ b/api/config.py @@ -26,6 +26,9 @@ class Settings: "POSTGRES_PORT", 5432 ) # default postgres port is 5432 POSTGRES_DB: str = os.getenv("POSTGRES_DB") + JWT_SECRET_KEY: str | None = os.getenv("JWT_SECRET_KEY") + JWT_ALGORITHM: str = os.getenv("JWT_ALGORITHM", "HS256") + ACCESS_TOKEN_EXPIRE_HOURS: int = int(os.getenv("ACCESS_TOKEN_EXPIRE_HOURS", "8")) DATABASE_URL = f"postgresql+psycopg://{POSTGRES_USER}:{POSTGRES_PASSWORD}@{POSTGRES_HOST}:{POSTGRES_PORT}/{POSTGRES_DB}" diff --git a/api/enums.py b/api/enums.py index 4b545d0d..6c31a63b 100644 --- a/api/enums.py +++ b/api/enums.py @@ -1,5 +1,4 @@ from enum import Enum -from api.security import scoped_user class MeterSortByField(Enum): @@ -31,15 +30,6 @@ class SortDirection(Enum): Descending = "desc" -class 
ScopedUser(Enum): - Read = scoped_user(["read"]) - Admin = scoped_user(["admin"]) - OSE = scoped_user(["ose"]) - ActivityWrite = scoped_user(["activities:write"]) - WellMeasurementWrite = scoped_user(["well_measurement:write"]) - MeterWrite = scoped_user(["meters:write"]) - WellWrite = scoped_user(["well:write"]) - class WorkOrderStatus(Enum): Open = "Open" Closed = "Closed" diff --git a/api/main.py b/api/main.py index 7c6edd7c..facd1a99 100644 --- a/api/main.py +++ b/api/main.py @@ -1,11 +1,11 @@ from datetime import timedelta -from fastapi import FastAPI, Depends, HTTPException +from fastapi import FastAPI, Depends, HTTPException, Request from fastapi.security import OAuth2PasswordRequestForm from fastapi_pagination import add_pagination from fastapi.middleware.cors import CORSMiddleware from starlette import status -from api.schemas import security_schemas -from api.models.main_models import Users +from api.schemas import security as security_schema +from api.models.user import Users from api.routes.activities import activity_router, public_activity_router from api.routes.admin import admin_router from api.routes.chlorides import authenticated_chlorides_router, public_chlorides_router @@ -18,18 +18,22 @@ from api.routes.OSE import ose_router from api.routes.parts import part_router from api.routes.settings import settings_router +from api.routes.user_sessions import user_sessions_router +from api.routes.work_orders import work_orders_router from api.routes.well_measurements import ( authenticated_well_measurement_router, public_well_measurement_router, ) from api.routes.wells import authenticated_well_router, public_well_router +from api.auth.session_tracking import create_user_session, touch_user_session from api.security import ( authenticate_user, create_access_token, ACCESS_TOKEN_EXPIRE_HOURS, authenticated_router, + get_session_identifier_from_token, ) -from api.session import get_db +from api.session import get_db, SessionLocal from sqlalchemy.orm import 
Session tags_metadata = [ @@ -83,9 +87,11 @@ # ============== Security ============== -@app.post("/token", response_model=security_schemas.Token, tags=["Login"]) +@app.post("/token", response_model=security_schema.Token, tags=["Login"]) def login_for_access_token( - form_data: OAuth2PasswordRequestForm = Depends(), db: Session = Depends(get_db) + request: Request, + form_data: OAuth2PasswordRequestForm = Depends(), + db: Session = Depends(get_db), ): user: Users = authenticate_user(form_data.username, form_data.password, db) if not user: @@ -102,18 +108,49 @@ def login_for_access_token( headers={"WWW-Authenticate": "Bearer"}, ) + user_session = create_user_session(db=db, user=user, request=request) + access_token = create_access_token( data={ "sub": user.username, + "sid": user_session.session_identifier, "scopes": list( map(lambda scope: scope.scope_string, user.user_role.security_scopes) ), }, expires_delta=timedelta(hours=ACCESS_TOKEN_EXPIRE_HOURS), ) - user_response = security_schemas.User(**user.__dict__) + user_response = security_schema.User(**user.__dict__) + db.commit() + + return { + "access_token": access_token, + "token_type": "bearer", + "user": user_response, + "session_identifier": user_session.session_identifier, + } + + +@app.middleware("http") +async def update_user_session_last_seen(request: Request, call_next): + authorization_header = request.headers.get("authorization") + if authorization_header and authorization_header.startswith("Bearer "): + token = authorization_header.removeprefix("Bearer ").strip() + session_identifier = None + + try: + session_identifier = get_session_identifier_from_token(token) + except Exception: + session_identifier = None + + if session_identifier: + db = SessionLocal() + try: + touch_user_session(db, session_identifier) + finally: + db.close() - return {"access_token": access_token, "token_type": "bearer", "user": user_response} + return await call_next(request) # ======================================= @@ -125,6 
+162,7 @@ def login_for_access_token( authenticated_router.include_router(authenticated_meter_router) authenticated_router.include_router(notifications_router) authenticated_router.include_router(part_router) +authenticated_router.include_router(work_orders_router) authenticated_router.include_router(authenticated_well_measurement_router) authenticated_router.include_router(authenticated_well_router) authenticated_router.include_router(settings_router) @@ -138,4 +176,5 @@ def login_for_access_token( app.include_router(public_chlorides_router) app.include_router(public_maintenance_router) app.include_router(public_well_measurement_router) +app.include_router(user_sessions_router) app.include_router(authenticated_router) diff --git a/api/models/__init__.py b/api/models/__init__.py new file mode 100644 index 00000000..ff65bc7f --- /dev/null +++ b/api/models/__init__.py @@ -0,0 +1,74 @@ +from api.models.base import Base +from api.models.location import LandOwners, Locations, LocationTypeLU +from api.models.meter import ( + ActivityTypeLU, + MeterActivities, + MeterActivityPhotos, + MeterObservations, + MeterStatusLU, + MeterTypeLU, + Meters, + NoteTypeLU, + Notes, + ObservedPropertyTypeLU, + PropertyUnits, + ServiceTypeLU, + ServicesPerformed, + Units, + meterRegisters, +) +from api.models.part import PartAssociation, PartTypeLU, Parts, PartsAdded, PartsUsed +from api.models.user import ( + NotificationTypeLU, + Notifications, + ScopesRoles, + SecurityScopes, + SignOutReasonTypeLU, + UserRoles, + UserSessions, + Users, +) +from api.models.well import WellMeasurements, Wells, WellStatus, WellUseLU, WaterSources +from api.models.work_order import workOrders, workOrderStatusLU + +__all__ = [ + "ActivityTypeLU", + "Base", + "LandOwners", + "Locations", + "LocationTypeLU", + "MeterActivities", + "MeterActivityPhotos", + "MeterObservations", + "MeterStatusLU", + "MeterTypeLU", + "Meters", + "NoteTypeLU", + "Notes", + "NotificationTypeLU", + "Notifications", + 
"ObservedPropertyTypeLU", + "PartAssociation", + "PartTypeLU", + "Parts", + "PartsAdded", + "PartsUsed", + "PropertyUnits", + "ScopesRoles", + "SecurityScopes", + "ServiceTypeLU", + "ServicesPerformed", + "SignOutReasonTypeLU", + "Units", + "UserRoles", + "UserSessions", + "Users", + "WellMeasurements", + "Wells", + "WellStatus", + "WellUseLU", + "WaterSources", + "meterRegisters", + "workOrders", + "workOrderStatusLU", +] diff --git a/api/models/base.py b/api/models/base.py new file mode 100644 index 00000000..ca755b8c --- /dev/null +++ b/api/models/base.py @@ -0,0 +1,11 @@ +from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column + + +class Base(DeclarativeBase): + """ + Base class for all models + - Adds id column on all tables + """ + + id: Mapped[int] = mapped_column(primary_key=True) + __name__: str diff --git a/api/models/location.py b/api/models/location.py new file mode 100644 index 00000000..5947f155 --- /dev/null +++ b/api/models/location.py @@ -0,0 +1,66 @@ +from sqlalchemy import Float, ForeignKey, Integer, String +from sqlalchemy.orm import Mapped, mapped_column, relationship +from geoalchemy2.shape import to_shape + +from api.models.base import Base + + +class Locations(Base): + __tablename__ = "Locations" + + name: Mapped[str] = mapped_column(String) + trss: Mapped[str] = mapped_column(String) + latitude: Mapped[float] = mapped_column(Float, nullable=False) + longitude: Mapped[float] = mapped_column(Float, nullable=False) + township: Mapped[int] = mapped_column(Integer) + range: Mapped[int] = mapped_column(Integer) + section: Mapped[int] = mapped_column(Integer) + quarter: Mapped[int] = mapped_column(Integer) + half_quarter: Mapped[int] = mapped_column(Integer) + quarter_quarter: Mapped[int] = mapped_column(Integer) + + type_id: Mapped[int] = mapped_column( + Integer, ForeignKey("LocationTypeLU.id"), nullable=False + ) + land_owner_id: Mapped[int] = mapped_column(Integer, ForeignKey("LandOwners.id")) + + land_owner: Mapped["LandOwners"] = 
relationship() + type: Mapped["LocationTypeLU"] = relationship() + + @property + def lat(self): + try: + return to_shape(self.geom).y + except BaseException: + return + + @property + def long(self): + try: + return to_shape(self.geom).x + except BaseException: + return + + @property + def location(self): + return f"{self.township}.{self.range}.{self.section}.{self.quarter}.{self.half_quarter}" + + +class LocationTypeLU(Base): + __tablename__ = "LocationTypeLU" + type_name: Mapped[str] = mapped_column(String) + description: Mapped[str] = mapped_column(String) + + +class LandOwners(Base): + __tablename__ = "LandOwners" + contact_name: Mapped[str] = mapped_column(String) + organization: Mapped[str] = mapped_column(String) + address: Mapped[str] = mapped_column(String) + city: Mapped[str] = mapped_column(String) + state: Mapped[str] = mapped_column(String) + zip: Mapped[str] = mapped_column(String) + phone: Mapped[str] = mapped_column(String) + mobile: Mapped[str] = mapped_column(String) + email: Mapped[str] = mapped_column(String) + note: Mapped[str] = mapped_column(String) diff --git a/api/models/main_models.py b/api/models/main_models.py index c289ced4..9b73ede9 100644 --- a/api/models/main_models.py +++ b/api/models/main_models.py @@ -1,718 +1,83 @@ -from sqlalchemy import ( - Column, - Integer, - String, - ForeignKey, - Float, - DateTime, - func, - Boolean, - Table, - Numeric, - Date, +from api.models import ( + ActivityTypeLU, + Base, + LandOwners, + Locations, + LocationTypeLU, + MeterActivities, + MeterActivityPhotos, + MeterObservations, + MeterStatusLU, + MeterTypeLU, + Meters, + NoteTypeLU, + Notes, + NotificationTypeLU, + Notifications, + ObservedPropertyTypeLU, + PartAssociation, + PartTypeLU, + Parts, + PartsAdded, + PartsUsed, + PropertyUnits, + ScopesRoles, + SecurityScopes, + ServiceTypeLU, + ServicesPerformed, + SignOutReasonTypeLU, + Units, + UserRoles, + UserSessions, + Users, + WellMeasurements, + Wells, + WellStatus, + WellUseLU, + WaterSources, + 
meterRegisters, + workOrders, + workOrderStatusLU, ) -from sqlalchemy.orm import ( - relationship, - DeclarativeBase, - mapped_column, - Mapped, - deferred, -) -from geoalchemy2.shape import to_shape -from datetime import date -from typing import Optional, List - - -class Base(DeclarativeBase): - """ - Base class for all models - - Adds id column on all tables - """ - - id: Mapped[int] = mapped_column(primary_key=True) - __name__: str - - -class PartTypeLU(Base): - """ - The types of parts - """ - - __tablename__ = "PartTypeLU" - name: Mapped[str] - description: Mapped[str] - - -# Association table that links meter types and their commonly used parts -# see https://docs.sqlalchemy.org/en/20/orm/basic_relationships.html#many-to-many -PartAssociation = Table( - "PartAssociation", - Base.metadata, - Column("part_id", ForeignKey("Parts.id"), nullable=False), - Column("meter_type_id", ForeignKey("MeterTypeLU.id"), nullable=False), -) - - -class Parts(Base): - """ - All parts - """ - - __tablename__ = "Parts" - - part_number: Mapped[str] = mapped_column(String, unique=True, nullable=False) - description: Mapped[Optional[str]] - vendor: Mapped[Optional[str]] - initial_count: Mapped[int] = mapped_column(Integer, nullable=False, default=0) - note: Mapped[Optional[str]] - in_use: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True) - commonly_used: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False) - price: Mapped[Optional[float]] = mapped_column(Float) - - part_type_id: Mapped[int] = mapped_column( - Integer, ForeignKey("PartTypeLU.id"), nullable=False - ) - part_type: Mapped["PartTypeLU"] = relationship() - - # The meter types associated with this part - meter_types: Mapped[Optional[List["MeterTypeLU"]]] = relationship( - secondary=PartAssociation - ) - - parts_used_links: Mapped[list["PartsUsed"]] = relationship( - back_populates="part", - cascade="all, delete-orphan", - ) - - -class PartsUsed(Base): - __tablename__ = "PartsUsed" - - id: 
Mapped[int] = mapped_column(Integer, primary_key=True) - meter_activity_id: Mapped[int] = mapped_column( - ForeignKey("MeterActivities.id"), nullable=False - ) - part_id: Mapped[int] = mapped_column(ForeignKey("Parts.id"), nullable=False) - - count: Mapped[int] = mapped_column(Integer, nullable=False, default=1) - - part: Mapped["Parts"] = relationship(back_populates="parts_used_links") - meter_activity: Mapped["MeterActivities"] = relationship( - back_populates="parts_used_links" - ) - - -class PartsAdded(Base): - __tablename__ = "PartsAdded" - - id: Mapped[int] = mapped_column(Integer, primary_key=True) - part_id: Mapped[int] = mapped_column(ForeignKey("Parts.id"), nullable=False) - - count: Mapped[int] = mapped_column(Integer, nullable=False, default=1) - date: Mapped[date] = mapped_column(Date, nullable=False) # default handled by DB - note: Mapped[str | None] = mapped_column(String, nullable=True) - - part: Mapped["Parts"] = relationship - - -class ServiceTypeLU(Base): - """ - Describes the type of service performed during an activity - """ - - __tablename__ = "ServiceTypeLU" - service_name: Mapped[str] - description: Mapped[str] - - -# Association table that links meter activities and the services that were performed during -ServicesPerformed = Table( - "ServicesPerformed", - Base.metadata, - Column("meter_activity_id", ForeignKey("MeterActivities.id"), nullable=False), - Column("service_type_id", ForeignKey("ServiceTypeLU.id"), nullable=False), -) - - -class NoteTypeLU(Base): - """ - Pre-defined notes that can be set on activities - """ - - __tablename__ = "NoteTypeLU" - note: Mapped[str] - details: Mapped[str] - - # Either one of the special 3 slugs that represent the working status of a meter or null - # working | not-working | not-checked | null - slug: Mapped[str] - # Commonly Used determines what is displayed by default - commonly_used: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False) - - -# Association table that links notes and the 
meter activity they were added to -Notes = Table( +__all__ = [ + "ActivityTypeLU", + "Base", + "LandOwners", + "Locations", + "LocationTypeLU", + "MeterActivities", + "MeterActivityPhotos", + "MeterObservations", + "MeterStatusLU", + "MeterTypeLU", + "Meters", + "NoteTypeLU", "Notes", - Base.metadata, - Column("meter_activity_id", ForeignKey("MeterActivities.id"), nullable=False), - Column("note_type_id", ForeignKey("NoteTypeLU.id"), nullable=False), -) - - -class Meters(Base): - """ - Primary table for tracking meters - """ - - __tablename__ = "Meters" - serial_number: Mapped[str] = mapped_column(String, nullable=False) - # Contact information specific to particular meter - contact_name: Mapped[Optional[str]] = mapped_column(String) - contact_phone: Mapped[Optional[str]] = mapped_column(String) - notes: Mapped[Optional[str]] = mapped_column(String) - price: Mapped[Optional[float]] = mapped_column(Numeric(10, 2)) - - meter_type_id: Mapped[int] = mapped_column( - Integer, ForeignKey("MeterTypeLU.id"), nullable=False - ) - status_id: Mapped[int] = mapped_column( - Integer, ForeignKey("MeterStatusLU.id"), nullable=False - ) - well_id: Mapped[int] = mapped_column( - Integer, ForeignKey("Wells.id"), nullable=False - ) - location_id: Mapped[int] = mapped_column( - Integer, ForeignKey("Locations.id"), nullable=False - ) - register_id: Mapped[int] = mapped_column( - Integer, ForeignKey("meter_registers.id"), nullable=True - ) - - water_users: Mapped[Optional[str]] = mapped_column(String) - meter_owner: Mapped[Optional[str]] = mapped_column(String) - - meter_type: Mapped["MeterTypeLU"] = relationship() - meter_register: Mapped["meterRegisters"] = relationship() - status: Mapped["MeterStatusLU"] = relationship() - well: Mapped["Wells"] = relationship("Wells", back_populates="meters") - location: Mapped["Locations"] = relationship() - - -class MeterTypeLU(Base): - """ - Meter types - """ - - __tablename__ = "MeterTypeLU" - brand: Mapped[str] = mapped_column(String) - series: 
Mapped[str] = mapped_column(String) - model: Mapped[str] = mapped_column(String) - size: Mapped[float] = mapped_column(Float) - description: Mapped[str] = mapped_column(String) - in_use: Mapped[bool] = mapped_column(Boolean, nullable=False) - - -class MeterStatusLU(Base): - """ - Establishes if a meter is installed, in inventory, retired, or other options as needed. - """ - - __tablename__ = "MeterStatusLU" - status_name: Mapped[str] = mapped_column(String) - description: Mapped[str] = mapped_column(String) - - -class MeterActivities(Base): - """ - Logs all meter activities - """ - - __tablename__ = "MeterActivities" - timestamp_start: Mapped[DateTime] = mapped_column(DateTime, nullable=False) - timestamp_end: Mapped[DateTime] = mapped_column(DateTime, nullable=False) - description: Mapped[DateTime] = mapped_column(String) - - submitting_user_id: Mapped[int] = mapped_column( - Integer, ForeignKey("Users.id"), nullable=False - ) - meter_id: Mapped[int] = mapped_column( - Integer, ForeignKey("Meters.id"), nullable=False - ) - activity_type_id: Mapped[int] = mapped_column( - Integer, ForeignKey("ActivityTypeLU.id"), nullable=False - ) - location_id: Mapped[int] = mapped_column( - Integer, ForeignKey("Locations.id"), nullable=False - ) - ose_share: Mapped[bool] = mapped_column(Boolean, nullable=False) - water_users: Mapped[str] = mapped_column(String) - work_order_id: Mapped[int] = mapped_column(Integer, ForeignKey("work_orders.id")) - - submitting_user: Mapped["Users"] = relationship() - meter: Mapped["Meters"] = relationship() - activity_type: Mapped["ActivityTypeLU"] = relationship() - location: Mapped["Locations"] = relationship() - - services_performed: Mapped[List["ServiceTypeLU"]] = relationship( - "ServiceTypeLU", secondary=ServicesPerformed - ) - notes: Mapped[List["NoteTypeLU"]] = relationship("NoteTypeLU", secondary=Notes) - work_order: Mapped["workOrders"] = relationship() - well: Mapped["Wells"] = relationship( - "Wells", - 
primaryjoin="MeterActivities.location_id == Wells.location_id", - foreign_keys="MeterActivities.location_id", - viewonly=True, - ) - photos: Mapped[List["MeterActivityPhotos"]] = relationship( - "MeterActivityPhotos", back_populates="meter_activity", cascade="all, delete" - ) - - parts_used_links: Mapped[list["PartsUsed"]] = relationship( - back_populates="meter_activity", - cascade="all, delete-orphan", - ) - - -class MeterActivityPhotos(Base): - __tablename__ = "MeterActivityPhotos" - - id: Mapped[int] = mapped_column( - Integer, primary_key=True, index=True, autoincrement=True - ) - meter_activity_id: Mapped[int] = mapped_column( - Integer, ForeignKey("MeterActivities.id", ondelete="CASCADE"), nullable=False - ) - file_name: Mapped[str] = mapped_column(String, nullable=False) - gcs_path: Mapped[str] = mapped_column(String, nullable=False) - uploaded_at: Mapped[DateTime] = mapped_column( - DateTime(timezone=True), server_default=func.now() - ) - original_file_name = Column(String, nullable=True) - meter_activity: Mapped["MeterActivities"] = relationship( - "MeterActivities", back_populates="photos" - ) - - -class ActivityTypeLU(Base): - """ - Details the different types of activities PVACD implements - """ - - __tablename__ = "ActivityTypeLU" - name: Mapped[str] = mapped_column(String) - description: Mapped[str] = mapped_column(String) - - # Specifies who can perform this activity (must be either 'technician' or 'admin') - # If admin, only admins can perform, if technician then technician or admin can perform - permission: Mapped[str] = mapped_column(String) - - -class MeterObservations(Base): - """ - Tracks all observations associated with a meter - """ - - __tablename__ = "MeterObservations" - timestamp: Mapped[DateTime] = mapped_column(DateTime, nullable=False) - value: Mapped[float] = mapped_column(Float, nullable=False) - notes: Mapped[str] = mapped_column(String) - ose_share: Mapped[bool] = mapped_column(Boolean, nullable=False) - - submitting_user_id: 
Mapped[int] = mapped_column(Integer, ForeignKey("Users.id")) - meter_id: Mapped[int] = mapped_column( - Integer, ForeignKey("Meters.id"), nullable=False - ) - observed_property_type_id: Mapped[int] = mapped_column( - Integer, ForeignKey("ObservedPropertyTypeLU.id"), nullable=False - ) - unit_id: Mapped[int] = mapped_column( - Integer, ForeignKey("Units.id"), nullable=False - ) - location_id: Mapped[int] = mapped_column( - Integer, ForeignKey("Locations.id"), nullable=False - ) - - submitting_user: Mapped["Users"] = relationship() - meter: Mapped["Meters"] = relationship() - observed_property: Mapped["ObservedPropertyTypeLU"] = relationship() - unit: Mapped["Units"] = relationship() - location: Mapped["Locations"] = relationship() - - -class ObservedPropertyTypeLU(Base): - """ - Defines the types of observations made on a meter - """ - - __tablename__ = "ObservedPropertyTypeLU" - name: Mapped[str] = mapped_column(String) - description: Mapped[str] = mapped_column(String) - context: Mapped[str] = mapped_column( - String - ) # Specifies if property associated with 'meter' or 'well' - - # The units that can be used on this property type - units: Mapped[List["Units"]] = relationship(secondary="PropertyUnits") - - -class Units(Base): - """ - Defines units used in observations - """ - - __tablename__ = "Units" - name: Mapped[str] = mapped_column(String) - name_short: Mapped[str] = mapped_column(String) - description: Mapped[str] = mapped_column(String) - - -PropertyUnits = Table( + "NotificationTypeLU", + "Notifications", + "ObservedPropertyTypeLU", + "PartAssociation", + "PartTypeLU", + "Parts", + "PartsAdded", + "PartsUsed", "PropertyUnits", - Base.metadata, - Column("property_id", ForeignKey("ObservedPropertyTypeLU.id"), nullable=False), - Column("unit_id", ForeignKey("Units.id"), nullable=False), -) - - -class Locations(Base): - """ - Table for tracking information about a well's location - """ - - __tablename__ = "Locations" - name: Mapped[str] = 
mapped_column(String) - trss: Mapped[str] = mapped_column(String) - latitude: Mapped[float] = mapped_column(Float, nullable=False) - longitude: Mapped[float] = mapped_column(Float, nullable=False) - township: Mapped[int] = mapped_column(Integer) - range: Mapped[int] = mapped_column(Integer) - section: Mapped[int] = mapped_column(Integer) - quarter: Mapped[int] = mapped_column(Integer) - half_quarter: Mapped[int] = mapped_column(Integer) - quarter_quarter: Mapped[int] = mapped_column(Integer) - - type_id: Mapped[int] = mapped_column( - Integer, ForeignKey("LocationTypeLU.id"), nullable=False - ) - land_owner_id: Mapped[int] = mapped_column(Integer, ForeignKey("LandOwners.id")) - - land_owner: Mapped["LandOwners"] = relationship() - type: Mapped["LocationTypeLU"] = relationship() - - @property - def lat(self): - try: - return to_shape(self.geom).y - except BaseException: - return - - @property - def long(self): - try: - return to_shape(self.geom).x - except BaseException: - return - - @property - def location(self): - return f"{self.township}.{self.range}.{self.section}.{self.quarter}.{self.half_quarter}" - - -class LocationTypeLU(Base): - """ - Defines the type of location, such as well - """ - - __tablename__ = "LocationTypeLU" - type_name: Mapped[str] = mapped_column(String) - description: Mapped[str] = mapped_column(String) - - -class LandOwners(Base): - """ - Organizations and people that have some relationship with a PVACD meter - - Typically irrigators? 
- """ - - __tablename__ = "LandOwners" - contact_name: Mapped[str] = mapped_column(String) - organization: Mapped[str] = mapped_column(String) - address: Mapped[str] = mapped_column(String) - city: Mapped[str] = mapped_column(String) - state: Mapped[str] = mapped_column(String) - zip: Mapped[str] = mapped_column(String) - phone: Mapped[str] = mapped_column(String) - mobile: Mapped[str] = mapped_column(String) - email: Mapped[str] = mapped_column(String) - note: Mapped[str] = mapped_column(String) - - -class Users(Base): - """ - All info about a user of the app - """ - - __tablename__ = "Users" - - full_name: Mapped[str] = mapped_column(String) - disabled: Mapped[bool] = mapped_column(Boolean, default=False) - username: Mapped[str] = deferred( - mapped_column(String, nullable=False) - ) # Defer sensitive info so it's not sent when it's included as part of a relationship - email: Mapped[str] = deferred(mapped_column(String)) - hashed_password: Mapped[str] = deferred(mapped_column(String, nullable=False)) - - user_role_id: Mapped[int] = deferred( - mapped_column(Integer, ForeignKey("UserRoles.id"), nullable=False) - ) - - user_role: Mapped["UserRoles"] = relationship("UserRoles") - display_name: Mapped[str] = mapped_column(String, nullable=True) - redirect_page: Mapped[str] = mapped_column(String, nullable=True, default="/") - avatar_img: Mapped[str] = mapped_column(String, nullable=True) - notifications: Mapped[List["Notifications"]] = relationship( - "Notifications", - back_populates="user", - cascade="all, delete-orphan", - foreign_keys="Notifications.user_id", - ) - created_notifications: Mapped[List["Notifications"]] = relationship( - "Notifications", - back_populates="creator", - foreign_keys="Notifications.created_by", - ) - - -class NotificationTypeLU(Base): - __tablename__ = "notification_type_lu" - - name: Mapped[str] = mapped_column(String(50), nullable=False, unique=True) - description: Mapped[Optional[str]] = mapped_column(String) - - notifications: 
Mapped[List["Notifications"]] = relationship( - "Notifications", back_populates="notification_type" - ) - - -class Notifications(Base): - __tablename__ = "notifications" - - user_id: Mapped[int] = mapped_column( - Integer, ForeignKey("Users.id", ondelete="CASCADE", onupdate="CASCADE"), index=True - ) - notification_type_id: Mapped[int] = mapped_column( - Integer, - ForeignKey( - "notification_type_lu.id", ondelete="RESTRICT", onupdate="CASCADE" - ), - index=True, - ) - created_by: Mapped[Optional[int]] = mapped_column( - Integer, ForeignKey("Users.id", ondelete="SET NULL", onupdate="CASCADE"), index=True - ) - title: Mapped[str] = mapped_column(String(255), nullable=False) - message: Mapped[str] = mapped_column(String, nullable=False) - link: Mapped[Optional[str]] = mapped_column(String(500)) - is_read: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False, index=True) - created_at: Mapped[DateTime] = mapped_column( - DateTime, nullable=False, server_default=func.now(), index=True - ) - read_at: Mapped[Optional[DateTime]] = mapped_column(DateTime) - - user: Mapped["Users"] = relationship( - "Users", back_populates="notifications", foreign_keys=[user_id] - ) - creator: Mapped[Optional["Users"]] = relationship( - "Users", back_populates="created_notifications", foreign_keys=[created_by] - ) - notification_type: Mapped["NotificationTypeLU"] = relationship( - "NotificationTypeLU", back_populates="notifications" - ) - - -# Association table that links roles and their associated scopes -ScopesRoles = Table( "ScopesRoles", - Base.metadata, - Column("security_scope_id", ForeignKey("SecurityScopes.id"), nullable=False), - Column("user_role_id", ForeignKey("UserRoles.id"), nullable=False), -) - - -class SecurityScopes(Base): - """ - Individual permissions - """ - - __tablename__ = "SecurityScopes" - scope_string: Mapped[str] = mapped_column(String, nullable=False) - description: Mapped[str] = mapped_column(String) - - -class UserRoles(Base): - __tablename__ = 
"UserRoles" - name: Mapped[str] = mapped_column(String, nullable=False) - - # The scopes associated with a given role - security_scopes: Mapped[List["SecurityScopes"]] = relationship( - secondary=ScopesRoles - ) - - -class WellUseLU(Base): - """ - The type of well - """ - - __tablename__ = "WellUseLU" - use_type: Mapped[str] = mapped_column(String, nullable=False) - code: Mapped[str] = mapped_column(String) - description: Mapped[str] = mapped_column(String) - - -class WaterSources(Base): - """ - The source of water for a well - """ - - __tablename__ = "water_sources" - name: Mapped[str] = mapped_column(String, nullable=False) - description: Mapped[str] = mapped_column(String) - - -class WellStatus(Base): - """ - The status of a well - """ - - __tablename__ = "well_status" - status: Mapped[str] = mapped_column(String, nullable=False) - description: Mapped[str] = mapped_column(String) - - -class Wells(Base): - """ - All wells - """ - - __tablename__ = "Wells" - name: Mapped[str] = mapped_column(String) - ra_number: Mapped[str] = mapped_column( - String - ) # RA Number is an OSE well identifier - owners: Mapped[str] = mapped_column(String) - osetag: Mapped[str] = mapped_column(String) - casing: Mapped[str] = mapped_column(String) - total_depth: Mapped[float] = mapped_column(Float) - outside_recorder: Mapped[str] = mapped_column(Boolean) - - use_type_id: Mapped[int] = mapped_column(Integer, ForeignKey("WellUseLU.id")) - location_id: Mapped[int] = mapped_column(Integer, ForeignKey("Locations.id")) - water_source_id: Mapped[int] = mapped_column( - Integer, ForeignKey("water_sources.id") - ) - well_status_id: Mapped[int] = mapped_column(Integer, ForeignKey("well_status.id")) - chloride_group_id: Mapped[int] = mapped_column(Integer) - - use_type: Mapped["WellUseLU"] = relationship() - location: Mapped["Locations"] = relationship() - water_source: Mapped["WaterSources"] = relationship() - well_status: Mapped["WellStatus"] = relationship() - - meters: Mapped[List["Meters"]] 
= relationship("Meters", back_populates="well") - - -class WellMeasurements(Base): - """ - The measurements made on a monitored well - """ - - __tablename__ = "WellMeasurements" - timestamp: Mapped[DateTime] = mapped_column( - DateTime, default=func.now(), nullable=False - ) - value: Mapped[Optional[float]] = mapped_column(Float, nullable=True) - - observed_property_id: Mapped[int] = mapped_column( - Integer, ForeignKey("ObservedPropertyTypeLU.id"), nullable=False - ) - submitting_user_id: Mapped[Optional[int]] = mapped_column( - Integer, ForeignKey("Users.id"), nullable=True - ) - unit_id: Mapped[int] = mapped_column( - Integer, ForeignKey("Units.id"), nullable=False - ) - well_id: Mapped[int] = mapped_column( - Integer, ForeignKey("Wells.id"), nullable=False - ) - - observed_property: Mapped["ObservedPropertyTypeLU"] = relationship() - submitting_user: Mapped["Users"] = relationship() - unit: Mapped["Units"] = relationship() - well: Mapped["Wells"] = relationship() - - -class workOrderStatusLU(Base): - """ - Models the status of a work order - """ - - __tablename__ = "work_order_status_lu" - name = mapped_column(String, nullable=False) - description = mapped_column(String, nullable=False) - - -class workOrders(Base): - """ - Models work orders and associated information - """ - - __tablename__ = "work_orders" - date_created: Mapped[DateTime] = mapped_column(DateTime, nullable=False) - creator: Mapped[str] = mapped_column( - String, nullable=True - ) # There is no consistent list of persons for this, so it is nullable - title: Mapped[str] = mapped_column(String, nullable=False) - description: Mapped[str] = mapped_column(String, nullable=True) - meter_id: Mapped[int] = mapped_column( - Integer, ForeignKey("Meters.id"), nullable=False - ) - status_id: Mapped[int] = mapped_column( - Integer, ForeignKey("work_order_status_lu.id"), nullable=False - ) - notes: Mapped[str] = mapped_column(String, nullable=True) - assigned_user_id: Mapped[int] = mapped_column( - Integer, 
ForeignKey("Users.id"), nullable=True - ) - ose_request_id: Mapped[int] = mapped_column(Integer, nullable=True) - - meter: Mapped["Meters"] = relationship() - status: Mapped["workOrderStatusLU"] = relationship() - assigned_user: Mapped["Users"] = relationship() - - -class meterRegisters(Base): - """ - Models the registers of a meter - """ - - __tablename__ = "meter_registers" - brand: Mapped[str] = mapped_column(String, nullable=False) - meter_size: Mapped[float] = mapped_column(Float, nullable=False) - part_id: Mapped[int] = mapped_column(Integer, ForeignKey("Parts.id")) - ratio: Mapped[str] = mapped_column(String) - dial_units_id: Mapped[int] = mapped_column( - Integer, ForeignKey("Units.id"), nullable=False - ) - totalizer_units_id: Mapped[int] = mapped_column( - Integer, ForeignKey("Units.id"), nullable=False - ) - number_of_digits: Mapped[int] = mapped_column(Integer, nullable=False) - decimal_digits: Mapped[int] = mapped_column(Integer) - multiplier: Mapped[float] = mapped_column(Float, nullable=False) - notes: Mapped[str] = mapped_column(String) - - dial_units: Mapped["Units"] = relationship(foreign_keys=[dial_units_id]) - totalizer_units: Mapped["Units"] = relationship(foreign_keys=[totalizer_units_id]) + "SecurityScopes", + "ServiceTypeLU", + "ServicesPerformed", + "SignOutReasonTypeLU", + "Units", + "UserRoles", + "UserSessions", + "Users", + "WellMeasurements", + "Wells", + "WellStatus", + "WellUseLU", + "WaterSources", + "meterRegisters", + "workOrders", + "workOrderStatusLU", +] diff --git a/api/models/meter.py b/api/models/meter.py new file mode 100644 index 00000000..a833af0c --- /dev/null +++ b/api/models/meter.py @@ -0,0 +1,231 @@ +from typing import List, Optional + +from sqlalchemy import ( + Boolean, + Column, + DateTime, + Float, + ForeignKey, + Integer, + Numeric, + String, + Table, + func, +) +from sqlalchemy.orm import Mapped, mapped_column, relationship + +from api.models.base import Base + + +class ServiceTypeLU(Base): + __tablename__ = 
"ServiceTypeLU" + service_name: Mapped[str] + description: Mapped[str] + + +ServicesPerformed = Table( + "ServicesPerformed", + Base.metadata, + Column("meter_activity_id", ForeignKey("MeterActivities.id"), nullable=False), + Column("service_type_id", ForeignKey("ServiceTypeLU.id"), nullable=False), +) + + +class NoteTypeLU(Base): + __tablename__ = "NoteTypeLU" + note: Mapped[str] + details: Mapped[str] + slug: Mapped[str] + commonly_used: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False) + + +Notes = Table( + "Notes", + Base.metadata, + Column("meter_activity_id", ForeignKey("MeterActivities.id"), nullable=False), + Column("note_type_id", ForeignKey("NoteTypeLU.id"), nullable=False), +) + + +class Meters(Base): + __tablename__ = "Meters" + serial_number: Mapped[str] = mapped_column(String, nullable=False) + contact_name: Mapped[Optional[str]] = mapped_column(String) + contact_phone: Mapped[Optional[str]] = mapped_column(String) + notes: Mapped[Optional[str]] = mapped_column(String) + price: Mapped[Optional[float]] = mapped_column(Numeric(10, 2)) + + meter_type_id: Mapped[int] = mapped_column( + Integer, ForeignKey("MeterTypeLU.id"), nullable=False + ) + status_id: Mapped[int] = mapped_column( + Integer, ForeignKey("MeterStatusLU.id"), nullable=False + ) + well_id: Mapped[int] = mapped_column(Integer, ForeignKey("Wells.id"), nullable=False) + location_id: Mapped[int] = mapped_column( + Integer, ForeignKey("Locations.id"), nullable=False + ) + register_id: Mapped[int] = mapped_column( + Integer, ForeignKey("meter_registers.id"), nullable=True + ) + water_users: Mapped[Optional[str]] = mapped_column(String) + meter_owner: Mapped[Optional[str]] = mapped_column(String) + + meter_type: Mapped["MeterTypeLU"] = relationship() + meter_register: Mapped["meterRegisters"] = relationship() + status: Mapped["MeterStatusLU"] = relationship() + well: Mapped["Wells"] = relationship("Wells", back_populates="meters") + location: Mapped["Locations"] = 
relationship() + + +class MeterTypeLU(Base): + __tablename__ = "MeterTypeLU" + brand: Mapped[str] = mapped_column(String) + series: Mapped[str] = mapped_column(String) + model: Mapped[str] = mapped_column(String) + size: Mapped[float] = mapped_column(Float) + description: Mapped[str] = mapped_column(String) + in_use: Mapped[bool] = mapped_column(Boolean, nullable=False) + + +class MeterStatusLU(Base): + __tablename__ = "MeterStatusLU" + status_name: Mapped[str] = mapped_column(String) + description: Mapped[str] = mapped_column(String) + + +class MeterActivities(Base): + __tablename__ = "MeterActivities" + timestamp_start: Mapped[DateTime] = mapped_column(DateTime, nullable=False) + timestamp_end: Mapped[DateTime] = mapped_column(DateTime, nullable=False) + description: Mapped[DateTime] = mapped_column(String) + submitting_user_id: Mapped[int] = mapped_column( + Integer, ForeignKey("Users.id"), nullable=False + ) + meter_id: Mapped[int] = mapped_column(Integer, ForeignKey("Meters.id"), nullable=False) + activity_type_id: Mapped[int] = mapped_column( + Integer, ForeignKey("ActivityTypeLU.id"), nullable=False + ) + location_id: Mapped[int] = mapped_column( + Integer, ForeignKey("Locations.id"), nullable=False + ) + ose_share: Mapped[bool] = mapped_column(Boolean, nullable=False) + water_users: Mapped[str] = mapped_column(String) + work_order_id: Mapped[int] = mapped_column(Integer, ForeignKey("work_orders.id")) + + submitting_user: Mapped["Users"] = relationship() + meter: Mapped["Meters"] = relationship() + activity_type: Mapped["ActivityTypeLU"] = relationship() + location: Mapped["Locations"] = relationship() + services_performed: Mapped[List["ServiceTypeLU"]] = relationship( + "ServiceTypeLU", secondary=ServicesPerformed + ) + notes: Mapped[List["NoteTypeLU"]] = relationship("NoteTypeLU", secondary=Notes) + work_order: Mapped["workOrders"] = relationship() + well: Mapped["Wells"] = relationship( + "Wells", + primaryjoin="MeterActivities.location_id == 
Wells.location_id", + foreign_keys="MeterActivities.location_id", + viewonly=True, + ) + photos: Mapped[List["MeterActivityPhotos"]] = relationship( + "MeterActivityPhotos", back_populates="meter_activity", cascade="all, delete" + ) + parts_used_links: Mapped[list["PartsUsed"]] = relationship( + back_populates="meter_activity", + cascade="all, delete-orphan", + ) + + +class MeterActivityPhotos(Base): + __tablename__ = "MeterActivityPhotos" + + id: Mapped[int] = mapped_column( + Integer, primary_key=True, index=True, autoincrement=True + ) + meter_activity_id: Mapped[int] = mapped_column( + Integer, ForeignKey("MeterActivities.id", ondelete="CASCADE"), nullable=False + ) + file_name: Mapped[str] = mapped_column(String, nullable=False) + gcs_path: Mapped[str] = mapped_column(String, nullable=False) + uploaded_at: Mapped[DateTime] = mapped_column( + DateTime(timezone=True), server_default=func.now() + ) + original_file_name = Column(String, nullable=True) + meter_activity: Mapped["MeterActivities"] = relationship( + "MeterActivities", back_populates="photos" + ) + + +class ActivityTypeLU(Base): + __tablename__ = "ActivityTypeLU" + name: Mapped[str] = mapped_column(String) + description: Mapped[str] = mapped_column(String) + permission: Mapped[str] = mapped_column(String) + + +class MeterObservations(Base): + __tablename__ = "MeterObservations" + timestamp: Mapped[DateTime] = mapped_column(DateTime, nullable=False) + value: Mapped[float] = mapped_column(Float, nullable=False) + notes: Mapped[str] = mapped_column(String) + ose_share: Mapped[bool] = mapped_column(Boolean, nullable=False) + submitting_user_id: Mapped[int] = mapped_column(Integer, ForeignKey("Users.id")) + meter_id: Mapped[int] = mapped_column(Integer, ForeignKey("Meters.id"), nullable=False) + observed_property_type_id: Mapped[int] = mapped_column( + Integer, ForeignKey("ObservedPropertyTypeLU.id"), nullable=False + ) + unit_id: Mapped[int] = mapped_column(Integer, ForeignKey("Units.id"), nullable=False) 
+ location_id: Mapped[int] = mapped_column( + Integer, ForeignKey("Locations.id"), nullable=False + ) + + submitting_user: Mapped["Users"] = relationship() + meter: Mapped["Meters"] = relationship() + observed_property: Mapped["ObservedPropertyTypeLU"] = relationship() + unit: Mapped["Units"] = relationship() + location: Mapped["Locations"] = relationship() + + +class ObservedPropertyTypeLU(Base): + __tablename__ = "ObservedPropertyTypeLU" + name: Mapped[str] = mapped_column(String) + description: Mapped[str] = mapped_column(String) + context: Mapped[str] = mapped_column(String) + units: Mapped[List["Units"]] = relationship(secondary="PropertyUnits") + + +class Units(Base): + __tablename__ = "Units" + name: Mapped[str] = mapped_column(String) + name_short: Mapped[str] = mapped_column(String) + description: Mapped[str] = mapped_column(String) + + +PropertyUnits = Table( + "PropertyUnits", + Base.metadata, + Column("property_id", ForeignKey("ObservedPropertyTypeLU.id"), nullable=False), + Column("unit_id", ForeignKey("Units.id"), nullable=False), +) + + +class meterRegisters(Base): + __tablename__ = "meter_registers" + brand: Mapped[str] = mapped_column(String, nullable=False) + meter_size: Mapped[float] = mapped_column(Float, nullable=False) + part_id: Mapped[int] = mapped_column(Integer, ForeignKey("Parts.id")) + ratio: Mapped[str] = mapped_column(String) + dial_units_id: Mapped[int] = mapped_column( + Integer, ForeignKey("Units.id"), nullable=False + ) + totalizer_units_id: Mapped[int] = mapped_column( + Integer, ForeignKey("Units.id"), nullable=False + ) + number_of_digits: Mapped[int] = mapped_column(Integer, nullable=False) + decimal_digits: Mapped[int] = mapped_column(Integer) + multiplier: Mapped[float] = mapped_column(Float, nullable=False) + notes: Mapped[str] = mapped_column(String) + + dial_units: Mapped["Units"] = relationship(foreign_keys=[dial_units_id]) + totalizer_units: Mapped["Units"] = relationship(foreign_keys=[totalizer_units_id]) diff --git 
a/api/models/part.py b/api/models/part.py new file mode 100644 index 00000000..dfa5144d --- /dev/null +++ b/api/models/part.py @@ -0,0 +1,74 @@ +from datetime import date +from typing import List, Optional + +from sqlalchemy import Boolean, Column, Date, Float, ForeignKey, Integer, String, Table +from sqlalchemy.orm import Mapped, mapped_column, relationship + +from api.models.base import Base + + +class PartTypeLU(Base): + __tablename__ = "PartTypeLU" + name: Mapped[str] + description: Mapped[str] + + +PartAssociation = Table( + "PartAssociation", + Base.metadata, + Column("part_id", ForeignKey("Parts.id"), nullable=False), + Column("meter_type_id", ForeignKey("MeterTypeLU.id"), nullable=False), +) + + +class Parts(Base): + __tablename__ = "Parts" + + part_number: Mapped[str] = mapped_column(String, unique=True, nullable=False) + description: Mapped[Optional[str]] + vendor: Mapped[Optional[str]] + initial_count: Mapped[int] = mapped_column(Integer, nullable=False, default=0) + note: Mapped[Optional[str]] + in_use: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True) + commonly_used: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False) + price: Mapped[Optional[float]] = mapped_column(Float) + + part_type_id: Mapped[int] = mapped_column( + Integer, ForeignKey("PartTypeLU.id"), nullable=False + ) + part_type: Mapped["PartTypeLU"] = relationship() + meter_types: Mapped[Optional[List["MeterTypeLU"]]] = relationship( + secondary=PartAssociation + ) + parts_used_links: Mapped[list["PartsUsed"]] = relationship( + back_populates="part", + cascade="all, delete-orphan", + ) + + +class PartsUsed(Base): + __tablename__ = "PartsUsed" + + id: Mapped[int] = mapped_column(Integer, primary_key=True) + meter_activity_id: Mapped[int] = mapped_column( + ForeignKey("MeterActivities.id"), nullable=False + ) + part_id: Mapped[int] = mapped_column(ForeignKey("Parts.id"), nullable=False) + count: Mapped[int] = mapped_column(Integer, nullable=False, default=1) 
+ + part: Mapped["Parts"] = relationship(back_populates="parts_used_links") + meter_activity: Mapped["MeterActivities"] = relationship( + back_populates="parts_used_links" + ) + + +class PartsAdded(Base): + __tablename__ = "PartsAdded" + + id: Mapped[int] = mapped_column(Integer, primary_key=True) + part_id: Mapped[int] = mapped_column(ForeignKey("Parts.id"), nullable=False) + count: Mapped[int] = mapped_column(Integer, nullable=False, default=1) + date: Mapped[date] = mapped_column(Date, nullable=False) + note: Mapped[str | None] = mapped_column(String, nullable=True) + + part: Mapped["Parts"] = relationship() diff --git a/api/models/user.py b/api/models/user.py new file mode 100644 index 00000000..9d68c927 --- /dev/null +++ b/api/models/user.py @@ -0,0 +1,156 @@ +from typing import List, Optional + +from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, String, Table, func +from sqlalchemy.orm import Mapped, deferred, mapped_column, relationship + +from api.models.base import Base + + +class Users(Base): + __tablename__ = "Users" + + full_name: Mapped[str] = mapped_column(String) + disabled: Mapped[bool] = mapped_column(Boolean, default=False) + username: Mapped[str] = deferred(mapped_column(String, nullable=False)) + email: Mapped[str] = deferred(mapped_column(String)) + hashed_password: Mapped[str] = deferred(mapped_column(String, nullable=False)) + user_role_id: Mapped[int] = deferred( + mapped_column(Integer, ForeignKey("UserRoles.id"), nullable=False) + ) + + user_role: Mapped["UserRoles"] = relationship("UserRoles") + display_name: Mapped[str] = mapped_column(String, nullable=True) + redirect_page: Mapped[str] = mapped_column(String, nullable=True, default="/") + avatar_img: Mapped[str] = mapped_column(String, nullable=True) + notifications: Mapped[List["Notifications"]] = relationship( + "Notifications", + back_populates="user", + cascade="all, delete-orphan", + foreign_keys="Notifications.user_id", + ) + created_notifications: 
Mapped[List["Notifications"]] = relationship( + "Notifications", + back_populates="creator", + foreign_keys="Notifications.created_by", + ) + user_sessions: Mapped[List["UserSessions"]] = relationship( + "UserSessions", + back_populates="user", + cascade="all, delete-orphan", + ) + + +class SignOutReasonTypeLU(Base): + __tablename__ = "sign_out_reason_type_lu" + + name: Mapped[str] = mapped_column(String(50), nullable=False, unique=True) + description: Mapped[Optional[str]] = mapped_column(String) + user_sessions: Mapped[List["UserSessions"]] = relationship( + "UserSessions", back_populates="sign_out_reason_type" + ) + + +class UserSessions(Base): + __tablename__ = "user_sessions" + + user_id: Mapped[int] = mapped_column( + Integer, ForeignKey("Users.id", ondelete="CASCADE", onupdate="CASCADE"), index=True + ) + session_identifier: Mapped[str] = mapped_column( + String(36), nullable=False, unique=True, index=True + ) + ip_address: Mapped[Optional[str]] = mapped_column(String(255)) + user_agent: Mapped[Optional[str]] = mapped_column(String) + device_label: Mapped[Optional[str]] = mapped_column(String(255)) + device_type: Mapped[Optional[str]] = mapped_column(String(100)) + browser: Mapped[Optional[str]] = mapped_column(String(100)) + operating_system: Mapped[Optional[str]] = mapped_column(String(100)) + fingerprint_hash: Mapped[Optional[str]] = mapped_column(String(128), index=True) + signed_in_at: Mapped[DateTime] = mapped_column( + DateTime, nullable=False, server_default=func.now(), index=True + ) + last_seen_at: Mapped[DateTime] = mapped_column( + DateTime, nullable=False, server_default=func.now(), index=True + ) + signed_out_at: Mapped[Optional[DateTime]] = mapped_column(DateTime, index=True) + is_active: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True, index=True) + sign_out_reason_type_id: Mapped[Optional[int]] = mapped_column( + Integer, + ForeignKey( + "sign_out_reason_type_lu.id", ondelete="RESTRICT", onupdate="CASCADE" + ), + 
index=True, + ) + + user: Mapped["Users"] = relationship("Users", back_populates="user_sessions") + sign_out_reason_type: Mapped[Optional["SignOutReasonTypeLU"]] = relationship( + "SignOutReasonTypeLU", back_populates="user_sessions" + ) + + +class NotificationTypeLU(Base): + __tablename__ = "notification_type_lu" + + name: Mapped[str] = mapped_column(String(50), nullable=False, unique=True) + description: Mapped[Optional[str]] = mapped_column(String) + notifications: Mapped[List["Notifications"]] = relationship( + "Notifications", back_populates="notification_type" + ) + + +class Notifications(Base): + __tablename__ = "notifications" + + user_id: Mapped[int] = mapped_column( + Integer, ForeignKey("Users.id", ondelete="CASCADE", onupdate="CASCADE"), index=True + ) + notification_type_id: Mapped[int] = mapped_column( + Integer, + ForeignKey( + "notification_type_lu.id", ondelete="RESTRICT", onupdate="CASCADE" + ), + index=True, + ) + created_by: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("Users.id", ondelete="SET NULL", onupdate="CASCADE"), index=True + ) + title: Mapped[str] = mapped_column(String(255), nullable=False) + message: Mapped[str] = mapped_column(String, nullable=False) + link: Mapped[Optional[str]] = mapped_column(String(500)) + is_read: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False, index=True) + created_at: Mapped[DateTime] = mapped_column( + DateTime, nullable=False, server_default=func.now(), index=True + ) + read_at: Mapped[Optional[DateTime]] = mapped_column(DateTime) + + user: Mapped["Users"] = relationship( + "Users", back_populates="notifications", foreign_keys=[user_id] + ) + creator: Mapped[Optional["Users"]] = relationship( + "Users", back_populates="created_notifications", foreign_keys=[created_by] + ) + notification_type: Mapped["NotificationTypeLU"] = relationship( + "NotificationTypeLU", back_populates="notifications" + ) + + +ScopesRoles = Table( + "ScopesRoles", + Base.metadata, + 
Column("security_scope_id", ForeignKey("SecurityScopes.id"), nullable=False), + Column("user_role_id", ForeignKey("UserRoles.id"), nullable=False), +) + + +class SecurityScopes(Base): + __tablename__ = "SecurityScopes" + scope_string: Mapped[str] = mapped_column(String, nullable=False) + description: Mapped[str] = mapped_column(String) + + +class UserRoles(Base): + __tablename__ = "UserRoles" + name: Mapped[str] = mapped_column(String, nullable=False) + security_scopes: Mapped[List["SecurityScopes"]] = relationship( + secondary=ScopesRoles + ) diff --git a/api/models/well.py b/api/models/well.py new file mode 100644 index 00000000..16cdb3a3 --- /dev/null +++ b/api/models/well.py @@ -0,0 +1,77 @@ +from typing import List, Optional + +from sqlalchemy import Boolean, DateTime, Float, ForeignKey, Integer, String, func +from sqlalchemy.orm import Mapped, mapped_column, relationship + +from api.models.base import Base + + +class WellUseLU(Base): + __tablename__ = "WellUseLU" + use_type: Mapped[str] = mapped_column(String, nullable=False) + code: Mapped[str] = mapped_column(String) + description: Mapped[str] = mapped_column(String) + + +class WaterSources(Base): + __tablename__ = "water_sources" + name: Mapped[str] = mapped_column(String, nullable=False) + description: Mapped[str] = mapped_column(String) + + +class WellStatus(Base): + __tablename__ = "well_status" + status: Mapped[str] = mapped_column(String, nullable=False) + description: Mapped[str] = mapped_column(String) + + +class Wells(Base): + __tablename__ = "Wells" + + name: Mapped[str] = mapped_column(String) + ra_number: Mapped[str] = mapped_column(String) + owners: Mapped[str] = mapped_column(String) + osetag: Mapped[str] = mapped_column(String) + casing: Mapped[str] = mapped_column(String) + total_depth: Mapped[float] = mapped_column(Float) + outside_recorder: Mapped[str] = mapped_column(Boolean) + + use_type_id: Mapped[int] = mapped_column(Integer, ForeignKey("WellUseLU.id")) + location_id: Mapped[int] = 
mapped_column(Integer, ForeignKey("Locations.id")) + water_source_id: Mapped[int] = mapped_column( + Integer, ForeignKey("water_sources.id") + ) + well_status_id: Mapped[int] = mapped_column(Integer, ForeignKey("well_status.id")) + chloride_group_id: Mapped[int] = mapped_column(Integer) + + use_type: Mapped["WellUseLU"] = relationship() + location: Mapped["Locations"] = relationship() + water_source: Mapped["WaterSources"] = relationship() + well_status: Mapped["WellStatus"] = relationship() + meters: Mapped[List["Meters"]] = relationship("Meters", back_populates="well") + + +class WellMeasurements(Base): + __tablename__ = "WellMeasurements" + + timestamp: Mapped[DateTime] = mapped_column( + DateTime, default=func.now(), nullable=False + ) + value: Mapped[Optional[float]] = mapped_column(Float, nullable=True) + observed_property_id: Mapped[int] = mapped_column( + Integer, ForeignKey("ObservedPropertyTypeLU.id"), nullable=False + ) + submitting_user_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("Users.id"), nullable=True + ) + unit_id: Mapped[int] = mapped_column( + Integer, ForeignKey("Units.id"), nullable=False + ) + well_id: Mapped[int] = mapped_column( + Integer, ForeignKey("Wells.id"), nullable=False + ) + + observed_property: Mapped["ObservedPropertyTypeLU"] = relationship() + submitting_user: Mapped["Users"] = relationship() + unit: Mapped["Units"] = relationship() + well: Mapped["Wells"] = relationship() diff --git a/api/models/work_order.py b/api/models/work_order.py new file mode 100644 index 00000000..eade6ca5 --- /dev/null +++ b/api/models/work_order.py @@ -0,0 +1,34 @@ +from sqlalchemy import DateTime, ForeignKey, Integer, String +from sqlalchemy.orm import Mapped, mapped_column, relationship + +from api.models.base import Base + + +class workOrderStatusLU(Base): + __tablename__ = "work_order_status_lu" + name: Mapped[str] = mapped_column(String, nullable=False) + description: Mapped[str] = mapped_column(String, nullable=False) + + 
+class workOrders(Base): + __tablename__ = "work_orders" + + date_created: Mapped[DateTime] = mapped_column(DateTime, nullable=False) + creator: Mapped[str] = mapped_column(String, nullable=True) + title: Mapped[str] = mapped_column(String, nullable=False) + description: Mapped[str] = mapped_column(String, nullable=True) + meter_id: Mapped[int] = mapped_column( + Integer, ForeignKey("Meters.id"), nullable=False + ) + status_id: Mapped[int] = mapped_column( + Integer, ForeignKey("work_order_status_lu.id"), nullable=False + ) + notes: Mapped[str] = mapped_column(String, nullable=True) + assigned_user_id: Mapped[int] = mapped_column( + Integer, ForeignKey("Users.id"), nullable=True + ) + ose_request_id: Mapped[int] = mapped_column(Integer, nullable=True) + + meter: Mapped["Meters"] = relationship() + status: Mapped["workOrderStatusLU"] = relationship() + assigned_user: Mapped["Users"] = relationship() diff --git a/api/routes/OSE.py b/api/routes/OSE.py index d4e90589..09b4e2e2 100644 --- a/api/routes/OSE.py +++ b/api/routes/OSE.py @@ -1,215 +1,20 @@ -from datetime import datetime, date, time +from datetime import datetime -from pydantic import BaseModel, Field -from fastapi import Depends, APIRouter, HTTPException, Query -from sqlalchemy import select, and_ -from sqlalchemy.orm import Session, joinedload, selectinload +from fastapi import Depends, APIRouter, Query +from sqlalchemy.orm import Session -from api.models.main_models import ( - Meters, - MeterActivities, - MeterObservations, - Wells, - meterRegisters, - workOrders, - ActivityTypeLU, - ObservedPropertyTypeLU, - ServiceTypeLU, - NoteTypeLU, - MeterStatusLU, -) - -from api.schemas import meter_schemas +from api.schemas import meter, ose from api.session import get_db -from api.enums import ScopedUser - -import os - - -API_BASE_URL = os.getenv("API_BASE_URL", "") +from api.auth.dependencies import ScopedUser +from api.services import ose as ose_service ose_router = 
APIRouter(dependencies=[Depends(ScopedUser.OSE)]) -class MeterActivityPhotoDTO(BaseModel): - name: str - url: str - - -class ObservationDTO(BaseModel): - observation_time: time # Will be associated with a given activity - observation_type: str - measurement: float - units: str - - -class ActivityDTO(BaseModel): - activity_id: int - ose_request_id: int | None = None - activity_start: datetime - activity_end: datetime - activity_type: str - well_ra_number: str | None - well_ose_tag: str | None - description: str - services: list[str] = Field(default_factory=list) - notes: list[str] = Field(default_factory=list) - parts_used: list[str] = Field(default_factory=list) - observations: list[ObservationDTO] = Field(default_factory=list) - meter_activity_photos: list[MeterActivityPhotoDTO] = Field(default_factory=list) - - -class MeterHistoryDTO(BaseModel): - serial_number: str - activities: list[ActivityDTO] = Field(default_factory=list) - - -class DateHistoryDTO(BaseModel): - date: date - meters: list[MeterHistoryDTO] = [] - - -class DisapprovalStatus(BaseModel): - """ - Returns the status of a disapproval request and response - """ - - ose_request_id: int - status: str - notes: str | None = None - disapproval_activity: ActivityDTO | None = None - new_activities: list[ActivityDTO] | None = None - - -def build_activity_photo_url(activity_id: int, photo_name: str) -> str: - return f"{API_BASE_URL}/activities/{activity_id}/photos/{photo_name}" - - -def getObservations( - activity_start: datetime, - activity_end: datetime, - meter_id: int, - observations: list[MeterObservations], -) -> list[ObservationDTO]: - """ - A function to return a list of observations that occurred during a given activity - """ - observations_list = [] - for observation in observations: - if ( - observation.timestamp >= activity_start - and observation.timestamp <= activity_end - and observation.meter_id == meter_id - ): - observation = ObservationDTO( - observation_time=observation.timestamp.time(), - 
observation_type=observation.observed_property.name, - measurement=observation.value, - units=observation.unit.name_short, - ) - observations_list.append(observation) - - return observations_list - - -def reorganizeHistory( - activities: list[MeterActivities], observations: list[MeterObservations] -) -> list[DateHistoryDTO]: - """ - A function to reorganize the data into the desired format for OSE history - """ - # Reorganize the data into dictionaries mapping date and meter serial number to activities history{date: {meter: [activities]}} - history = {} - for activity in activities: - date = activity.timestamp_start.strftime("%Y-%m-%d") - meter = activity.meter.serial_number - - if date not in history: - history[date] = {} - - if meter not in history[date]: - history[date][meter] = [] - - history[date][meter].append(activity) - - # Build the output list of DateHistoryDTO objects from the history dictionary - history_list = [] - for date, meters in history.items(): - meter_history_list = [] - for meter, activities in meters.items(): - meter_activity_list = [] - for activity in activities: - notes_strings = list(map(lambda note: note.note, activity.notes)) - parts_used_strings = list( - map( - lambda part: f"{part.part_type.name} ({part.part_number})", - activity.parts_used, - ) - ) - services_performed_strings = list( - map( - lambda service: service.service_name, - activity.services_performed, - ) - ) - activity_observations = getObservations( - activity.timestamp_start, - activity.timestamp_end, - activity.meter_id, - observations, - ) - - # Some activities are not associated with a well - # If well is none, set the well's RA number and OSE tag to None - if not activity.well: - ra_number = None - ose_tag = None - else: - ra_number = activity.well.ra_number - ose_tag = activity.well.osetag - - meter_activity_photos = [ - MeterActivityPhotoDTO( - name=p.file_name, - url=build_activity_photo_url(activity.id, p.file_name), - ) - for p in (activity.photos or []) - ] - 
- activity = ActivityDTO( - activity_id=activity.id, - ose_request_id=activity.work_order.ose_request_id - if activity.work_order - else None, - activity_type=activity.activity_type.name, - activity_start=activity.timestamp_start, - activity_end=activity.timestamp_end, - well_ra_number=ra_number, - well_ose_tag=ose_tag, - description=activity.description, - services=services_performed_strings, - notes=notes_strings, - parts_used=parts_used_strings, - observations=activity_observations, - meter_activity_photos=meter_activity_photos, - ) - meter_activity_list.append(activity) - - meter_history = MeterHistoryDTO( - serial_number=meter, activities=meter_activity_list - ) - meter_history_list.append(meter_history) - - date_history = DateHistoryDTO(date=date, meters=meter_history_list) - history_list.append(date_history) - - return history_list - - @ose_router.get( "/shared_meter_maintenance_history", - response_model=list[DateHistoryDTO], + response_model=list[ose.DateHistoryDTO], tags=["OSE"], ) def get_shared_history( @@ -221,61 +26,12 @@ def get_shared_history( Datetime Format ISO8601: YYYY-MM-DDTHH:MM:SS+HH:MM, example 2023-09-12T00:00:00+00:00 """ - # Get all activities in the date range - activities = ( - db.scalars( - select(MeterActivities) - .options( - joinedload(MeterActivities.activity_type), - joinedload(MeterActivities.parts_used), - joinedload(MeterActivities.meter), - joinedload(MeterActivities.work_order), - joinedload(MeterActivities.well), - selectinload(MeterActivities.photos), - ) - .filter( - and_( - MeterActivities.timestamp_end >= start_datetime, - MeterActivities.timestamp_end <= end_datetime, - MeterActivities.ose_share == True, - ) - ) - ) - .unique() - .all() - ) - - # Get all observations in the date range - observations = ( - db.scalars( - select(MeterObservations) - .options( - joinedload(MeterObservations.observed_property), - joinedload(MeterObservations.unit), - joinedload(MeterObservations.meter), - ) - .filter( - and_( - 
MeterObservations.timestamp >= start_datetime, - MeterObservations.timestamp <= end_datetime, - MeterObservations.ose_share == True, - ) - ) - ) - .unique() - .all() - ) - - # Store results in list so we can iterate over it multiple times (as opposed to using the SQLAlchemy cursor) - activities_list = list(activities) - observations_list = list(observations) - - return reorganizeHistory(activities_list, observations_list) + return ose_service.get_shared_history(db, start_datetime, end_datetime) @ose_router.get( "/meter_maintenance_by_ose_request_id", - response_model=list[DateHistoryDTO], + response_model=list[ose.DateHistoryDTO], tags=["OSE"], ) def get_ose_maintenance_by_requestID( @@ -285,322 +41,37 @@ def get_ose_maintenance_by_requestID( Returns activities and meter readings for each OSE well associated with a given OSE request ID. """ - # Get all activities in the date range - activities = ( - db.scalars( - select(MeterActivities) - .options( - joinedload(MeterActivities.activity_type), - joinedload(MeterActivities.parts_used), - joinedload(MeterActivities.meter).joinedload(Meters.well), - joinedload(MeterActivities.work_order), - ) - .join(workOrders) - .where( - and_( - workOrders.ose_request_id.in_(ose_request_ids), - MeterActivities.ose_share == True, - ) - ) - ) - .unique() - .all() - ) - - # Convert activities to a list so we can iterate over it multiple times (as opposed to using the SQLAlchemy cursor) - activities_list = list(activities) - - if not activities_list: - return [] - - # Since observations do no include the OSE request ID, figure out what observations are associated with the activities using a date range - activities_start_date = min( - [activity.timestamp_start for activity in activities_list] - ) - activities_end_date = max([activity.timestamp_end for activity in activities_list]) - - # Get all observations in the date range - observations = ( - db.scalars( - select(MeterObservations) - .options( - 
joinedload(MeterObservations.observed_property), - joinedload(MeterObservations.unit), - joinedload(MeterObservations.meter), - ) - .filter( - and_( - MeterObservations.timestamp >= activities_start_date, - MeterObservations.timestamp <= activities_end_date, - MeterObservations.ose_share == True, - ) - ) - ) - .unique() - .all() - ) - - # Store results in list so we can iterate over it multiple times (as opposed to using the SQLAlchemy cursor) - observations_list = list(observations) - - return reorganizeHistory(activities_list, observations_list) + return ose_service.get_maintenance_by_request_ids(db, ose_request_ids) @ose_router.get( "/meter_information", tags=["OSE"], - response_model=meter_schemas.PublicMeter, + response_model=meter.PublicMeter, ) def get_meter_information( serial_number: str, db: Session = Depends(get_db), ): - # Create the basic query - query = select(Meters).options( - joinedload(Meters.meter_type), - joinedload(Meters.well).joinedload(Wells.location), - joinedload(Meters.status), - joinedload(Meters.meter_register).joinedload(meterRegisters.dial_units), - joinedload(Meters.meter_register).joinedload(meterRegisters.totalizer_units), - ) - - query = query.filter(Meters.serial_number == serial_number) - - # Execute the query - meter = db.scalars(query).first() - - if not meter: - raise HTTPException(status_code=404, detail="Meter not found") - - # Manually create the response model because the object and response are organized differently - output_meter = meter_schemas.PublicMeter( - serial_number=meter.serial_number, - status=meter.status.status_name, - well=meter_schemas.PublicMeter.PublicWell( - ra_number=meter.well.ra_number, - osetag=meter.well.osetag, - trss=meter.well.location.trss, - longitude=meter.well.location.longitude, - latitude=meter.well.location.latitude, - ) - if meter.well - else None, - notes=meter.notes, - meter_type=meter_schemas.PublicMeter.MeterType( - brand=meter.meter_type.brand, - model=meter.meter_type.model, - 
size=meter.meter_type.size, - ), - meter_register=meter_schemas.PublicMeter.MeterRegister( - ratio=meter.meter_register.ratio, - number_of_digits=meter.meter_register.number_of_digits, - decimal_digits=meter.meter_register.decimal_digits, - dial_units=meter.meter_register.dial_units.name, - totalizer_units=meter.meter_register.totalizer_units.name, - multiplier=meter.meter_register.multiplier, - ) - if meter.meter_register - else None, - ) - - return output_meter + return ose_service.get_meter_information(db, serial_number) @ose_router.get( "/disapproval_response_by_request_id", tags=["OSE"], - response_model=DisapprovalStatus, + response_model=ose.DisapprovalStatus, ) def get_disapproval_response_by_request_id( ose_request_id: int, db: Session = Depends(get_db) ): - # Get the work order associated with the OSE request ID - work_order = db.scalars( - select(workOrders) - .options(joinedload(workOrders.status)) - .where(workOrders.ose_request_id == ose_request_id) - ).first() - - # Check if work order is a disapproval as determined by title "OSE Data Issue" - isDisapproval = work_order.title[:14] == "OSE Data Issue" - - if not work_order or not isDisapproval: - raise HTTPException(status_code=404, detail="Work order not found") - - # Get the activity that was originally disapproved of - # Not yet implemented return dummy ActivityDTO - disapproval_activity = ActivityDTO( - activity_id=99999, - activity_type="Disapproval", - activity_start=datetime.now(), - activity_end=datetime.now(), - well_ra_number=None, - well_ose_tag=None, - description="Not yet implemented, need activity ID in disapproval", - services=[], - notes=[], - parts_used=[], - observations=[], - ) - - # Get any new activities that are associated with the disapproval work order - new_activities = ( - db.scalars( - select(MeterActivities) - .options( - joinedload(MeterActivities.activity_type), - joinedload(MeterActivities.parts_used), - joinedload(MeterActivities.meter).joinedload(Meters.well), - 
joinedload(MeterActivities.work_order), - ) - .where(MeterActivities.work_order_id == work_order.id) - ) - .unique() - .all() - ) - - # Loop through the new activities and create the ActivityDTO objects - # I also get observations for each activity, which might not be too performant - # but there will likely only be one new activity if any - new_activitiesDTO = [] - for na in new_activities: - notes_strings = list(map(lambda note: note.note, na.notes)) - parts_used_strings = list( - map( - lambda part: f"{part.part_type.name} ({part.part_number})", - na.parts_used, - ) - ) - services_performed_strings = list( - map( - lambda service: service.service_name, - na.services_performed, - ) - ) - - # Get observations for the meter in the time range of the activity - observations = ( - db.scalars( - select(MeterObservations) - .options( - joinedload(MeterObservations.observed_property), - joinedload(MeterObservations.unit), - ) - .filter( - and_( - MeterObservations.timestamp >= na.timestamp_start, - MeterObservations.timestamp <= na.timestamp_end, - MeterObservations.meter_id == na.meter_id, - MeterObservations.ose_share == True, - ) - ) - ) - .unique() - .all() - ) - - # Create the observation DTOs - activity_observations = [] - for observation in observations: - observation = ObservationDTO( - observation_time=observation.timestamp.time(), - observation_type=observation.observed_property.name, - measurement=observation.value, - units=observation.unit.name_short, - ) - activity_observations.append(observation) - - activity = ActivityDTO( - activity_id=na.id, - ose_request_id=na.work_order.ose_request_id if na.work_order else None, - activity_type=na.activity_type.name, - activity_start=na.timestamp_start, - activity_end=na.timestamp_end, - well_ra_number=na.meter.well.ra_number if na.meter.well else None, - well_ose_tag=na.meter.well.osetag if na.meter.well else None, - description=na.description, - services=services_performed_strings, - notes=notes_strings, - 
parts_used=parts_used_strings, - observations=activity_observations, - ) - new_activitiesDTO.append(activity) - - # Create the response model - response = DisapprovalStatus( - ose_request_id=work_order.ose_request_id, - status=work_order.status.name, - notes=work_order.notes, - disapproval_activity=disapproval_activity, - new_activities=new_activitiesDTO, - ) - - return response + return ose_service.get_disapproval_response(db, ose_request_id) @ose_router.get( - "/get_DB_types", tags=["OSE"], response_model=meter_schemas.DBTypesForOSE + "/get_DB_types", tags=["OSE"], response_model=meter.DBTypesForOSE ) def get_DB_types(db: Session = Depends(get_db)): """ Return DB types from lookup tables """ - # Load all the lookup tables - activity_types = db.scalars(select(ActivityTypeLU)).all() - observed_property_types = db.scalars(select(ObservedPropertyTypeLU)).all() - service_types = db.scalars(select(ServiceTypeLU)).all() - note_types = db.scalars(select(NoteTypeLU)).all() - meter_status_types = db.scalars(select(MeterStatusLU)).all() - - # Convert to - activity_types = list( - map( - lambda x: meter_schemas.DBTypesForOSE.GeneralTypeInfo( - name=x.name, description=x.description - ), - activity_types, - ) - ) - observed_property_types = list( - map( - lambda x: meter_schemas.DBTypesForOSE.GeneralTypeInfo( - name=x.name, description=x.description - ), - observed_property_types, - ) - ) - service_types = list( - map( - lambda x: meter_schemas.DBTypesForOSE.GeneralTypeInfo( - name=x.service_name, description=x.description - ), - service_types, - ) - ) - note_types = list( - map( - lambda x: meter_schemas.DBTypesForOSE.GeneralTypeInfo( - name=x.note, description=x.details - ), - note_types, - ) - ) - meter_status_types = list( - map( - lambda x: meter_schemas.DBTypesForOSE.GeneralTypeInfo( - name=x.status_name, description=x.description - ), - meter_status_types, - ) - ) - - # Create the response model - response = meter_schemas.DBTypesForOSE( - activity_types=activity_types, 
- observed_property_types=observed_property_types, - service_types=service_types, - note_types=note_types, - meter_status_types=meter_status_types, - ) - - return response + return ose_service.get_db_types(db) diff --git a/api/routes/activities.py b/api/routes/activities.py index becca741..b16c23a3 100644 --- a/api/routes/activities.py +++ b/api/routes/activities.py @@ -1,47 +1,21 @@ -from fastapi import Depends, APIRouter, Query, File, UploadFile, Form +from fastapi import Depends, APIRouter, File, UploadFile, Form from fastapi.exceptions import HTTPException from fastapi.responses import StreamingResponse -from sqlalchemy.orm import Session, joinedload, undefer -from sqlalchemy.exc import IntegrityError -from sqlalchemy import select, text, or_ -from datetime import datetime -from typing import List, Annotated -from api import security -from api.schemas import meter_schemas -from api.models.main_models import ( - Meters, - ObservedPropertyTypeLU, - Parts, - ActivityTypeLU, - Units, - MeterActivities, - MeterActivityPhotos, - MeterObservations, - ServiceTypeLU, - NoteTypeLU, - Wells, - Locations, - MeterStatusLU, - Users, - workOrders, - workOrderStatusLU, -) +from sqlalchemy.orm import Session +from typing import List +from api.schemas import meter +from api.models.user import Users from api.session import get_db from api.security import get_current_user -from api.enums import ScopedUser, WorkOrderStatus -from pathlib import Path -from google.cloud import storage +from api.services import activities as activity_service +from api.services import storage as storage_service +from api.auth.dependencies import ScopedUser -import uuid import json -import os activity_router = APIRouter() public_activity_router = APIRouter() -BUCKET_NAME = os.getenv("GCP_BUCKET_NAME", "") -PHOTO_PREFIX = os.getenv("GCP_PHOTO_PREFIX", "") - MAX_PHOTOS_PER_REQUEST = 2 MAX_PHOTOS_PER_METER = 6 @@ -52,50 +26,14 @@ async def get_activity_photo( photo_file_name: str, db: Session = 
Depends(get_db), ): - photo = ( - db.query(MeterActivityPhotos) - .filter( - MeterActivityPhotos.meter_activity_id == activity_id, - MeterActivityPhotos.file_name == photo_file_name, - ) - .first() - ) - - if not photo: - raise HTTPException(status_code=404, detail="Photo not found for this activity") - - try: - client = storage.Client() - bucket = client.bucket(BUCKET_NAME) - blob = bucket.blob(photo.gcs_path) - - # Optional: ensure blob exists (avoids returning empty/500) - if not blob.exists(client=client): - raise HTTPException( - status_code=404, detail="Photo file missing from storage" - ) - - # Pull content type from GCS metadata (fallback if absent) - blob.reload(client=client) - content_type = blob.content_type or "application/octet-stream" - - # 3) Stream back to client - file_obj = blob.open("rb") # streaming file-like object - - # Inline display; if you want download behavior change to 'attachment' - headers = {"Content-Disposition": f'inline; filename="{photo.file_name}"'} - - return StreamingResponse(file_obj, media_type=content_type, headers=headers) - - except HTTPException: - raise - except Exception: - raise HTTPException(status_code=500, detail="Failed to retrieve photo") + photo = storage_service.get_activity_photo_record(db, activity_id, photo_file_name) + file_obj, content_type, headers = storage_service.open_activity_photo(photo) + return StreamingResponse(file_obj, media_type=content_type, headers=headers) @activity_router.post( "/activities", - response_model=meter_schemas.MeterActivity, + response_model=meter.MeterActivity, dependencies=[Depends(ScopedUser.ActivityWrite)], tags=["Activities"], ) @@ -118,264 +56,18 @@ async def post_activity( ) try: - activity_form = meter_schemas.ActivityForm.parse_obj(json.loads(activity)) + activity_form = meter.ActivityForm.parse_obj(json.loads(activity)) except Exception as e: raise HTTPException(status_code=400, detail=f"Invalid activity payload: {e}") - # Set some variables that will be used to 
determine how the meter is updated - update_meter_state = True - user_level = user.user_role.name - - # First check that the date and time of the activity are newer than the last activity - last_activity = db.scalars( - select(MeterActivities) - .where(MeterActivities.meter_id == activity_form.activity_details.meter_id) - .order_by(MeterActivities.timestamp_end.desc()) - .limit(1) - ).first() - - # Calculate event start and end datetimes - activity_date = activity_form.activity_details.date.date() - # Set the times to have 0 seconds, this prevents accidental duplicate activities - starttime = activity_form.activity_details.start_time.time().replace(second=0) - endtime = activity_form.activity_details.end_time.time().replace(second=0) - start_datetime = datetime.combine(activity_date, starttime) - end_datetime = datetime.combine(activity_date, endtime) - - if last_activity: - if last_activity.timestamp_end > end_datetime: - update_meter_state = False - - if user_level != "Admin": - raise HTTPException( - status_code=409, - detail="Submitted activity is older than the last activity.", - ) - - activity_meter = db.scalars( - select(Meters).where(activity_form.activity_details.meter_id == Meters.id) - ).first() - - activity_type = db.scalars( - select(ActivityTypeLU).where( - activity_form.activity_details.activity_type_id == ActivityTypeLU.id - ) - ).first() - - # Get the location of the activity based on the well associated with the meter - # If there is no well, assume the activity took place at the "Warehouse" - hq_location = db.scalars( - select(Locations).where(Locations.type_id == 1) - ).first() # Probably needs a slug - - if activity_form.current_installation.well_id: - activity_well = db.scalars( - select(Wells).where(activity_form.current_installation.well_id == Wells.id) - ).first() - activity_location = activity_well.location.id - else: - activity_location = hq_location.id - - # ---- Create the activity itself ---- - meter_activity = MeterActivities( - 
timestamp_start=start_datetime, - timestamp_end=end_datetime, - description=activity_form.maintenance_repair.description, - submitting_user_id=activity_form.activity_details.user_id, - meter_id=activity_form.activity_details.meter_id, - activity_type_id=activity_form.activity_details.activity_type_id, - location_id=activity_location, - ose_share=activity_form.activity_details.share_ose, - water_users=activity_form.current_installation.water_users, + return await activity_service.create_activity( + db=db, + activity_form=activity_form, + user=user, + photos=photos, + max_photos_per_meter=MAX_PHOTOS_PER_METER, ) - # If a work order is associated with the activity, add it to the activity - if activity_form.activity_details.work_order_id: - meter_activity.work_order_id = activity_form.activity_details.work_order_id - - # Add the activity to the database and if it already exists raise an error - try: - db.add(meter_activity) - db.commit() - db.refresh(meter_activity) # make sure meter_activity.id is available - except IntegrityError as _e: - raise HTTPException( - status_code=409, detail="Activity overlaps with existing activity." 
- ) - - db.flush() - - # Create the observations - if activity_form.activity_details.share_ose: - # Set OSE flag in observation to true - share_ose_observation = True - else: - share_ose_observation = False - - for observation_form in activity_form.observations: - observation_time = observation_form.time.time() - observation_datetime = datetime.combine(activity_date, observation_time) - observation = MeterObservations( - timestamp=observation_datetime, - value=observation_form.reading, - observed_property_type_id=observation_form.property_type_id, - unit_id=observation_form.unit_id, - submitting_user_id=activity_form.activity_details.user_id, - meter_id=activity_form.activity_details.meter_id, - location_id=activity_location, - ose_share=share_ose_observation, - ) - db.add(observation) - - # Associate notes - notes = db.scalars( - select(NoteTypeLU).where( - NoteTypeLU.id.in_(activity_form.notes.selected_note_ids) - ) - ).all() - meter_activity.notes = notes - - # Associate working status note - status_note_type = db.scalars( - select(NoteTypeLU).where( - NoteTypeLU.slug == activity_form.notes.working_on_arrival_slug - ) - ).first() - meter_activity.notes.append(status_note_type) - - # Associate and handle parts use - used_parts = db.scalars( - select(Parts).where(Parts.id.in_(activity_form.part_used_ids)) - ).all() - meter_activity.parts_used = used_parts - - for used_part in used_parts: - used_part.count -= 1 - - # Associate services performed - services = db.scalars( - select(ServiceTypeLU).where( - ServiceTypeLU.id.in_(activity_form.maintenance_repair.service_type_ids) - ) - ).all() - meter_activity.services_performed = services - - db.commit() - - # ---- Update the current state of the meter based on the activity type ---- - meter_statuses = db.scalars(select(MeterStatusLU)).all() - meter_statuses = {status.status_name: status.id for status in meter_statuses} - - if update_meter_state: - if (activity_type.name == "Uninstall") or ( - activity_type.name == 
"Uninstall and Hold" - ): # This needs to be a slug - activity_meter.location_id = hq_location.id - activity_meter.well_id = None - activity_meter.water_users = None - - if activity_type.name == "Uninstall and Hold": - # Set status as On Hold - activity_meter.status_id = meter_statuses["On Hold"] - else: - # Set status as Uninstalled - activity_meter.status_id = meter_statuses["Warehouse"] - - if activity_type.name == "Install": - activity_meter.well_id = activity_well.id - activity_meter.location_id = activity_location - activity_meter.status_id = meter_statuses["Installed"] - activity_meter.water_users = activity_form.current_installation.water_users - - if activity_type.name == "Scrap": - activity_meter.well_id = None - activity_meter.location_id = None - activity_meter.status_id = meter_statuses["Scrapped"] - activity_meter.water_users = None - activity_meter.meter_owner = None - - if activity_type.name == "Sell": - activity_meter.well_id = None - activity_meter.location_id = None - activity_meter.status_id = meter_statuses["Sold"] - activity_meter.water_users = None - activity_meter.meter_owner = activity_form.current_installation.meter_owner - - if activity_type.name == "Change Water Users": - activity_meter.water_users = activity_form.current_installation.water_users - - # Make updates to the meter based on user's entry in the current installation section - if activity_type.name != "Uninstall": - activity_meter.contact_name = ( - activity_form.current_installation.contact_name - ) - activity_meter.contact_phone = ( - activity_form.current_installation.contact_phone - ) - activity_meter.notes = activity_form.current_installation.notes - - db.commit() - - # ---- Handle photo file uploads ---- - if photos: - print(f"Received {len(photos)} photos") - print(f"Uploading to bucket={BUCKET_NAME}, prefix={PHOTO_PREFIX}") - client = storage.Client() - bucket = client.bucket(BUCKET_NAME) - - for file in photos: - ext = Path(file.filename).suffix or ".jpg" - unique_name 
= f"{uuid.uuid4()}{ext}" - blob_path = f"{PHOTO_PREFIX}/{meter_activity.id}/{unique_name}" - blob = bucket.blob(blob_path) - - # Upload file content directly - try: - contents = await file.read() - print(f"Uploading {file.filename}, size={len(contents)} bytes") - - blob.upload_from_string(contents, content_type=file.content_type) - print(f"Uploaded to gs://{BUCKET_NAME}/{blob_path}") - except Exception as e: - print(f"ERROR uploading {file.filename}: {e}") - raise - - photo = MeterActivityPhotos( - meter_activity_id=meter_activity.id, - file_name=unique_name, - gcs_path=blob_path, - ) - db.add(photo) - - db.commit() - print(f"Saved {len(photos)} photos for activity {meter_activity.id}") - db.refresh(meter_activity) - - # ---- Enforce per-meter retention ---- - all_photos = ( - db.query(MeterActivityPhotos) - .join(MeterActivities) - .filter(MeterActivities.meter_id == meter_activity.meter_id) - .order_by(MeterActivityPhotos.uploaded_at.desc()) - .all() - ) - - if len(all_photos) > MAX_PHOTOS_PER_METER: - # keep newest MAX_PHOTOS_PER_METER, delete the rest - to_delete = all_photos[MAX_PHOTOS_PER_METER:] - for old_photo in to_delete: - try: - bucket.blob(old_photo.gcs_path).delete() - except Exception as e: - print( - f"Warning: failed to delete {old_photo.gcs_path} from GCS: {e}" - ) - db.delete(old_photo) - - db.commit() - - return meter_activity - @activity_router.patch( "/activities", @@ -383,87 +75,9 @@ async def post_activity( tags=["Activities"], ) def patch_activity( - patch_activity_form: meter_schemas.PatchActivity, db: Session = Depends(get_db) + patch_activity_form: meter.PatchActivity, db: Session = Depends(get_db) ): - """ - Patch an activity. 
- All input times should be UTC - """ - # Get the activity - activity = db.scalars( - select(MeterActivities).where( - MeterActivities.id == patch_activity_form.activity_id - ) - ).first() - - # Update the activity - activity.timestamp_start = patch_activity_form.timestamp_start - activity.timestamp_end = patch_activity_form.timestamp_end - activity.description = patch_activity_form.description - activity.ose_share = patch_activity_form.ose_share - activity.water_users = patch_activity_form.water_users - - # When updating location, if location_id is null assume the activity took place at the "Warehouse" - if patch_activity_form.location_id is None: - hq_location = db.scalars( - select(Locations).where(Locations.type_id == 1) - ).first() - activity.location_id = hq_location.id - else: - activity.location_id = patch_activity_form.location_id - - # Update the notes - # Easiest approach is to just delete existing and then re-add if there are any - delete_sql = text('DELETE FROM "Notes" WHERE meter_activity_id = :activity_id') - db.execute(delete_sql, {"activity_id": patch_activity_form.activity_id}) - - if patch_activity_form.note_ids: - insert_sql = text( - 'INSERT INTO "Notes" (meter_activity_id, note_type_id) VALUES (:activity_id, :note_id)' - ) - for note_id in patch_activity_form.note_ids: - db.execute( - insert_sql, - {"activity_id": patch_activity_form.activity_id, "note_id": note_id}, - ) - - # Update the parts used - delete_sql = text('DELETE FROM "PartsUsed" WHERE meter_activity_id = :activity_id') - db.execute(delete_sql, {"activity_id": patch_activity_form.activity_id}) - - if patch_activity_form.part_ids: - insert_sql = text( - 'INSERT INTO "PartsUsed" (meter_activity_id, part_id) VALUES (:activity_id, :part_id)' - ) - for part_id in patch_activity_form.part_ids: - db.execute( - insert_sql, - {"activity_id": patch_activity_form.activity_id, "part_id": part_id}, - ) - - # Update the services performed - delete_sql = text( - 'DELETE FROM "ServicesPerformed" 
WHERE meter_activity_id = :activity_id' - ) - db.execute(delete_sql, {"activity_id": patch_activity_form.activity_id}) - - if patch_activity_form.service_ids: - insert_sql = text( - 'INSERT INTO "ServicesPerformed" (meter_activity_id, service_type_id) VALUES (:activity_id, :service_id)' - ) - for service_id in patch_activity_form.service_ids: - db.execute( - insert_sql, - { - "activity_id": patch_activity_form.activity_id, - "service_id": service_id, - }, - ) - - # Commit the changes - db.commit() - - return {"status": "success"} + return activity_service.patch_activity(db, patch_activity_form) @activity_router.delete( @@ -472,51 +86,7 @@ def patch_activity( tags=["Activities"], ) def delete_activity(activity_id: int, db: Session = Depends(get_db)): - """ - Deletes an activity. - """ - # Get the activity - activity = db.scalars( - select(MeterActivities).where(MeterActivities.id == activity_id) - ).first() - - if not activity: - raise HTTPException(status_code=404, detail="Activity not found.") - - photos = db.scalars( - select(MeterActivityPhotos).where( - MeterActivityPhotos.meter_activity_id == activity_id - ) - ).all() - - storage_client = storage.Client() - bucket = storage_client.bucket(BUCKET_NAME) - - for photo in photos: - try: - blob = bucket.blob(photo.gcs_path) - blob.delete() - print(f"Deleted GCS object: {photo.gcs_path}") - except Exception as e: - print(f"Failed to delete {photo.gcs_path} from bucket: {e}") - - # Delete any notes associated with the activity - sql = text('DELETE FROM "Notes" WHERE meter_activity_id = :activity_id') - db.execute(sql, {"activity_id": activity_id}) - - # Delete any services performed associated with the activity - sql = text('DELETE FROM "ServicesPerformed" WHERE meter_activity_id = :activity_id') - db.execute(sql, {"activity_id": activity_id}) - - # Delete any parts used associated with the activity - sql = text('DELETE FROM "PartsUsed" WHERE meter_activity_id = :activity_id') - db.execute(sql, {"activity_id": 
activity_id}) - - # Delete the activity - db.delete(activity) - db.commit() - - return {"status": "success"} + return activity_service.delete_activity(db, activity_id) @activity_router.patch( @@ -525,44 +95,10 @@ def delete_activity(activity_id: int, db: Session = Depends(get_db)): tags=["Activities"], ) def patch_observation( - patch_observation_form: meter_schemas.PatchObservation, + patch_observation_form: meter.PatchObservation, db: Session = Depends(get_db), ): - """ - Patch an observation. - All input times should be UTC - """ - # Get the observation - observation = db.scalars( - select(MeterObservations).where( - MeterObservations.id == patch_observation_form.observation_id - ) - ).first() - - # Update the observation - observation.timestamp = patch_observation_form.timestamp - observation.value = patch_observation_form.value - observation.notes = patch_observation_form.notes - observation.observed_property_type_id = ( - patch_observation_form.observed_property_type_id - ) - observation.unit_id = patch_observation_form.unit_id - observation.meter_id = patch_observation_form.meter_id - observation.submitting_user_id = patch_observation_form.submitting_user_id - observation.ose_share = patch_observation_form.ose_share - - # When updating location, if location_id is null assume the observation took place at the "Warehouse" - if patch_observation_form.location_id is None: - hq_location = db.scalars( - select(Locations).where(Locations.type_id == 1) - ).first() - observation.location_id = hq_location.id - else: - observation.location_id = patch_observation_form.location_id - - db.commit() - - return {"status": "success"} + return activity_service.patch_observation(db, patch_observation_form) @activity_router.delete( @@ -571,49 +107,19 @@ def patch_observation( tags=["Activities"], ) def delete_observation(observation_id: int, db: Session = Depends(get_db)): - """ - Deletes an observation. 
- """ - # Get the observation - observation = db.scalars( - select(MeterObservations).where(MeterObservations.id == observation_id) - ).first() - - # Return error if the observation doesn't exist - if not observation: - raise HTTPException(status_code=404, detail="Observation not found.") - - # Delete the observation - db.delete(observation) - db.commit() - - return {"status": "success"} + return activity_service.delete_observation(db, observation_id) @activity_router.get( "/activity_types", - response_model=List[meter_schemas.ActivityTypeLU], + response_model=List[meter.ActivityTypeLU], dependencies=[Depends(ScopedUser.Read)], tags=["Activities"], ) def get_activity_types( db: Session = Depends(get_db), user: Users = Depends(get_current_user) ): - """ - Only returns activity types approved for user type. - """ - if user.user_role.name not in ["Admin", "Technician"]: - return [] - else: - activities = db.scalars(select(ActivityTypeLU)).all() - if user.user_role.name != "Admin": - return [ - activity - for activity in activities - if activity.name not in ["Sell", "Scrap"] - ] - - return activities + return activity_service.get_activity_types(db, user) @activity_router.get( @@ -622,49 +128,37 @@ def get_activity_types( tags=["Activities"], ) def get_users(db: Session = Depends(get_db)): - return db.scalars( - select(Users) - .options(undefer(Users.user_role_id)) - .where(Users.disabled == False) - ).all() + return activity_service.get_users(db) @activity_router.get( "/units", - response_model=List[meter_schemas.Unit], + response_model=List[meter.Unit], dependencies=[Depends(ScopedUser.Read)], tags=["Activities"], ) def get_units(db: Session = Depends(get_db)): - return db.scalars(select(Units)).all() + return activity_service.get_units(db) @activity_router.get( "/observed_property_types", - response_model=List[meter_schemas.ObservedPropertyTypeLU], + response_model=List[meter.ObservedPropertyTypeLU], dependencies=[Depends(ScopedUser.Read)], tags=["Activities"], ) def 
get_observed_property_types(db: Session = Depends(get_db)): - return ( - db.scalars( - select(ObservedPropertyTypeLU).options( - joinedload(ObservedPropertyTypeLU.units) - ) - ) - .unique() - .all() - ) + return activity_service.get_observed_property_types(db) @activity_router.get( "/service_types", - response_model=List[meter_schemas.ServiceTypeLU], + response_model=List[meter.ServiceTypeLU], dependencies=[Depends(ScopedUser.Read)], tags=["Activities"], ) def get_service_types(db: Session = Depends(get_db)): - return db.scalars(select(ServiceTypeLU)).all() + return activity_service.get_service_types(db) @activity_router.get( @@ -673,323 +167,4 @@ def get_service_types(db: Session = Depends(get_db)): tags=["Activities"], ) def get_note_types(db: Session = Depends(get_db)): - return db.scalars(select(NoteTypeLU)).all() - - -@activity_router.get( - "/work_orders", - dependencies=[Depends(ScopedUser.Read)], - tags=["Work Orders"], -) -def get_work_orders( - filter_by_status: Annotated[list[WorkOrderStatus], Query()] = [ - WorkOrderStatus.Open - ], - start_date: datetime = Query(datetime.strptime("2024-06-01", "%Y-%m-%d")), - work_order_id: Annotated[list[int] | None, Query()] = None, - assigned_user_id: int | None = None, - q: str | None = None, - db: Session = Depends(get_db), -): - stmt = ( - select(workOrders) - .options( - joinedload(workOrders.status), - joinedload(workOrders.meter), - joinedload(workOrders.assigned_user), - ) - .join(workOrderStatusLU) - .where(workOrderStatusLU.name.in_(filter_by_status)) - .where(workOrders.date_created >= start_date) - ) - - if work_order_id: - stmt = stmt.where(workOrders.id.in_(work_order_id)) - - if assigned_user_id: - stmt = stmt.where(workOrders.assigned_user_id == assigned_user_id) - - if q: - q_like = f"%{q.strip()}%" - stmt = stmt.where( - or_( - workOrders.title.ilike(q_like), - workOrders.description.ilike(q_like), - workOrders.creator.ilike(q_like), - workOrders.notes.ilike(q_like), - 
workOrders.meter.has(Meters.serial_number.ilike(q_like)), - ) - ) - - work_orders = db.scalars(stmt).all() - - # grab activities separately - relevant_activities = db.scalars( - select(MeterActivities) - .options(joinedload(MeterActivities.location)) - .where(MeterActivities.work_order_id.in_([wo.id for wo in work_orders])) - ).all() - - # group activities by work_order_id - activities_by_wo = {} - for act in relevant_activities: - activities_by_wo.setdefault(act.work_order_id, []).append( - { - "id": act.id, - "timestamp_start": act.timestamp_start, - "timestamp_end": act.timestamp_end, - "description": act.description, - "submitting_user_id": act.submitting_user_id, - "meter_id": act.meter_id, - "activity_type_id": act.activity_type_id, - "location_id": act.location_id, - "location_name": act.location.name if act.location else None, - "ose_share": act.ose_share, - "water_users": act.water_users, - } - ) - - # build output - output = [] - for wo in work_orders: - output.append( - { - "work_order_id": wo.id, - "ose_request_id": wo.ose_request_id, - "date_created": wo.date_created, - "creator": wo.creator, - "meter_id": wo.meter.id, - "meter_serial": wo.meter.serial_number, - "title": wo.title, - "description": wo.description, - "status": wo.status.name, - "notes": wo.notes, - "assigned_user_id": wo.assigned_user_id, - "assigned_user": wo.assigned_user.username - if wo.assigned_user - else None, - "associated_activities": activities_by_wo.get(wo.id, []), - } - ) - - return output - - -# Create work order endpoint -@activity_router.post( - "/work_orders", - dependencies=[Depends(ScopedUser.Admin)], - response_model=meter_schemas.WorkOrder, - tags=["Work Orders"], -) -def create_work_order( - new_work_order: meter_schemas.CreateWorkOrder, db: Session = Depends(get_db) -): - """ - Create a new work order dated to the current time. - The only mandatory inputs are the date, meter ID, and the title of the work order. 
- """ - # Get status ID Open - open_status = db.scalars( - select(workOrderStatusLU).where(workOrderStatusLU.name == "Open") - ).first() - - # Create a new work order - work_order = workOrders( - date_created=new_work_order.date_created, - meter_id=new_work_order.meter_id, - title=new_work_order.title, - status_id=open_status.id, - ) - - # Add optional fields if they exist - if new_work_order.description: - work_order.description = new_work_order.description - if new_work_order.notes: - work_order.notes = new_work_order.notes - if new_work_order.assigned_user_id: - work_order.assigned_user_id = new_work_order.assigned_user_id - if new_work_order.creator: - work_order.creator = new_work_order.creator - if new_work_order.ose_request_id: - work_order.ose_request_id = new_work_order.ose_request_id - - # Commit the work order - # Database should block empty title and non-unique (date, title, meter_id) combinations - try: - db.add(work_order) - db.commit() - except IntegrityError as _e: - raise HTTPException( - status_code=409, detail="Title empty or already exists for this meter." 
- ) - - # Create a WorkOrder schema for the updated work order - work_order_schema = meter_schemas.WorkOrder( - work_order_id=work_order.id, - date_created=work_order.date_created, - creator=work_order.creator, - meter_id=work_order.meter.id, - meter_serial=work_order.meter.serial_number, - title=work_order.title, - description=work_order.description, - status=work_order.status.name, - notes=work_order.notes, - assigned_user_id=work_order.assigned_user_id, - assigned_user=work_order.assigned_user.username - if work_order.assigned_user - else None, - ) - - return work_order_schema - - -# Patch work order endpoint -@activity_router.patch( - "/work_orders", - response_model=meter_schemas.WorkOrder, - tags=["Work Orders"], -) -def patch_work_order( - patch_work_order_form: meter_schemas.PatchWorkOrder, - user: Users = Depends(security.get_current_user), - db: Session = Depends(get_db), -): - """ - Patch a work order. - The input schema limits the fields that can be updated to the title, description, status, notes, and assigned user. - This is to prevent confusion with other open work orders. 
- """ - # Determine if update can be made by Tech - comparison_work_order = meter_schemas.PatchWorkOrder( - work_order_id=patch_work_order_form.work_order_id, - status=patch_work_order_form.status, - notes=patch_work_order_form.notes, - ) - - if comparison_work_order == patch_work_order_form: - update_scope = "Technician" - else: - update_scope = "Admin" - - # Check if the user has the correct permissions to update the work order - if user.user_role.name not in [update_scope, "Admin"]: - raise HTTPException( - status_code=403, - detail="User does not have permission to update this work order.", - ) - - # Get the work order - work_order = db.scalars( - select(workOrders) - .options( - joinedload(workOrders.status), - joinedload(workOrders.meter), - joinedload(workOrders.assigned_user), - ) - .where(workOrders.id == patch_work_order_form.work_order_id) - ).first() - - # Ensure the current user is assigned the work order if they are a technician - if user.user_role.name == "Technician": - if work_order.assigned_user_id != user.id: - raise HTTPException( - status_code=403, - detail="User does not have permission to update this work order.", - ) - - # An empty string for a title will silently fail due to the if statement below. Detect here and return an error to the user. 
- if patch_work_order_form.title == "": - raise HTTPException(status_code=422, detail="Title cannot be empty.") - - # Update the work order if the field exists - if patch_work_order_form.title: - work_order.title = patch_work_order_form.title - if patch_work_order_form.description: - work_order.description = patch_work_order_form.description - if patch_work_order_form.status: - # Get the status ID of the new status name - new_status = db.scalars( - select(workOrderStatusLU).where( - workOrderStatusLU.name == patch_work_order_form.status - ) - ).first() - work_order.status_id = new_status.id - if patch_work_order_form.notes: - work_order.notes = patch_work_order_form.notes - if patch_work_order_form.creator: - work_order.creator = patch_work_order_form.creator - if patch_work_order_form.assigned_user_id: - work_order.assigned_user_id = patch_work_order_form.assigned_user_id - - # Commit the changes - # Database should block empty title and non-unique (date, title, meter_id) combinations - try: - db.commit() - except IntegrityError as _e: - raise HTTPException( - status_code=409, detail="Title already exists for this meter." 
- ) - - # Get the updated work order (needed by the frontend) - work_order = db.scalars( - select(workOrders) - .options( - joinedload(workOrders.status), - joinedload(workOrders.meter), - joinedload(workOrders.assigned_user), - ) - .join(workOrderStatusLU) - .where(workOrders.id == patch_work_order_form.work_order_id) - ).first() - - # I was unable to get associated_activities to work with joinedload, so I'm doing it manually here - associated_activities = db.scalars( - select(MeterActivities).where(MeterActivities.work_order_id == work_order.id) - ).all() - - # Create a WorkOrder schema for the updated work order - work_order_schema = meter_schemas.WorkOrder( - work_order_id=work_order.id, - date_created=work_order.date_created, - creator=work_order.creator, - meter_id=work_order.meter.id, - meter_serial=work_order.meter.serial_number, - title=work_order.title, - description=work_order.description, - status=work_order.status.name, - notes=work_order.notes, - assigned_user_id=work_order.assigned_user_id, - assigned_user=work_order.assigned_user.username - if work_order.assigned_user - else None, - associated_activities=list(associated_activities), - ) - - return work_order_schema - - -# Delete work order endpoint -@activity_router.delete( - "/work_orders", - dependencies=[Depends(ScopedUser.Admin)], - tags=["Work Orders"], -) -def delete_work_order(work_order_id: int, db: Session = Depends(get_db)): - """ - Deletes a work order. 
- """ - # Get the work order - work_order = db.scalars( - select(workOrders).where(workOrders.id == work_order_id) - ).first() - - # Return error if the work order doesn't exist - if not work_order: - raise HTTPException(status_code=404, detail="Work order not found.") - - # Delete the work order - db.delete(work_order) - db.commit() - - return {"status": "success"} + return activity_service.get_note_types(db) diff --git a/api/routes/admin.py b/api/routes/admin.py index 71bd8703..f29876c9 100644 --- a/api/routes/admin.py +++ b/api/routes/admin.py @@ -5,13 +5,13 @@ from typing import List from passlib.context import CryptContext -from api.models.main_models import Users, UserRoles, SecurityScopes +from api.models.user import Users, UserRoles, SecurityScopes -from api.schemas import security_schemas -from api.schemas import admin_schemas +from api.schemas import security +from api.schemas import admin from api.session import get_db -from api.route_util import _patch -from api.enums import ScopedUser +from api.routes.utils import _patch +from api.auth.dependencies import ScopedUser from pathlib import Path from google.cloud import storage @@ -34,12 +34,12 @@ # define response models @admin_router.post( "/users/update_password", - response_model=security_schemas.User, + response_model=security.User, dependencies=[Depends(ScopedUser.Admin)], tags=["Admin"], ) def update_user_password( - updatedUserPassword: security_schemas.UpdatedUserPassword, + updatedUserPassword: security.UpdatedUserPassword, db: Session = Depends(get_db), ): user = db.scalars( @@ -55,12 +55,12 @@ def update_user_password( @admin_router.patch( "/users", - response_model=security_schemas.User, + response_model=security.User, dependencies=[Depends(ScopedUser.Admin)], tags=["Admin"], ) def update_user( - updated_user: security_schemas.UpdatedUser, db: Session = Depends(get_db) + updated_user: security.UpdatedUser, db: Session = Depends(get_db) ): _patch(db, Users, updated_user.id, updated_user) @@ 
-80,11 +80,11 @@ def update_user( @admin_router.post( "/users", - response_model=security_schemas.User, + response_model=security.User, dependencies=[Depends(ScopedUser.Admin)], tags=["Admin"], ) -def create_user(user: security_schemas.NewUser, db: Session = Depends(get_db)): +def create_user(user: security.NewUser, db: Session = Depends(get_db)): new_user = Users( username=user.username, email=user.email, @@ -114,7 +114,7 @@ def create_user(user: security_schemas.NewUser, db: Session = Depends(get_db)): @admin_router.get( "/users/{id}", - response_model=security_schemas.User, + response_model=security.User, dependencies=[Depends(ScopedUser.Admin)], tags=["Admin"], ) @@ -141,7 +141,7 @@ def get_user_admin(id: int, db: Session = Depends(get_db)): @admin_router.get( "/usersadmin", - response_model=List[security_schemas.User], + response_model=List[security.User], dependencies=[Depends(ScopedUser.Admin)], tags=["Admin"], ) @@ -165,7 +165,7 @@ def get_users_admin(db: Session = Depends(get_db)): @admin_router.get( "/security_scopes", - response_model=List[security_schemas.SecurityScope], + response_model=List[security.SecurityScope], dependencies=[Depends(ScopedUser.Admin)], tags=["Admin"], ) @@ -175,7 +175,7 @@ def get_security_scopes(db: Session = Depends(get_db)): @admin_router.get( "/roles", - response_model=List[security_schemas.UserRole], + response_model=List[security.UserRole], dependencies=[Depends(ScopedUser.Admin)], tags=["Admin"], ) @@ -189,11 +189,11 @@ def get_roles(db: Session = Depends(get_db)): @admin_router.post( "/roles", - response_model=security_schemas.UserRole, + response_model=security.UserRole, dependencies=[Depends(ScopedUser.Admin)], tags=["Admin"], ) -def create_role(new_role: security_schemas.UserRole, db: Session = Depends(get_db)): +def create_role(new_role: security.UserRole, db: Session = Depends(get_db)): scopes = [] if new_role.security_scopes: scope_ids = map(lambda s: s.id, new_role.security_scopes) @@ -216,11 +216,11 @@ def 
create_role(new_role: security_schemas.UserRole, db: Session = Depends(get_d @admin_router.patch( "/roles", - response_model=security_schemas.UserRole, + response_model=security.UserRole, dependencies=[Depends(ScopedUser.Admin)], tags=["Admin"], ) -def update_role(updated_role: security_schemas.UserRole, db: Session = Depends(get_db)): +def update_role(updated_role: security.UserRole, db: Session = Depends(get_db)): role = db.scalars(select(UserRoles).where(UserRoles.id == updated_role.id)).first() scope_ids = map(lambda s: s.id, updated_role.security_scopes) @@ -243,7 +243,7 @@ def update_role(updated_role: security_schemas.UserRole, db: Session = Depends(g @admin_router.get( "/db-backups", - response_model=List[admin_schemas.BackupFile], + response_model=List[admin.BackupFile], dependencies=[Depends(ScopedUser.Admin)], tags=["Admin"], ) @@ -267,7 +267,7 @@ def list_db_backups( blobs_iter = client.list_blobs(BUCKET_NAME, prefix=prefix) - results: list[admin_schemas.BackupFile] = [] + results: list[admin.BackupFile] = [] for i, blob in enumerate(blobs_iter): if i >= limit: break @@ -293,7 +293,7 @@ def list_db_backups( fmt = f"unknown ({ext})" if ext else "unknown" results.append( - admin_schemas.BackupFile( + admin.BackupFile( name=display_name, file_size=int(blob.size or 0), format=fmt, diff --git a/api/routes/chlorides.py b/api/routes/chlorides.py index 44dd32b2..b21f3987 100644 --- a/api/routes/chlorides.py +++ b/api/routes/chlorides.py @@ -5,14 +5,16 @@ from weasyprint import HTML from io import BytesIO from fastapi import APIRouter, Depends, Query -from pydantic import BaseModel from sqlalchemy import and_, select from sqlalchemy.orm import Session, joinedload -from api.schemas import well_schemas -from api.models.main_models import WellMeasurements, Wells, Locations, WellUseLU +from api.schemas import chlorides +from api.schemas import well +from api.models.location import Locations +from api.models.well import WellMeasurements, Wells, WellUseLU from 
api.session import get_db -from api.enums import ScopedUser, SortDirection +from api.auth.dependencies import ScopedUser +from api.enums import SortDirection from pathlib import Path from jinja2 import Environment, FileSystemLoader, select_autoescape @@ -32,7 +34,7 @@ @public_chlorides_router.get( "/chlorides", - response_model=List[well_schemas.WellMeasurementDTO], + response_model=List[well.WellMeasurementDTO], tags=["Chlorides"], ) def read_chlorides( @@ -59,7 +61,7 @@ def read_chlorides( @public_chlorides_router.get( "/chloride_groups", - response_model=List[well_schemas.ChlorideGroupResponse], + response_model=List[well.ChlorideGroupResponse], tags=["Chlorides"], ) def get_chloride_groups( @@ -93,26 +95,10 @@ def get_chloride_groups( {"id": group_id, "names": sorted(names)} for group_id, names in groups.items() ] - -class MinMaxAvgMedCount(BaseModel): - min: Optional[float] = None - max: Optional[float] = None - avg: Optional[float] = None - median: Optional[float] = None - count: int = 0 - - -class ChlorideReportNums(BaseModel): - north: MinMaxAvgMedCount - south: MinMaxAvgMedCount - east: MinMaxAvgMedCount - west: MinMaxAvgMedCount - - @authenticated_chlorides_router.get( "/chlorides/report", dependencies=[Depends(ScopedUser.Read)], - response_model=ChlorideReportNums, + response_model=chlorides.ChlorideReportNums, tags=["Chlorides"], ) def get_chlorides_report( @@ -121,7 +107,8 @@ def get_chlorides_report( db: Session = Depends(get_db), ): """ - Returns min/max/avg for north/south/east/west halves **within the SE quadrant of New Mexico**, + Returns min/max/avg for north-west/north-east/south-west/south-east quadrants + within the SE quadrant of New Mexico, over the specified [from_month, to_month] inclusive range, for chloride wells in the given group. 
""" @@ -170,32 +157,33 @@ def get_chlorides_report( ) ] - north_vals: List[float] = [] - south_vals: List[float] = [] - east_vals: List[float] = [] - west_vals: List[float] = [] + north_west_vals: List[float] = [] + north_east_vals: List[float] = [] + south_west_vals: List[float] = [] + south_east_vals: List[float] = [] for val, lat, lon in se_rows: if val is None: continue # skip null chloride values - # North vs South halves within the SE quadrant - if float(lat) >= SE_MID_LAT: - north_vals.append(float(val)) - else: - south_vals.append(float(val)) + lat_value = float(lat) + lon_value = float(lon) + chloride_value = float(val) - # East vs West halves within the SE quadrant - if float(lon) >= SE_MID_LON: - east_vals.append(float(val)) + if lat_value >= SE_MID_LAT and lon_value < SE_MID_LON: + north_west_vals.append(chloride_value) + elif lat_value >= SE_MID_LAT and lon_value >= SE_MID_LON: + north_east_vals.append(chloride_value) + elif lat_value < SE_MID_LAT and lon_value < SE_MID_LON: + south_west_vals.append(chloride_value) else: - west_vals.append(float(val)) + south_east_vals.append(chloride_value) - return ChlorideReportNums( - north=_stats(north_vals), - south=_stats(south_vals), - east=_stats(east_vals), - west=_stats(west_vals), + return chlorides.ChlorideReportNums( + north_west=_stats(north_west_vals), + north_east=_stats(north_east_vals), + south_west=_stats(south_west_vals), + south_east=_stats(south_east_vals), ) @@ -210,7 +198,7 @@ def download_chlorides_report_pdf( db: Session = Depends(get_db), ): """ - Generate a PDF chloride report (north/south/east/west stats) + Generate a PDF chloride report (north-west/north-east/south-west/south-east stats) for the SE quadrant of New Mexico. 
""" # Re-use existing logic @@ -240,11 +228,11 @@ def download_chlorides_report_pdf( @authenticated_chlorides_router.post( "/chlorides", dependencies=[Depends(ScopedUser.WellMeasurementWrite)], - response_model=well_schemas.ChlorideMeasurement, + response_model=well.ChlorideMeasurement, tags=["Chlorides"], ) def add_chloride_measurement( - chloride_measurement: well_schemas.WellMeasurement, + chloride_measurement: well.WellMeasurement, db: Session = Depends(get_db), ): # Create a new chloride measurement as a WellMeasurement @@ -265,11 +253,11 @@ def add_chloride_measurement( @authenticated_chlorides_router.patch( "/chlorides", dependencies=[Depends(ScopedUser.WellMeasurementWrite)], - response_model=well_schemas.WellMeasurement, + response_model=well.WellMeasurement, tags=["Chlorides"], ) def patch_chloride_measurement( - chloride_measurement_patch: well_schemas.PatchChlorideMeasurement, + chloride_measurement_patch: well.PatchChlorideMeasurement, db: Session = Depends(get_db), ): well_measurement = ( @@ -306,12 +294,12 @@ def delete_chloride_measurement(chloride_measurement_id: int, db: Session = Depe return True -def _stats(values: List[Optional[float]]) -> MinMaxAvgMedCount: +def _stats(values: List[Optional[float]]) -> chlorides.MinMaxAvgMedCount: clean = [v for v in values if v is not None] if not clean: - return MinMaxAvgMedCount() + return chlorides.MinMaxAvgMedCount() - return MinMaxAvgMedCount( + return chlorides.MinMaxAvgMedCount( min=min(clean), max=max(clean), avg=sum(clean) / len(clean), diff --git a/api/routes/maintenance.py b/api/routes/maintenance.py index 4adff0cd..d6f3836f 100644 --- a/api/routes/maintenance.py +++ b/api/routes/maintenance.py @@ -1,7 +1,6 @@ from fastapi import Depends, APIRouter, Query from sqlalchemy import func from sqlalchemy.orm import Session -from pydantic import BaseModel from typing import List from datetime import datetime, date from fastapi.responses import StreamingResponse @@ -9,17 +8,13 @@ from io import BytesIO 
from collections import defaultdict from matplotlib.pyplot import figure, close -from api.models.main_models import ( - Users, - Meters, - MeterActivities, - ActivityTypeLU, - Locations, - workOrders, - workOrderStatusLU, -) +from api.models.location import Locations +from api.models.meter import ActivityTypeLU, MeterActivities, Meters +from api.models.user import Users +from api.models.work_order import workOrders, workOrderStatusLU +from api.schemas import maintenance from api.session import get_db -from api.enums import ScopedUser +from api.auth.dependencies import ScopedUser from pathlib import Path from jinja2 import Environment, FileSystemLoader, select_autoescape @@ -38,38 +33,10 @@ authenticated_maintenance_router = APIRouter() public_maintenance_router = APIRouter() - -class MeterSummary(BaseModel): - meter: str - count: int - - -class MaintenanceRow(BaseModel): - date_time: datetime - technician: str - meter: str - trss: str - number_of_repairs: int - number_of_pms: int - - -class MaintenanceSummaryResponse(BaseModel): - repairs_by_meter: List[MeterSummary] - pms_by_meter: List[MeterSummary] - table_rows: List[MaintenanceRow] - - -class HomeSummaryResponse(BaseModel): - completed_work_orders: int - repairs_processed: int - reinstallations_processed: int - preventative_maintenance_processed: int - - @public_maintenance_router.get( "/maintenance/home_summary", tags=["Maintenance"], - response_model=HomeSummaryResponse, + response_model=maintenance.HomeSummaryResponse, ) def get_home_summary(db: Session = Depends(get_db)): completed_work_orders = ( @@ -105,7 +72,7 @@ def get_home_summary(db: Session = Depends(get_db)): @authenticated_maintenance_router.get( "/maintenance", tags=["Maintenance"], - response_model=MaintenanceSummaryResponse, + response_model=maintenance.MaintenanceSummaryResponse, dependencies=[Depends(ScopedUser.Read)], ) def get_maintenance_summary( diff --git a/api/routes/meters.py b/api/routes/meters.py index 889a2b00..def625b9 100644 --- 
a/api/routes/meters.py +++ b/api/routes/meters.py @@ -5,44 +5,32 @@ from sqlalchemy.exc import IntegrityError from fastapi_pagination.ext.sqlalchemy import paginate from fastapi_pagination import LimitOffsetPage -from enum import Enum -from api.schemas import meter_schemas -from api.schemas import well_schemas -from api.models.main_models import ( +from api.schemas import meter +from api.schemas import well +from api.models.location import LandOwners, Locations +from api.models.meter import ( + ActivityTypeLU, Meters, - LandOwners, - MeterActivities, - PartsUsed, - Parts, - MeterObservations, - Locations, - MeterTypeLU, - Wells, MeterStatusLU, + MeterTypeLU, meterRegisters, ) -from api.route_util import _patch, _get +from api.models.well import Wells +from api.routes.utils import _patch, _get from api.session import get_db -from api.enums import ScopedUser, MeterSortByField, MeterStatus, SortDirection -from google.auth import default, impersonated_credentials -from google.cloud import storage -from datetime import timedelta - -import os +from api.services import meters as meter_service +from api.auth.dependencies import ScopedUser +from api.enums import MeterSortByField, MeterStatus, SortDirection authenticated_meter_router = APIRouter() public_meter_router = APIRouter() -# Generate random secret at startup -PHOTO_JWT_EXPIRE_SECONDS = 600 # 10 minutes -BUCKET_NAME = os.getenv("GCP_BUCKET_NAME", "") - # Get paginated, sorted list of meters, filtered by a search string if applicable @authenticated_meter_router.get( "/meters", dependencies=[Depends(ScopedUser.Read)], - response_model=LimitOffsetPage[meter_schemas.MeterListDTO], + response_model=LimitOffsetPage[meter.MeterListDTO], tags=["Meters"], ) def get_meters( @@ -111,12 +99,12 @@ def sort_by_field_to_schema_field(name: MeterSortByField): @authenticated_meter_router.post( "/meters", - response_model=meter_schemas.Meter, + response_model=meter.Meter, dependencies=[Depends(ScopedUser.Admin)], tags=["Meters"], ) def 
create_meter( - new_meter: meter_schemas.SubmitNewMeter, db: Session = Depends(get_db) + new_meter: meter.SubmitNewMeter, db: Session = Depends(get_db) ): """ Create a new meter. This requires a SN and meter type. @@ -173,7 +161,7 @@ def create_meter( @authenticated_meter_router.get( "/meters_locations", dependencies=[Depends(ScopedUser.Read)], - response_model=List[meter_schemas.MeterMapDTO], + response_model=List[meter.MeterMapDTO], tags=["Meters"], ) def get_meters_locations( @@ -220,24 +208,73 @@ def get_meters_locations( if not meter_ids: return [] # Short-circuit if nothing matched - # Query latest PMs for those meters - pm_query = text( + pm_activity_type_id = db.scalars( + select(ActivityTypeLU.id).where( + ActivityTypeLU.name == "Preventative Maintenance" + ) + ).first() + location_only_activity_type_id = db.scalars( + select(ActivityTypeLU.id).where(ActivityTypeLU.name == "Location Only") + ).first() + + if not pm_activity_type_id: + raise HTTPException( + status_code=500, + detail="Preventative Maintenance activity type is not configured.", + ) + if not location_only_activity_type_id: + raise HTTPException( + status_code=500, + detail="Location Only activity type is not configured.", + ) + + # Query latest PMs tied directly to the meter + meter_pm_query = text( """ - SELECT MAX(timestamp_start) AS last_pm, meter_id + SELECT MAX(timestamp_start) AS last_pm_meter_activity, meter_id FROM "MeterActivities" - WHERE activity_type_id = 4 + WHERE activity_type_id = :pm_activity_type_id AND meter_id = ANY(:mids) GROUP BY meter_id """ ) - pm_years = db.execute(pm_query, {"mids": meter_ids}).fetchall() - pm_dict = {row.meter_id: row.last_pm for row in pm_years} + meter_pm_rows = db.execute( + meter_pm_query, + {"mids": meter_ids, "pm_activity_type_id": pm_activity_type_id}, + ).fetchall() + meter_pm_dict = { + row.meter_id: row.last_pm_meter_activity for row in meter_pm_rows + } + + location_only_dict = {} + + if meter_ids: + location_only_query = text( + """ + 
SELECT MAX(timestamp_start) AS last_location_only_meter_activity, meter_id + FROM "MeterActivities" + WHERE activity_type_id = :location_only_activity_type_id + AND meter_id = ANY(:mids) + GROUP BY meter_id + """ + ) + location_only_rows = db.execute( + location_only_query, + { + "mids": meter_ids, + "location_only_activity_type_id": location_only_activity_type_id, + }, + ).fetchall() + location_only_dict = { + row.meter_id: row.last_location_only_meter_activity + for row in location_only_rows + } # Map to DTOs manually for added performance meter_map_list = [] for row in result: meter_map_list.append( - meter_schemas.MeterMapDTO( + meter.MeterMapDTO( id=row.id, serial_number=row.serial_number, well={ @@ -251,7 +288,8 @@ def get_meters_locations( "longitude": row.longitude, "trss": row.trss, }, - last_pm=pm_dict.get(row.id), + last_pm_meter_activity=meter_pm_dict.get(row.id), + last_location_only_meter_activity=location_only_dict.get(row.id), ) ) @@ -299,7 +337,7 @@ def get_meter( @authenticated_meter_router.get( "/meter_types", - response_model=List[meter_schemas.MeterTypeLU], + response_model=List[meter.MeterTypeLU], dependencies=[Depends(ScopedUser.Read)], tags=["Meters"], ) @@ -310,7 +348,7 @@ def get_meter_types(db: Session = Depends(get_db)): # A route to return register types from meter_register table @authenticated_meter_router.get( "/meter_registers", - response_model=List[meter_schemas.MeterRegister], + response_model=List[meter.MeterRegister], dependencies=[Depends(ScopedUser.Read)], tags=["Meters"], ) @@ -326,7 +364,7 @@ def get_meter_registers(db: Session = Depends(get_db)): # A route to return status types from the MeterStatusLU table @authenticated_meter_router.get( "/meter_status_types", - response_model=List[meter_schemas.MeterStatusLU], + response_model=List[meter.MeterStatusLU], dependencies=[Depends(ScopedUser.Read)], tags=["Meters"], ) @@ -336,12 +374,12 @@ def get_meter_status(db: Session = Depends(get_db)): @authenticated_meter_router.patch( 
"/meter_types", - response_model=meter_schemas.MeterTypeLU, + response_model=meter.MeterTypeLU, dependencies=[Depends(ScopedUser.Admin)], tags=["Meters"], ) def update_meter_type( - updated_meter_type: meter_schemas.MeterTypeLU, db: Session = Depends(get_db) + updated_meter_type: meter.MeterTypeLU, db: Session = Depends(get_db) ): _patch(db, MeterTypeLU, updated_meter_type.id, updated_meter_type) @@ -354,12 +392,12 @@ def update_meter_type( @authenticated_meter_router.post( "/meter_types", - response_model=meter_schemas.MeterTypeLU, + response_model=meter.MeterTypeLU, dependencies=[Depends(ScopedUser.Admin)], tags=["Meters"], ) def create_meter_type( - new_meter_type: meter_schemas.MeterTypeLU, db: Session = Depends(get_db) + new_meter_type: meter.MeterTypeLU, db: Session = Depends(get_db) ): new_type_model = MeterTypeLU( brand=new_meter_type.brand, @@ -380,7 +418,7 @@ def create_meter_type( @authenticated_meter_router.get( "/land_owners", dependencies=[Depends(ScopedUser.Read)], - response_model=List[well_schemas.LandOwner], + response_model=List[well.LandOwner], tags=["Meters"], ) def get_land_owners( @@ -392,11 +430,11 @@ def get_land_owners( @authenticated_meter_router.patch( "/meter", dependencies=[Depends(ScopedUser.Admin)], - response_model=meter_schemas.Meter, + response_model=meter.Meter, tags=["Meters"], ) def patch_meter( - updated_meter: meter_schemas.SubmitMeterUpdate, db: Session = Depends(get_db) + updated_meter: meter.SubmitMeterUpdate, db: Session = Depends(get_db) ): """ Update the current state of a meter. This is only used by Meter Details on the frontend. @@ -453,127 +491,4 @@ def patch_meter( "/meter_history", dependencies=[Depends(ScopedUser.Read)], tags=["Meters"] ) def get_meter_history(meter_id: int, db: Session = Depends(get_db)): - """ - Get a list of the given meters history. - No defined schema for this at the moment. 
- """ - - class HistoryType(Enum): - Activity = "Activity" - Observation = "Observation" - LocationChange = "LocationChange" - - activities = ( - db.scalars( - select(MeterActivities) - .options( - joinedload(MeterActivities.location), - joinedload(MeterActivities.submitting_user), - joinedload(MeterActivities.activity_type), - joinedload(MeterActivities.parts_used_links) - .joinedload(PartsUsed.part) - .joinedload(Parts.part_type), - joinedload(MeterActivities.notes), - joinedload(MeterActivities.services_performed), - ) - .filter(MeterActivities.meter_id == meter_id) - ) - .unique() - .all() - ) - - observations = db.scalars( - select(MeterObservations) - .options( - joinedload(MeterObservations.submitting_user), - joinedload(MeterObservations.observed_property), - joinedload(MeterObservations.unit), - joinedload(MeterObservations.location), - ) - .filter(MeterObservations.meter_id == meter_id) - ).all() - - # Take all the history object we just got from the database and make them into a object that's easy for the frontend to consume - formattedHistoryItems = [] - itemID = 0 - - for activity in activities: - activity.location.geom = None # FastAPI errors when returning this - - # Find if there is a well associated with the location - activity_well = db.scalars( - select(Wells).where(Wells.location_id == activity.location_id) - ).first() - - photos = [ - { - "id": photo.id, - "file_name": photo.file_name, - "url": create_signed_url(photo.gcs_path), - "uploaded_at": photo.uploaded_at, - } - for photo in activity.photos - ] - - formattedHistoryItems.append( - { - "id": itemID, - "history_type": HistoryType.Activity, - "well": activity_well, - "location": activity.location, - "activity_type": activity.activity_type_id, - "date": activity.timestamp_start, - "history_item": activity, - "photos": photos, - } - ) - itemID += 1 - - for observation in observations: - observation.location.geom = None - - # Find if there is a well associated with the location - 
observation_well = db.scalars( - select(Wells).where(Wells.location_id == observation.location_id) - ).first() - - formattedHistoryItems.append( - { - "id": itemID, - "history_type": HistoryType.Observation, - "well": observation_well, - "location": observation.location, - "date": observation.timestamp, - "history_item": observation, - } - ) - itemID += 1 - - # Add location history also - - formattedHistoryItems.sort(key=lambda x: x["date"], reverse=True) - - return formattedHistoryItems - - -def create_signed_url(blob_path: str) -> str: - """Create a v4 signed URL for a blob in GCS.""" - source_creds, _ = default() - target_sa = "pvacd-meterapp@waterdatainitiative-271000.iam.gserviceaccount.com" - - creds = impersonated_credentials.Credentials( - source_credentials=source_creds, - target_principal=target_sa, - target_scopes=["https://www.googleapis.com/auth/devstorage.read_only"], - lifetime=3600, - ) - - storage_client = storage.Client(credentials=creds) - bucket = storage_client.bucket(BUCKET_NAME) - blob = bucket.blob(blob_path) - url = blob.generate_signed_url( - version="v4", - expiration=timedelta(seconds=PHOTO_JWT_EXPIRE_SECONDS), - method="GET", - ) - return url + return meter_service.get_meter_history(db, meter_id) diff --git a/api/routes/notifications.py b/api/routes/notifications.py index 0efdad02..a5a8baae 100644 --- a/api/routes/notifications.py +++ b/api/routes/notifications.py @@ -6,9 +6,9 @@ from sqlalchemy import func, select from sqlalchemy.orm import Session, joinedload -from api.enums import ScopedUser -from api.models.main_models import Notifications, NotificationTypeLU, Users -from api.schemas.notification_schemas import ( +from api.auth.dependencies import ScopedUser +from api.models.user import Notifications, NotificationTypeLU, Users +from api.schemas.notifications import ( NotificationCreateRequest, NotificationCreateResult, Notification, diff --git a/api/routes/parts.py b/api/routes/parts.py index ca771877..7afd3ba5 100644 --- 
a/api/routes/parts.py +++ b/api/routes/parts.py @@ -1,628 +1,226 @@ -from fastapi import Depends, APIRouter, HTTPException, Query -from sqlalchemy.orm import Session, joinedload, selectinload -from sqlalchemy import select, func, literal, union_all -from typing import List, Union, Optional -from datetime import datetime, date, time -from fastapi.responses import StreamingResponse -from weasyprint import HTML -from io import BytesIO -from api.models.main_models import ( - Parts, - PartsUsed, - PartsAdded, - PartAssociation, - PartTypeLU, - Meters, - MeterTypeLU, - meterRegisters, - MeterActivities, +from datetime import date +from typing import List, Optional, Union + +from fastapi import APIRouter, Depends, HTTPException, Query +from fastapi.responses import StreamingResponse +from sqlalchemy import select +from sqlalchemy.exc import IntegrityError +from sqlalchemy.orm import Session, joinedload + +from api.auth.dependencies import ScopedUser +from api.models.meter import MeterTypeLU, Meters +from api.models.part import PartAssociation, PartTypeLU, Parts +from api.routes.utils import _get +from api.schemas import parts +from api.services import parts as part_service +from api.session import get_db + + +part_router = APIRouter() + + +@part_router.get( + "/parts", + response_model=List[parts.Part], + dependencies=[Depends(ScopedUser.Read)], + tags=["Parts"], ) -from api.schemas import part_schemas -from api.session import get_db -from api.route_util import _get -from api.enums import ScopedUser -from sqlalchemy.exc import IntegrityError -from pathlib import Path -from jinja2 import Environment, FileSystemLoader, select_autoescape - -TEMPLATES_DIR = Path(__file__).resolve().parent.parent / "templates" - -templates = Environment( - loader=FileSystemLoader(TEMPLATES_DIR), - autoescape=select_autoescape(["html", "xml"]), +def get_parts( + db: Session = Depends(get_db), + in_use: Optional[bool] = Query(None, description="Filter by in_use status"), +): + return 
part_service.list_parts(db, in_use) + + +@part_router.get( + "/parts/used", + tags=["Parts"], + dependencies=[Depends(ScopedUser.Read)], ) +def get_parts_used_summary( + from_date: date = Query(..., description="Start date YYYY-MM-DD"), + to_date: date = Query(..., description="End date YYYY-MM-DD"), + parts: List[int] = Query(...), + db: Session = Depends(get_db), +): + return part_service.get_parts_used_summary(db, from_date, to_date, parts) -part_router = APIRouter() +@part_router.get( + "/parts/used/pdf", + tags=["Parts"], + dependencies=[Depends(ScopedUser.Read)], +) +def download_parts_used_pdf( + from_date: date = Query(..., description="Start date YYYY-MM-DD"), + to_date: date = Query(..., description="End date YYYY-MM-DD"), + parts: List[int] = Query(...), + db: Session = Depends(get_db), +): + pdf_io = part_service.build_parts_used_pdf(db, from_date, to_date, parts) -def _build_part_history_response(part_id: int, db: Session) -> part_schemas.PartHistoryResponse: - part = db.scalars(select(Parts).where(Parts.id == part_id)).first() - if not part: - raise HTTPException(status_code=404, detail="Part not found") - - added_q = select( - PartsAdded.id.label("ref_id"), - PartsAdded.part_id.label("part_id"), - PartsAdded.date.label("event_date"), - literal("added").label("event_type"), - PartsAdded.note.label("note"), - PartsAdded.count.label("delta"), - literal(None).label("work_order_id"), - ).where(PartsAdded.part_id == part_id) - - used_q = ( - select( - PartsUsed.id.label("ref_id"), - PartsUsed.part_id.label("part_id"), - MeterActivities.timestamp_start.label("event_date"), - literal("used").label("event_type"), - func.nullif(func.trim(MeterActivities.description), "").label("note"), - (-PartsUsed.count).label("delta"), - MeterActivities.work_order_id.label("work_order_id"), - ) - .join(MeterActivities, MeterActivities.id == PartsUsed.meter_activity_id) - .where(PartsUsed.part_id == part_id) + return StreamingResponse( + pdf_io, + 
media_type="application/pdf", + headers={"Content-Disposition": "attachment; filename=parts_used_report.pdf"}, ) - events = union_all(added_q, used_q).subquery() - - rows = db.execute( - select( - events.c.ref_id, - events.c.part_id, - events.c.event_date, - events.c.event_type, - events.c.note, - events.c.delta, - events.c.work_order_id, - ).order_by(events.c.event_date.asc(), events.c.ref_id.asc()) - ).all() - running = int(part.initial_count) - history: list[part_schemas.PartHistoryRow] = [ - part_schemas.PartHistoryRow( - row_id=f"initial-{part_id}", - part_id=part_id, - event_date=datetime.min, - event_type="initial", - ref_id=None, - note="Initial count", - delta=0, - total_after=running, - work_order_id=None, - ) - ] - - for ref_id, pid, event_date, event_type, note, delta, work_order_id in rows: - if not isinstance(event_date, datetime): - event_date = datetime.combine(event_date, time.min) - - running += int(delta) - history.append( - part_schemas.PartHistoryRow( - row_id=f"{event_type}-{ref_id}", - part_id=pid, - event_date=event_date, - event_type=event_type, - ref_id=ref_id, - note=note, - delta=int(delta), - total_after=running, - work_order_id=work_order_id, - ) - ) +@part_router.get( + "/part_types", + response_model=List[parts.PartTypeLU], + dependencies=[Depends(ScopedUser.Read)], + tags=["Parts"], +) +def get_part_types(db: Session = Depends(get_db)): + return db.scalars(select(PartTypeLU)).all() - return part_schemas.PartHistoryResponse( - part_id=part.id, - part_number=part.part_number, - initial_count=part.initial_count, - current_count=running, - history=history, - ) - - -@part_router.get( - "/parts", - response_model=List[part_schemas.Part], - dependencies=[Depends(ScopedUser.Read)], - tags=["Parts"], -) -def get_parts( - db: Session = Depends(get_db), - in_use: Optional[bool] = Query(None, description="Filter by in_use status"), -): - used_subq = ( - select( - PartsUsed.part_id.label("part_id"), - func.coalesce(func.sum(PartsUsed.count), 
0).label("used_sum"), - ) - .group_by(PartsUsed.part_id) - .subquery() - ) - - added_subq = ( - select( - PartsAdded.part_id.label("part_id"), - func.coalesce(func.sum(PartsAdded.count), 0).label("added_sum"), - ) - .group_by(PartsAdded.part_id) - .subquery() - ) - - current_count = ( - Parts.initial_count - + func.coalesce(added_subq.c.added_sum, 0) - - func.coalesce(used_subq.c.used_sum, 0) - ).label("current_count") - - stmt = ( - select(Parts, current_count) - .outerjoin(used_subq, used_subq.c.part_id == Parts.id) - .outerjoin(added_subq, added_subq.c.part_id == Parts.id) - .options(selectinload(Parts.part_type)) - ) - - if in_use is not None: - stmt = stmt.where(Parts.in_use == in_use) - - rows = db.execute(stmt).all() - - results = [] - for part, curr in rows: - part.current_count = curr - results.append(part) - - return results - - -@part_router.get( - "/parts/used", - tags=["Parts"], - dependencies=[Depends(ScopedUser.Read)], -) -def get_parts_used_summary( - from_date: date = Query(..., description="Start date YYYY-MM-DD"), - to_date: date = Query(..., description="End date YYYY-MM-DD"), - parts: List[int] = Query(...), - db: Session = Depends(get_db), -): - # Convert to datetimes for inclusive range - start_dt = datetime.combine(from_date, datetime.min.time()) - end_dt = datetime.combine(to_date, datetime.max.time()) - - usage_subq = ( - db.query( - PartsUsed.part_id.label("used_part_id"), - func.coalesce(func.sum(PartsUsed.count), 0).label("quantity"), - ) - .join(MeterActivities, MeterActivities.id == PartsUsed.meter_activity_id) - .filter( - MeterActivities.timestamp_start >= start_dt, - MeterActivities.timestamp_start <= end_dt, - PartsUsed.part_id.in_(parts), - ) - .group_by(PartsUsed.part_id) - .subquery() - ) - - query = ( - db.query( - Parts.id.label("id"), - Parts.part_number, - Parts.description, - Parts.price, - func.coalesce(usage_subq.c.quantity, 0).label("quantity"), - ) - .outerjoin(usage_subq, Parts.id == usage_subq.c.used_part_id) - 
.filter(Parts.id.in_(parts)) - .order_by(Parts.part_number) - ) - - results = [] - for row in query.all(): - price = float(row.price or 0) - quantity = int(row.quantity or 0) - total = price * quantity - results.append( - { - "id": row.id, - "part_number": row.part_number, - "description": row.description, - "price": price, - "quantity": quantity, - "total": total, - } - ) - - return results - - -@part_router.get( - "/parts/used/pdf", - tags=["Parts"], - dependencies=[Depends(ScopedUser.Read)], -) -def download_parts_used_pdf( - from_date: date = Query(..., description="Start date YYYY-MM-DD"), - to_date: date = Query(..., description="End date YYYY-MM-DD"), - parts: List[int] = Query(...), - db: Session = Depends(get_db), -): - # Re-use your existing logic - results = get_parts_used_summary( - from_date=from_date, to_date=to_date, parts=parts, db=db - ) - - # Add running total just for PDF - running_total = 0.0 - for r in results: - running_total += r["total"] - r["running_total"] = running_total - - template = templates.get_template("parts_used_report.html") - html_content = template.render( - rows=results, - from_date=from_date, - to_date=to_date, - ) - pdf_io = BytesIO() - HTML(string=html_content).write_pdf(pdf_io) - pdf_io.seek(0) - - return StreamingResponse( - pdf_io, - media_type="application/pdf", - headers={"Content-Disposition": "attachment; filename=parts_used_report.pdf"}, - ) - - -@part_router.get( - "/part_types", - response_model=List[part_schemas.PartTypeLU], - dependencies=[Depends(ScopedUser.Read)], - tags=["Parts"], -) -def get_part_types(db: Session = Depends(get_db)): - return db.scalars(select(PartTypeLU)).all() - - -@part_router.get( - "/part", - response_model=Union[part_schemas.Part, part_schemas.Register], - dependencies=[Depends(ScopedUser.Read)], - tags=["Parts"], -) -def get_part(part_id: int, db: Session = Depends(get_db)): - used_subq = ( - select( - PartsUsed.part_id.label("part_id"), - func.coalesce(func.sum(PartsUsed.count), 
0).label("used_sum"), - ) - .group_by(PartsUsed.part_id) - .subquery() - ) - - added_subq = ( - select( - PartsAdded.part_id.label("part_id"), - func.coalesce(func.sum(PartsAdded.count), 0).label("added_sum"), - ) - .group_by(PartsAdded.part_id) - .subquery() - ) - - current_count = ( - Parts.initial_count - + func.coalesce(added_subq.c.added_sum, 0) - - func.coalesce(used_subq.c.used_sum, 0) - ).label("current_count") - - row = db.execute( - select(Parts, current_count) - .outerjoin(used_subq, used_subq.c.part_id == Parts.id) - .outerjoin(added_subq, added_subq.c.part_id == Parts.id) - .where(Parts.id == part_id) - .options( - selectinload(Parts.part_type), - selectinload(Parts.meter_types), - ) - ).first() - - if not row: - return None - - selected_part, curr = row - selected_part.current_count = curr - - # Create the part_schemas.Part instance - returned_part = part_schemas.Part.model_validate(selected_part) - - # If part_type is a Register, we need to load the register details - if selected_part and selected_part.part_type.name == "Register": - register_details = db.scalars( - select(meterRegisters).where(meterRegisters.part_id == selected_part.id) - ).first() - - register_details_obj = None - if register_details is not None: - register_details_obj = ( - part_schemas.Register.register_details.model_validate(register_details) - ) - - # Update the returned_part to include register details - returned_part = part_schemas.Register( - **returned_part.model_dump(exclude_unset=True), - register_settings=register_details_obj, - ) - - return returned_part - - -@part_router.patch( - "/part", - response_model=part_schemas.Part, - dependencies=[Depends(ScopedUser.Admin)], - tags=["Parts"], -) -def update_part(updated_part: part_schemas.Part, db: Session = Depends(get_db)): - # Update the part (this won't include secondary attributes like associations) - part_db = _get(db, Parts, updated_part.id) - - for k, v in updated_part.model_dump(exclude_unset=True).items(): - if k in 
["part_type", "meter_types", "current_count"]: - continue - try: - setattr(part_db, k, v) - except AttributeError as e: - print(e) - continue - - try: - db.add(part_db) - db.commit() - except IntegrityError: - raise HTTPException(status_code=409, detail="Part SN already exists") - - # Load the updated part to get the relationships - part = db.scalars( - select(Parts) - .where(Parts.id == updated_part.id) - .options(joinedload(Parts.part_type)) - ).first() - - # Update associations, _patch only handles direct attributes - if updated_part.meter_types: - part.meter_types = db.scalars( - select(MeterTypeLU).where( - MeterTypeLU.id.in_(map(lambda type: type.id, updated_part.meter_types)) - ) - ).all() - - db.commit() - db.refresh(part) - - return part - - -@part_router.post( - "/parts", - response_model=part_schemas.Part, - dependencies=[Depends(ScopedUser.Admin)], - tags=["Parts"], -) -def create_part(new_part: part_schemas.Part, db: Session = Depends(get_db)): - new_part_model = Parts( - part_number=new_part.part_number, - part_type_id=new_part.part_type_id, - description=new_part.description, - vendor=new_part.vendor, - initial_count=new_part.initial_count, - note=new_part.note, - in_use=new_part.in_use, - commonly_used=new_part.commonly_used, - price=new_part.price, - ) - - try: - db.add(new_part_model) - db.commit() - except IntegrityError: - raise HTTPException(status_code=409, detail="Part SN already exists") - - # Associate with meter types - if new_part.meter_types: - new_part_model.meter_types = db.scalars( - select(MeterTypeLU).where( - MeterTypeLU.id.in_(map(lambda type: type.id, new_part.meter_types)) - ) - ).all() - - db.commit() - db.refresh(new_part_model) - - # Load part_type relationship - new_part_model.part_type - - return new_part_model - - -@part_router.get( - "/meter_parts", - response_model=List[part_schemas.Part], - dependencies=[Depends(ScopedUser.Read)], - tags=["Parts"], -) -def get_meter_parts(meter_id: int, db: Session = Depends(get_db)): - 
meter_type_id = db.scalars( - select(Meters.meter_type_id).where(Meters.id == meter_id) - ).first() - - part_id_list = db.scalars( - select(PartAssociation.c.part_id).where( - PartAssociation.c.meter_type_id == meter_type_id - ) - ).all() - - meter_parts = db.scalars( - select(Parts) - .where(Parts.id.in_(part_id_list)) - .options(joinedload(Parts.part_type)) - ).all() - - return meter_parts - - -@part_router.post( - "/parts/add", - response_model=part_schemas.Part, - dependencies=[Depends(ScopedUser.Admin)], - tags=["Parts"], -) -def add_parts(payload: part_schemas.PartsAddRequest, db: Session = Depends(get_db)): - # Ensure part exists - part = db.scalars(select(Parts).where(Parts.id == payload.part_id)).first() - if not part: - raise HTTPException(status_code=404, detail="Part not found") - - # Insert PartsAdded row (do NOT mutate Parts.initial_count) - added = PartsAdded( - part_id=payload.part_id, - count=payload.count, - date=payload.date, - note=payload.note, - ) - db.add(added) - db.commit() - - # Return updated part with current_count computed (same formula) - used_subq = ( - select( - PartsUsed.part_id.label("part_id"), - func.coalesce(func.sum(PartsUsed.count), 0).label("used_sum"), - ) - .group_by(PartsUsed.part_id) - .subquery() - ) - - added_subq = ( - select( - PartsAdded.part_id.label("part_id"), - func.coalesce(func.sum(PartsAdded.count), 0).label("added_sum"), - ) - .group_by(PartsAdded.part_id) - .subquery() - ) - - current_count = ( - Parts.initial_count - + func.coalesce(added_subq.c.added_sum, 0) - - func.coalesce(used_subq.c.used_sum, 0) - ).label("current_count") - - row = db.execute( - select(Parts, current_count) - .outerjoin(used_subq, used_subq.c.part_id == Parts.id) - .outerjoin(added_subq, added_subq.c.part_id == Parts.id) - .where(Parts.id == payload.part_id) - .options(selectinload(Parts.part_type), selectinload(Parts.meter_types)) - ).first() - - if not row: - raise HTTPException(status_code=404, detail="Part not found") - - 
part_obj, curr = row - part_obj.current_count = curr - return part_obj - - -@part_router.get( - "/parts/{part_id}/history", - response_model=part_schemas.PartHistoryResponse, - dependencies=[Depends(ScopedUser.Admin)], - tags=["Parts"], -) -def get_part_history(part_id: int, db: Session = Depends(get_db)): - return _build_part_history_response(part_id, db) + +@part_router.get( + "/part", + response_model=Union[parts.Part, parts.Register], + dependencies=[Depends(ScopedUser.Read)], + tags=["Parts"], +) +def get_part(part_id: int, db: Session = Depends(get_db)): + return part_service.get_part(db, part_id) @part_router.patch( - "/parts/{part_id}/history", - response_model=part_schemas.PartHistoryResponse, + "/part", + response_model=parts.Part, dependencies=[Depends(ScopedUser.Admin)], tags=["Parts"], ) -def patch_part_history( - part_id: int, - payload: part_schemas.PartHistoryUpdateRequest, - db: Session = Depends(get_db), -): - part = db.scalars(select(Parts).where(Parts.id == part_id)).first() - if not part: - raise HTTPException(status_code=404, detail="Part not found") - - for row in payload.rows: - normalized_note = row.note.strip() if row.note else None - if normalized_note == "": - normalized_note = None - - if row.event_type == "added": - if row.delta <= 0: - raise HTTPException( - status_code=422, - detail="Added parts rows must have a positive change.", - ) - - added_row = db.scalars( - select(PartsAdded).where( - PartsAdded.id == row.ref_id, - PartsAdded.part_id == part_id, - ) - ).first() - if not added_row: - raise HTTPException(status_code=404, detail="Parts added row not found.") - - added_row.count = row.delta - added_row.date = row.event_date.date() - added_row.note = normalized_note +def update_part(updated_part: parts.Part, db: Session = Depends(get_db)): + part_db = _get(db, Parts, updated_part.id) + + for k, v in updated_part.model_dump(exclude_unset=True).items(): + if k in ["part_type", "meter_types", "current_count"]: + continue + try: + 
setattr(part_db, k, v) + except AttributeError as e: + print(e) continue - if row.delta >= 0: - raise HTTPException( - status_code=422, - detail="Work order rows must have a negative change.", - ) + try: + db.add(part_db) + db.commit() + except IntegrityError: + raise HTTPException(status_code=409, detail="Part SN already exists") - parts_used_row = db.scalars( - select(PartsUsed).where( - PartsUsed.id == row.ref_id, - PartsUsed.part_id == part_id, - ) - ).first() - if not parts_used_row: - raise HTTPException(status_code=404, detail="Parts used row not found.") + part = db.scalars( + select(Parts).where(Parts.id == updated_part.id).options(joinedload(Parts.part_type)) + ).first() - activity = db.scalars( - select(MeterActivities).where( - MeterActivities.id == parts_used_row.meter_activity_id - ) - ).first() - if not activity: - raise HTTPException( - status_code=404, - detail="Meter activity for parts used row not found.", + if updated_part.meter_types: + part.meter_types = db.scalars( + select(MeterTypeLU).where( + MeterTypeLU.id.in_(map(lambda type: type.id, updated_part.meter_types)) ) + ).all() - original_start = activity.timestamp_start - original_end = activity.timestamp_end - duration = original_end - original_start if original_end and original_start else None + db.commit() + db.refresh(part) - parts_used_row.count = abs(row.delta) - activity.timestamp_start = row.event_date - activity.description = normalized_note - if duration is not None: - activity.timestamp_end = row.event_date + duration - else: - activity.timestamp_end = row.event_date + return part + + +@part_router.post( + "/parts", + response_model=parts.Part, + dependencies=[Depends(ScopedUser.Admin)], + tags=["Parts"], +) +def create_part(new_part: parts.Part, db: Session = Depends(get_db)): + new_part_model = Parts( + part_number=new_part.part_number, + part_type_id=new_part.part_type_id, + description=new_part.description, + vendor=new_part.vendor, + initial_count=new_part.initial_count, + 
note=new_part.note, + in_use=new_part.in_use, + commonly_used=new_part.commonly_used, + price=new_part.price, + ) + + try: + db.add(new_part_model) + db.commit() + except IntegrityError: + raise HTTPException(status_code=409, detail="Part SN already exists") + + if new_part.meter_types: + new_part_model.meter_types = db.scalars( + select(MeterTypeLU).where( + MeterTypeLU.id.in_(map(lambda type: type.id, new_part.meter_types)) + ) + ).all() db.commit() - return _build_part_history_response(part_id, db) + db.refresh(new_part_model) + new_part_model.part_type + + return new_part_model + + +@part_router.get( + "/meter_parts", + response_model=List[parts.Part], + dependencies=[Depends(ScopedUser.Read)], + tags=["Parts"], +) +def get_meter_parts(meter_id: int, db: Session = Depends(get_db)): + meter_type_id = db.scalars( + select(Meters.meter_type_id).where(Meters.id == meter_id) + ).first() + + part_id_list = db.scalars( + select(PartAssociation.c.part_id).where( + PartAssociation.c.meter_type_id == meter_type_id + ) + ).all() + + meter_parts = db.scalars( + select(Parts) + .where(Parts.id.in_(part_id_list)) + .options(joinedload(Parts.part_type)) + ).all() + + return meter_parts + + +@part_router.post( + "/parts/add", + response_model=parts.Part, + dependencies=[Depends(ScopedUser.Admin)], + tags=["Parts"], +) +def add_parts(payload: parts.PartsAddRequest, db: Session = Depends(get_db)): + return part_service.add_parts(db, payload) + + +@part_router.get( + "/parts/{part_id}/history", + response_model=parts.PartHistoryResponse, + dependencies=[Depends(ScopedUser.Admin)], + tags=["Parts"], +) +def get_part_history(part_id: int, db: Session = Depends(get_db)): + return part_service.build_part_history_response(part_id, db) + + +@part_router.patch( + "/parts/{part_id}/history", + response_model=parts.PartHistoryResponse, + dependencies=[Depends(ScopedUser.Admin)], + tags=["Parts"], +) +def patch_part_history( + part_id: int, + payload: parts.PartHistoryUpdateRequest, + db: 
Session = Depends(get_db), +): + return part_service.patch_part_history(db, part_id, payload) diff --git a/api/routes/settings.py b/api/routes/settings.py index 90c51e15..4d2ce294 100644 --- a/api/routes/settings.py +++ b/api/routes/settings.py @@ -5,10 +5,10 @@ from PIL import Image, UnidentifiedImageError from sqlalchemy.exc import SQLAlchemyError from sqlalchemy.orm import Session -from api.schemas.base import ORMBase +from api.schemas import settings from api.session import get_db from api.security import get_current_user, get_password_hash, verify_password -from api.models.main_models import Users +from api.models.user import Users settings_router = APIRouter() @@ -36,17 +36,12 @@ def get_redirect_page( return {"redirect_page": db_user.redirect_page} - -class RedirectPageUpdate(ORMBase): - redirect_page: str - - @settings_router.post( "/settings/redirect_page", tags=["settings"], ) def post_redirect_page( - update: RedirectPageUpdate, + update: settings.RedirectPageUpdate, db: Session = Depends(get_db), user: Users = Depends(get_current_user), ): @@ -60,22 +55,12 @@ def post_redirect_page( return {"message": "Redirect page updated", "redirect_page": db_user.redirect_page} - -class DisplayNameUpdate(ORMBase): - display_name: str - - -class PasswordResetRequest(ORMBase): - current_password: str - new_password: str - - @settings_router.post( "/settings/display_name", tags=["settings"], ) def post_redirect_page( - update: DisplayNameUpdate, + update: settings.DisplayNameUpdate, db: Session = Depends(get_db), user: Users = Depends(get_current_user), ): @@ -95,7 +80,7 @@ def post_redirect_page( tags=["settings"], ) def post_password_reset( - update: PasswordResetRequest, + update: settings.PasswordResetRequest, db: Session = Depends(get_db), user: Users = Depends(get_current_user), ): diff --git a/api/routes/user_sessions.py b/api/routes/user_sessions.py new file mode 100644 index 00000000..09832923 --- /dev/null +++ b/api/routes/user_sessions.py @@ -0,0 +1,225 @@ 
+from collections import defaultdict + +from fastapi import APIRouter, Depends, HTTPException +from sqlalchemy.orm import Session +from starlette import status + +from api.models.user import UserSessions, Users +from api.schemas import user_sessions +from api.security import get_current_user, get_session_identifier_from_token, oauth2_scheme +from api.session import get_db +from api.auth.session_tracking import mark_session_signed_out + +user_sessions_router = APIRouter(tags=["Login"]) + +def serialize_session( + session: UserSessions, + *, + current_session_identifier: str | None, +) -> user_sessions.UserSessionSummary: + return user_sessions.UserSessionSummary( + session_identifier=session.session_identifier, + device_label=session.device_label, + device_type=session.device_type, + browser=session.browser, + operating_system=session.operating_system, + ip_address=session.ip_address, + signed_in_at=session.signed_in_at, + last_seen_at=session.last_seen_at, + signed_out_at=session.signed_out_at, + is_active=session.is_active, + sign_out_reason_name=( + session.sign_out_reason_type.name if session.sign_out_reason_type else None + ), + is_current=session.session_identifier == current_session_identifier, + ) + + +def get_known_device_key(session: UserSessions) -> str: + if session.fingerprint_hash: + return f"fingerprint:{session.fingerprint_hash}" + + fallback_parts = [ + session.device_label or "unknown-device", + session.browser or "unknown-browser", + session.operating_system or "unknown-os", + session.device_type or "unknown-type", + ] + return f"derived:{'|'.join(fallback_parts)}" + + +@user_sessions_router.get( + "/user-sessions/current/status", + response_model=user_sessions.CurrentSessionStatusResponse, +) +def get_current_session_status( + _: Users = Depends(get_current_user), + token: str = Depends(oauth2_scheme), +): + current_session_identifier = get_session_identifier_from_token(token) + if not current_session_identifier: + raise HTTPException( + 
status_code=status.HTTP_400_BAD_REQUEST, + detail="Session identifier is missing from token", + ) + + return user_sessions.CurrentSessionStatusResponse( + session_identifier=current_session_identifier, + is_active=True, + ) + + +@user_sessions_router.get( + "/user-sessions", + response_model=user_sessions.UserSessionsResponse, +) +def list_user_sessions( + db: Session = Depends(get_db), + current_user: Users = Depends(get_current_user), + token: str = Depends(oauth2_scheme), +): + current_session_identifier = get_session_identifier_from_token(token) + sessions = ( + db.query(UserSessions) + .filter(UserSessions.user_id == current_user.id) + .order_by(UserSessions.last_seen_at.desc(), UserSessions.signed_in_at.desc()) + .all() + ) + + serialized_sessions = [ + serialize_session( + session, + current_session_identifier=current_session_identifier, + ) + for session in sessions + ] + + grouped_sessions: dict[str, list[UserSessions]] = defaultdict(list) + for session in sessions: + grouped_sessions[get_known_device_key(session)].append(session) + + known_devices: list[user_sessions.KnownDeviceSummary] = [] + for device_key, device_sessions in grouped_sessions.items(): + ordered_sessions = sorted( + device_sessions, + key=lambda session: (session.last_seen_at, session.signed_in_at), + reverse=True, + ) + newest_session = ordered_sessions[0] + known_devices.append( + user_sessions.KnownDeviceSummary( + device_key=device_key, + device_label=newest_session.device_label, + device_type=newest_session.device_type, + browser=newest_session.browser, + operating_system=newest_session.operating_system, + session_count=len(device_sessions), + active_session_count=sum( + 1 for session in device_sessions if session.is_active + ), + signed_in_at_first=min( + session.signed_in_at for session in device_sessions + ), + last_seen_at=max(session.last_seen_at for session in device_sessions), + is_current_device=any( + session.session_identifier == current_session_identifier + for session in 
device_sessions + ), + ) + ) + + known_devices.sort( + key=lambda device: (device.is_current_device, device.last_seen_at), + reverse=True, + ) + + return user_sessions.UserSessionsResponse( + current_session_identifier=current_session_identifier, + sessions=serialized_sessions, + known_devices=known_devices, + ) + + +@user_sessions_router.delete("/user-sessions/{session_identifier}") +def revoke_user_session( + session_identifier: str, + db: Session = Depends(get_db), + current_user: Users = Depends(get_current_user), + token: str = Depends(oauth2_scheme), +): + current_session_identifier = get_session_identifier_from_token(token) + if session_identifier == current_session_identifier: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="The current session cannot be closed from this endpoint", + ) + + session = ( + db.query(UserSessions) + .filter( + UserSessions.session_identifier == session_identifier, + UserSessions.user_id == current_user.id, + ) + .first() + ) + if not session: + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Session not found") + + mark_session_signed_out( + db, + session_identifier=session_identifier, + reason_name="forced_logout", + ) + db.commit() + + return { + "message": "Session closed", + "session_identifier": session_identifier, + } + + +@user_sessions_router.post("/logout") +def logout_current_session( + payload: user_sessions.SessionSignOutRequest, + db: Session = Depends(get_db), + _: Users = Depends(get_current_user), + token: str = Depends(oauth2_scheme), +): + session_identifier = get_session_identifier_from_token(token) + if not session_identifier: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Session identifier is missing from token", + ) + + session = mark_session_signed_out( + db, + session_identifier=session_identifier, + reason_name=payload.sign_out_reason_name, + fingerprint_hash=payload.fingerprint_hash, + ) + if not session: + raise 
HTTPException(status_code=404, detail="Session not found") + + db.commit() + + return {"message": "Session signed out", "session_identifier": session.session_identifier} + + +@user_sessions_router.post("/logout/expired") +def logout_expired_session( + payload: user_sessions.ExpiredSessionSignOutRequest, + db: Session = Depends(get_db), +): + session = mark_session_signed_out( + db, + session_identifier=payload.session_identifier, + reason_name=payload.sign_out_reason_name, + fingerprint_hash=payload.fingerprint_hash, + ) + if not session: + raise HTTPException(status_code=404, detail="Session not found") + + db.commit() + + return {"message": "Expired session recorded", "session_identifier": session.session_identifier} diff --git a/api/route_util.py b/api/routes/utils.py similarity index 100% rename from api/route_util.py rename to api/routes/utils.py diff --git a/api/routes/well_measurements.py b/api/routes/well_measurements.py index 3a7704dc..348ea06d 100644 --- a/api/routes/well_measurements.py +++ b/api/routes/well_measurements.py @@ -1,48 +1,17 @@ -from typing import List, Optional, Any, Dict +from typing import List, Optional from datetime import datetime, date -import re from fastapi import Depends, APIRouter, Query, HTTPException from fastapi.responses import StreamingResponse -from sqlalchemy.orm import Session, joinedload -from sqlalchemy import select, and_, func +from sqlalchemy.orm import Session +from sqlalchemy import select -from weasyprint import HTML -from io import BytesIO -from collections import defaultdict -from matplotlib.pyplot import figure, close -from base64 import b64encode - -from api.schemas import well_schemas -from api.models.main_models import ( - WellMeasurements, - ObservedPropertyTypeLU, - Units, - Wells, -) +from api.schemas import well +from api.models.meter import ObservedPropertyTypeLU, Units +from api.models.well import WellMeasurements from api.session import get_db -from api.enums import ScopedUser -from google.cloud 
import storage - -from pathlib import Path -from jinja2 import Environment, FileSystemLoader, select_autoescape -from zoneinfo import ZoneInfo - -import zlib -import json -import os -import matplotlib - -matplotlib.use("Agg") # Force non-GUI backend - -WOODPECKER_BUCKET_NAME = os.getenv("GCP_WOODPECKER_BUCKET_NAME", "") - -TEMPLATES_DIR = Path(__file__).resolve().parent.parent / "templates" - -templates = Environment( - loader=FileSystemLoader(TEMPLATES_DIR), - autoescape=select_autoescape(["html", "xml"]), -) +from api.auth.dependencies import ScopedUser +from api.services import well_measurements as well_measurement_service authenticated_well_measurement_router = APIRouter() public_well_measurement_router = APIRouter() @@ -51,11 +20,11 @@ @authenticated_well_measurement_router.post( "/waterlevels", dependencies=[Depends(ScopedUser.WellMeasurementWrite)], - response_model=well_schemas.WellMeasurement, + response_model=well.WellMeasurement, tags=["WaterLevels"], ) def add_waterlevel( - waterlevel: well_schemas.NewWaterLevelMeasurement, db: Session = Depends(get_db) + waterlevel: well.NewWaterLevelMeasurement, db: Session = Depends(get_db) ): # Create the well measurement from the form, qualify with units and property type well_measurement = WellMeasurements( @@ -81,111 +50,21 @@ def add_waterlevel( @public_well_measurement_router.get( "/waterlevels/woodpeckers", - response_model=List[well_schemas.WellMeasurementDTO], + response_model=List[well.WellMeasurementDTO], tags=["WaterLevels"], ) def read_woodpecker_waterlevels( well_id: int = Query(..., description="At least one well ID is required"), ): - SP_JOHNSON_WELL_ID = 2599 - - if well_id != SP_JOHNSON_WELL_ID: - raise HTTPException(status_code=400, detail="Invalid well ID") - - DEPTH_TO_WATER_SENSOR_NAME = "Depth to Water" - - results: List[well_schemas.WellMeasurementDTO] = [] - seen_timestamps: set[str] = set() - - client = storage.Client() - bucket = client.bucket(WOODPECKER_BUCKET_NAME) - - for blob in 
bucket.list_blobs(): - if not blob.name.endswith(".json"): - continue - - content = blob.download_as_text() - payload = json.loads(content) - - device_attributes = payload.get("deviceAttributes") or {} - tz_name = device_attributes.get("timeZone") or "UTC" - ra_number = device_attributes.get("wellId") or "" # e.g. "RA-3502" - - sensor_data = payload.get("sensorData") or [] - depth_sensor = next( - ( - s - for s in sensor_data - if (s.get("sensorName") or "").strip() == DEPTH_TO_WATER_SENSOR_NAME - ), - None, - ) - if not depth_sensor: - # No "Depth to Water" in this file; skip - continue - - measurements = depth_sensor.get("measurements") or [] - for m in measurements: - raw_ts = m.get("timestamp") - if not raw_ts: - continue - - ts = _parse_woodpecker_timestamp(raw_ts, tz_name) - - # Deduplicate by exact instant string (timezone-aware isoformat if tz parsed) - ts_key = ts.isoformat() - if ts_key in seen_timestamps: - continue - seen_timestamps.add(ts_key) - - raw_value = m.get("data") - value = abs(raw_value) if raw_value is not None else None - - measurement_id = _make_measurement_id(well_id, ts, value) - - results.append( - well_schemas.WellMeasurementDTO( - id=measurement_id, - timestamp=ts, - value=value, - submitting_user=well_schemas.WellMeasurementDTO.UserDTO( - full_name="Woodpeckers" - ), - well=well_schemas.WellMeasurementDTO.WellDTO(ra_number=ra_number), - ) - ) - - # Sort combined results across all files - results.sort(key=lambda r: r.timestamp) - return results - - -def _parse_woodpecker_timestamp(ts: str, tz_name: str) -> datetime: - """ - Payload timestamp format: "DD/MM/YYYY HH:mm:ss" - Example: "29/12/2025 00:20:40" - """ - dt_naive = datetime.strptime(ts, "%d/%m/%Y %H:%M:%S") try: - tz = ZoneInfo(tz_name) - except Exception: - # Fallback: keep naive if timezone is missing/invalid - return dt_naive - return dt_naive.replace(tzinfo=tz) - - -def _make_measurement_id(well_id: int, ts: datetime, value: Optional[float]) -> int: - """ - Since the 
incoming format doesn't provide an integer measurement id, - generate a deterministic-ish int id from well_id + timestamp + value. - """ - key = f"{well_id}|{ts.isoformat()}|{value if value is not None else 'null'}" - return zlib.crc32(key.encode("utf-8")) + return well_measurement_service.read_woodpecker_waterlevels(well_id) + except ValueError as exc: + raise HTTPException(status_code=400, detail=str(exc)) @public_well_measurement_router.get( "/waterlevels", - response_model=List[well_schemas.WellMeasurementDTO], + response_model=List[well.WellMeasurementDTO], tags=["WaterLevels"], ) def read_waterlevels( @@ -201,139 +80,18 @@ def read_waterlevels( comparisonYear: Optional[str] = Query(None, pattern=r"^$|^\d{4}$"), db: Session = Depends(get_db), ): - """ - Return well measurements, optionally filtered by from_date/to_date, - with optional averaging and historical comparison. - """ - MONITORING_USE_TYPE_ID = 11 - synthetic_id_counter = -1 - - def group_and_average(measurements, group_by_label: str): - from collections import defaultdict - - grouped = defaultdict(list) - for m in measurements: - key = m.timestamp.strftime( - "%Y-%m" if group_by_label == "month" else "%Y-%m-%d" - ) - grouped[key].append(m.value) - - result = [] - for time_str, values in sorted(grouped.items()): - dt = datetime.strptime( - time_str, - "%Y-%m" if group_by_label == "month" else "%Y-%m-%d", - ) - avg_value = sum(values) / len(values) - nonlocal synthetic_id_counter - result.append( - well_schemas.WellMeasurementDTO( - id=synthetic_id_counter, - timestamp=dt, - value=avg_value, - submitting_user={"full_name": "System"}, - well={"ra_number": "Average of wells"}, - ) - ) - synthetic_id_counter -= 1 - return result - - def get_measurements_by_ids(well_ids, start: Optional[date], end: Optional[date]): - filters = [ - ObservedPropertyTypeLU.name == "Depth to water", - WellMeasurements.well_id.in_(well_ids), - ] - if start: - filters.append(WellMeasurements.timestamp >= start) - if end: - # 
include full day when end is provided - end_dt = datetime.combine(end, datetime.max.time()) - filters.append(WellMeasurements.timestamp <= end_dt) - - stmt = ( - select(WellMeasurements) - .options( - joinedload(WellMeasurements.submitting_user), - joinedload(WellMeasurements.well), - ) - .join(ObservedPropertyTypeLU) - .where(and_(*filters)) - .order_by(WellMeasurements.well_id, WellMeasurements.timestamp) + try: + return well_measurement_service.read_waterlevels( + db=db, + well_ids=well_ids, + from_date=from_date, + to_date=to_date, + is_averaging_all_wells=isAveragingAllWells, + is_comparing_to_1970_average=isComparingTo1970Average, + comparison_year=comparisonYear, ) - return db.scalars(stmt).all() - - # Decide grouping granularity only if both dates are given - group_by = None - if from_date and to_date: - group_by = "month" if (to_date - from_date).days >= 365 else "day" - - if not well_ids and not isComparingTo1970Average and not comparisonYear: - return [] - - response_data: List[well_schemas.WellMeasurementDTO] = [] - - # Averaged selection (if requested) - if isAveragingAllWells and well_ids: - current_measurements = get_measurements_by_ids(well_ids, from_date, to_date) - averaged = group_and_average(current_measurements, group_by or "day") - response_data.extend(averaged) - - # Raw per-well (if not averaging) - if not isAveragingAllWells and well_ids: - response_data.extend(get_measurements_by_ids(well_ids, from_date, to_date)) - - # Helper: add a comparison average for any given year - def add_year_average(year: int, label: str): - # pick full year or same-month window depending on user’s range - if from_date and to_date and (to_date - from_date).days >= 365: - start = datetime(year, 1, 1) - end = datetime(year, 12, 31, 23, 59, 59) - else: - # fallback: use provided month(s) if available, otherwise full year - if from_date and to_date: - start = datetime(year, from_date.month, 1) - import calendar - - last_day = calendar.monthrange(year, 
to_date.month)[1] - end = datetime(year, to_date.month, last_day, 23, 59, 59) - else: - start = datetime(year, 1, 1) - end = datetime(year, 12, 31, 23, 59, 59) - - monitoring_ids = [ - row[0] - for row in db.execute( - select(Wells.id).where(Wells.use_type_id == MONITORING_USE_TYPE_ID) - ).all() - ] - year_measurements = get_measurements_by_ids(monitoring_ids, start, end) - averaged = group_and_average(year_measurements, "month") - for dto in averaged: - dto.well.ra_number = label - response_data.extend(averaged) - - if isComparingTo1970Average: - add_year_average(1970, "1970 Average") - - if comparisonYear: - try: - year_int = int(comparisonYear) - except ValueError: - raise HTTPException( - status_code=400, detail="comparisonYear must be a 4-digit year" - ) - - current_year = datetime.now().year - if year_int < 1900 or year_int > current_year: - raise HTTPException( - status_code=400, - detail=f"comparisonYear must be between 1900 and {current_year}", - ) - - if not (isComparingTo1970Average and year_int == 1970): - add_year_average(year_int, f"{year_int} Average") - - return response_data + except ValueError as exc: + raise HTTPException(status_code=400, detail=str(exc)) @public_well_measurement_router.get( @@ -365,7 +123,7 @@ def read_waterlevel_report_averages( status_code=400, detail="from_date and/or to_date is required for reports" ) - return get_waterlevel_report_averages( + return well_measurement_service.get_waterlevel_report_averages( well_ids=well_ids, from_date=from_date, to_date=to_date, @@ -392,131 +150,20 @@ def download_waterlevels_pdf( Reuses the read_waterlevels() endpoint for data. 
""" - # Reuse the endpoint logic - data = read_waterlevels( - well_ids=well_ids, - from_date=from_date, - to_date=to_date, - isAveragingAllWells=isAveragingAllWells, - isComparingTo1970Average=isComparingTo1970Average, - comparisonYear=comparisonYear, - db=db, - ) - - if not data: - raise HTTPException(status_code=404, detail="No water-level data found") - - from_year = from_date.year - shift_years = set() - if isComparingTo1970Average: - shift_years.add(1970) - if comparisonYear: - try: - shift_years.add(int(comparisonYear)) - except ValueError: - pass # already validated above - - def shift_year_safe(dt, new_year: int): - """Shift dt to new_year, handling Feb 29 / month-end safely.""" - import calendar - - try: - return dt.replace(year=new_year) - except ValueError: - last_day = calendar.monthrange(new_year, dt.month)[1] - return dt.replace(year=new_year, day=min(dt.day, last_day)) - - # Prepare rows for the table and points for the chart - rows = [] - data_by_well = defaultdict(list) - - for m in data: - # m is a WellMeasurementDTO from read_waterlevels - ts = m.timestamp - val = m.value - ra = m.well["ra_number"] if isinstance(m.well, dict) else m.well.ra_number - - rows.append( - { - "timestamp": ts.strftime("%Y-%m-%d %H:%M"), - "depth_to_water": val, - "well_ra_number": ra, - } - ) - - chart_ts = ts - if from_year: - m_match = re.match(r"^(\d{4}) Average$", ra) - if m_match: - yr = int(m_match.group(1)) - if yr in shift_years: - chart_ts = shift_year_safe(ts, from_year) - - data_by_well[ra].append((chart_ts, val)) - - def make_line_chart(data: dict, title: str): - if not data: - return "" - fig = figure(figsize=(10, 6)) - ax = fig.add_subplot(111) - for ra_label, measurements in data.items(): - sorted_m = sorted(measurements, key=lambda x: x[0]) - timestamps = [ts for ts, _ in sorted_m] - values = [val for _, val in sorted_m] - ax.plot(timestamps, values, label=ra_label, marker="o") - ax.set_title(title) - ax.set_xlabel("Time") - ax.set_ylabel("Depth to 
Water") - ax.invert_yaxis() - - # Reserve Space on the top right & move legend outside the plot area to that reserved area - fig.subplots_adjust(right=0.78) - ax.legend( - loc="center left", - bbox_to_anchor=(1.02, 0.5), - borderaxespad=0.0, - frameon=True, - ) - - fig.autofmt_xdate() - buf = BytesIO() - fig.savefig(buf, format="png", bbox_inches="tight") - close(fig) - return b64encode(buf.getvalue()).decode("utf-8") - - chart_b64 = make_line_chart(data_by_well, "Depth of Water over Time") - - report_title = "ROSWELL ARTESIAN BASIN" - report_subtext = None - if isAveragingAllWells: - num_wells = len(well_ids) - well_word = "WELL" if num_wells == 1 else "WELLS" - report_subtext = ( - f"MONTHLY AVERAGE WATER LEVEL WITHIN {num_wells} PVACD RECORDER {well_word}\n" - "AVERAGES TAKEN FROM STEEL TAPE MEASUREMENTS MADE\n" - "ON OR NEAR THE 5TH, 15TH AND 25TH OF EACH MONTH" + try: + pdf_io = well_measurement_service.build_waterlevels_pdf( + db=db, + well_ids=well_ids, + from_date=from_date, + to_date=to_date, + is_averaging_all_wells=isAveragingAllWells, + is_comparing_to_1970_average=isComparingTo1970Average, + comparison_year=comparisonYear, ) - - averages = get_waterlevel_report_averages( - well_ids=well_ids, - from_date=from_date, - to_date=to_date, - db=db, - ) - - html = templates.get_template("waterlevels_report.html").render( - from_date=from_date, - to_date=to_date, - observation_chart=chart_b64, - rows=rows, - report_title=report_title, - report_subtext=report_subtext, - averages=averages, - ) - - pdf_io = BytesIO() - HTML(string=html).write_pdf(pdf_io) - pdf_io.seek(0) + except ValueError as exc: + raise HTTPException(status_code=400, detail=str(exc)) + except LookupError as exc: + raise HTTPException(status_code=404, detail=str(exc)) return StreamingResponse( pdf_io, @@ -528,11 +175,11 @@ def make_line_chart(data: dict, title: str): @authenticated_well_measurement_router.patch( "/waterlevels", dependencies=[Depends(ScopedUser.Admin)], - 
response_model=well_schemas.WellMeasurement, + response_model=well.WellMeasurement, tags=["WaterLevels"], ) def patch_waterlevel( - waterlevel_patch: well_schemas.PatchWaterLevel, db: Session = Depends(get_db) + waterlevel_patch: well.PatchWaterLevel, db: Session = Depends(get_db) ): # Find the measurement well_measurement = db.scalars( @@ -566,104 +213,3 @@ def delete_waterlevel(waterlevel_id: int, db: Session = Depends(get_db)): db.commit() return True - - -def get_waterlevel_report_averages( - *, - well_ids: List[int], - from_date: Optional[date], - to_date: Optional[date], - db: Session, -) -> Dict[str, Any]: - """ - Shared logic used by both JSON endpoint and PDF endpoint. - Returns: - { - "bucket": "month" | "year", - "per_well": [ { well_id, ra_number, period_start, avg_value }, ...], - "all_wells": [ { period_start, avg_value }, ...], - } - """ - DEPTH_TO_WATER_NAME = "Depth to water" - - if not well_ids: - return {"bucket": None, "per_well": [], "all_wells": []} - - if from_date is None and to_date is None: - # Let callers decide whether to raise; for PDF we always have both. 
- return {"bucket": None, "per_well": [], "all_wells": []} - - start_dt = datetime.combine(from_date, datetime.min.time()) if from_date else None - end_dt = datetime.combine(to_date, datetime.max.time()) if to_date else None - - if from_date and to_date: - delta_days = (to_date - from_date).days - bucket_unit = "year" if delta_days >= 365 else "month" - else: - bucket_unit = "month" - - bucket = func.date_trunc(bucket_unit, WellMeasurements.timestamp).label( - "period_start" - ) - - base_filters = [ - ObservedPropertyTypeLU.name == DEPTH_TO_WATER_NAME, - WellMeasurements.well_id.in_(well_ids), - ] - if start_dt: - base_filters.append(WellMeasurements.timestamp >= start_dt) - if end_dt: - base_filters.append(WellMeasurements.timestamp <= end_dt) - - per_well_stmt = ( - select( - WellMeasurements.well_id.label("well_id"), - Wells.ra_number.label("ra_number"), - bucket, - func.avg(WellMeasurements.value).label("avg_value"), - ) - .join(Wells, Wells.id == WellMeasurements.well_id) - .join( - ObservedPropertyTypeLU, - ObservedPropertyTypeLU.id == WellMeasurements.observed_property_id, - ) - .where(and_(*base_filters)) - .group_by(WellMeasurements.well_id, Wells.ra_number, bucket) - .order_by(Wells.ra_number, bucket) - ) - per_well_rows = db.execute(per_well_stmt).all() - - all_wells_stmt = ( - select( - bucket, - func.avg(WellMeasurements.value).label("avg_value"), - ) - .join( - ObservedPropertyTypeLU, - ObservedPropertyTypeLU.id == WellMeasurements.observed_property_id, - ) - .where(and_(*base_filters)) - .group_by(bucket) - .order_by(bucket) - ) - all_wells_rows = db.execute(all_wells_stmt).all() - - return { - "bucket": bucket_unit, - "per_well": [ - { - "well_id": r.well_id, - "ra_number": r.ra_number, - "period_start": r.period_start, - "avg_value": float(r.avg_value) if r.avg_value is not None else None, - } - for r in per_well_rows - ], - "all_wells": [ - { - "period_start": r.period_start, - "avg_value": float(r.avg_value) if r.avg_value is not None else None, 
- } - for r in all_wells_rows - ], - } diff --git a/api/routes/wells.py b/api/routes/wells.py index 47613e56..9039642a 100644 --- a/api/routes/wells.py +++ b/api/routes/wells.py @@ -7,11 +7,13 @@ from fastapi_pagination.ext.sqlalchemy import paginate from fastapi_pagination import LimitOffsetPage -from api.schemas import well_schemas -from api.models.main_models import Locations, WaterSources, WellStatus, WellUseLU, Wells -from api.route_util import _patch, _get +from api.schemas import well +from api.models.location import Locations +from api.models.well import WaterSources, WellStatus, WellUseLU, Wells +from api.routes.utils import _patch, _get from api.session import get_db -from api.enums import ScopedUser, WellSortByField, SortDirection +from api.auth.dependencies import ScopedUser +from api.enums import WellSortByField, SortDirection public_well_router = APIRouter() authenticated_well_router = APIRouter() @@ -20,7 +22,7 @@ @authenticated_well_router.get( "/use_types", dependencies=[Depends(ScopedUser.Read)], - response_model=List[well_schemas.WellUseLU], + response_model=List[well.WellUseLU], tags=["Wells"], ) def get_use_types( @@ -33,7 +35,7 @@ def get_use_types( @authenticated_well_router.get( "/water_sources", dependencies=[Depends(ScopedUser.Read)], - response_model=List[well_schemas.WaterSources], + response_model=List[well.WaterSources], tags=["Wells"], ) def get_water_sources( @@ -46,7 +48,7 @@ def get_water_sources( @authenticated_well_router.get( "/well_status_types", dependencies=[Depends(ScopedUser.Read)], - response_model=List[well_schemas.WellStatus], + response_model=List[well.WellStatus], tags=["Wells"], ) def get_well_status_types( @@ -57,7 +59,7 @@ def get_well_status_types( @public_well_router.get( "/wells/{well_id}", - response_model=well_schemas.WellResponse, + response_model=well.WellResponse, tags=["Wells"], ) def get_well_by_id(well_id: int, db: Session = Depends(get_db)): @@ -79,7 +81,7 @@ def get_well_by_id(well_id: int, db: Session 
= Depends(get_db)): @public_well_router.get( "/wells", - response_model=LimitOffsetPage[well_schemas.WellResponse], + response_model=LimitOffsetPage[well.WellResponse], tags=["Wells"], ) def get_wells( @@ -155,10 +157,10 @@ def sort_by_field_to_schema_field(name: WellSortByField): @authenticated_well_router.patch( "/wells", dependencies=[Depends(ScopedUser.WellWrite)], - response_model=well_schemas.WellResponse, + response_model=well.WellResponse, tags=["Wells"], ) -def update_well(updated_well: well_schemas.WellUpdate, db: Session = Depends(get_db)): +def update_well(updated_well: well.WellUpdate, db: Session = Depends(get_db)): # If present, update location and remove from model if updated_well.location: _patch(db, Locations, updated_well.location.id, updated_well.location) @@ -216,7 +218,7 @@ def update_well(updated_well: well_schemas.WellUpdate, db: Session = Depends(get dependencies=[Depends(ScopedUser.Admin)], tags=["Wells"], ) -def create_well(new_well: well_schemas.SubmitWellCreate, db: Session = Depends(get_db)): +def create_well(new_well: well.SubmitWellCreate, db: Session = Depends(get_db)): # First, commit the new location that was added with the new well new_location_model = Locations( name=new_well.location.name, @@ -267,7 +269,7 @@ def create_well(new_well: well_schemas.SubmitWellCreate, db: Session = Depends(g @authenticated_well_router.get( "/well_locations", dependencies=[Depends(ScopedUser.Read)], - response_model=List[well_schemas.WellResponse], + response_model=List[well.WellResponse], tags=["Wells"], ) def get_wells_locations( @@ -305,7 +307,7 @@ def get_wells_locations( @authenticated_well_router.get( "/well", dependencies=[Depends(ScopedUser.Read)], - response_model=well_schemas.Well, + response_model=well.Well, tags=["Wells"], ) def get_well(well_id: int, db: Session = Depends(get_db)): @@ -323,7 +325,7 @@ def get_well(well_id: int, db: Session = Depends(get_db)): dependencies=[Depends(ScopedUser.Admin)], tags=["Wells"], ) -def 
merge_well(well: well_schemas.SubmitWellMerge, db: Session = Depends(get_db)): +def merge_well(well: well.SubmitWellMerge, db: Session = Depends(get_db)): """ Transfers the history of merge well to target well then deletes the merge well """ diff --git a/api/routes/work_orders.py b/api/routes/work_orders.py new file mode 100644 index 00000000..9ca7ff44 --- /dev/null +++ b/api/routes/work_orders.py @@ -0,0 +1,77 @@ +from datetime import datetime +from typing import Annotated + +from fastapi import APIRouter, Depends, Query +from sqlalchemy.orm import Session + +from api.auth.dependencies import ScopedUser +from api.enums import WorkOrderStatus +from api.schemas import meter +from api.security import get_current_user +from api.services import work_orders as work_order_service +from api.models.user import Users +from api.session import get_db + + +work_orders_router = APIRouter() + + +@work_orders_router.get( + "/work_orders", + dependencies=[Depends(ScopedUser.Read)], + tags=["Work Orders"], +) +def get_work_orders( + filter_by_status: Annotated[list[WorkOrderStatus], Query()] = [ + WorkOrderStatus.Open + ], + start_date: datetime = Query(datetime.strptime("2024-06-01", "%Y-%m-%d")), + work_order_id: Annotated[list[int] | None, Query()] = None, + assigned_user_id: int | None = None, + q: str | None = None, + db: Session = Depends(get_db), +): + return work_order_service.list_work_orders( + db=db, + filter_by_status=[status.value for status in filter_by_status], + start_date=start_date, + work_order_id=work_order_id, + assigned_user_id=assigned_user_id, + q=q, + ) + + +@work_orders_router.post( + "/work_orders", + dependencies=[Depends(ScopedUser.Admin)], + response_model=meter.WorkOrder, + tags=["Work Orders"], +) +def create_work_order( + new_work_order: meter.CreateWorkOrder, db: Session = Depends(get_db) +): + return work_order_service.create_work_order(db=db, new_work_order=new_work_order) + + +@work_orders_router.patch( + "/work_orders", + 
response_model=meter.WorkOrder, + tags=["Work Orders"], +) +def patch_work_order( + patch_work_order_form: meter.PatchWorkOrder, + user: Users = Depends(get_current_user), + db: Session = Depends(get_db), +): + return work_order_service.update_work_order( + db=db, patch_work_order_form=patch_work_order_form, user=user + ) + + +@work_orders_router.delete( + "/work_orders", + dependencies=[Depends(ScopedUser.Admin)], + tags=["Work Orders"], +) +def delete_work_order(work_order_id: int, db: Session = Depends(get_db)): + return work_order_service.delete_work_order(db=db, work_order_id=work_order_id) diff --git a/api/schemas/admin_schemas.py b/api/schemas/admin.py similarity index 100% rename from api/schemas/admin_schemas.py rename to api/schemas/admin.py diff --git a/api/schemas/chlorides.py b/api/schemas/chlorides.py new file mode 100644 index 00000000..dfe7ac06 --- /dev/null +++ b/api/schemas/chlorides.py @@ -0,0 +1,18 @@ +from typing import Optional + +from pydantic import BaseModel + + +class MinMaxAvgMedCount(BaseModel): + min: Optional[float] = None + max: Optional[float] = None + avg: Optional[float] = None + median: Optional[float] = None + count: int = 0 + + +class ChlorideReportNums(BaseModel): + north_west: MinMaxAvgMedCount + north_east: MinMaxAvgMedCount + south_west: MinMaxAvgMedCount + south_east: MinMaxAvgMedCount diff --git a/api/schemas/maintenance.py b/api/schemas/maintenance.py new file mode 100644 index 00000000..72b338b3 --- /dev/null +++ b/api/schemas/maintenance.py @@ -0,0 +1,31 @@ +from datetime import datetime +from typing import List + +from pydantic import BaseModel + + +class MeterSummary(BaseModel): + meter: str + count: int + + +class MaintenanceRow(BaseModel): + date_time: datetime + technician: str + meter: str + trss: str + number_of_repairs: int + number_of_pms: int + + +class MaintenanceSummaryResponse(BaseModel): + repairs_by_meter: List[MeterSummary] + pms_by_meter: List[MeterSummary] + table_rows: List[MaintenanceRow] + + +class 
HomeSummaryResponse(BaseModel): + completed_work_orders: int + repairs_processed: int + reinstallations_processed: int + preventative_maintenance_processed: int diff --git a/api/schemas/meter_schemas.py b/api/schemas/meter.py similarity index 98% rename from api/schemas/meter_schemas.py rename to api/schemas/meter.py index 3fabc9e9..4186fd48 100644 --- a/api/schemas/meter_schemas.py +++ b/api/schemas/meter.py @@ -1,7 +1,7 @@ from datetime import datetime from api.schemas.base import ORMBase -from api.schemas.well_schemas import Well, Location -from api.schemas.security_schemas import User +from api.schemas.well import Well, Location +from api.schemas.security import User from pydantic import BaseModel from decimal import Decimal @@ -74,7 +74,8 @@ class LocationDTO(ORMBase): serial_number: str well: WellDTO | None = None location: LocationDTO | None = None - last_pm: datetime | None = None + last_pm_meter_activity: datetime | None = None + last_location_only_meter_activity: datetime | None = None class MeterStatusLU(ORMBase): diff --git a/api/schemas/notification_schemas.py b/api/schemas/notifications.py similarity index 94% rename from api/schemas/notification_schemas.py rename to api/schemas/notifications.py index c5a35ef0..614e7ebe 100644 --- a/api/schemas/notification_schemas.py +++ b/api/schemas/notifications.py @@ -1,7 +1,7 @@ from datetime import datetime from api.schemas.base import ORMBase -from api.schemas.security_schemas import User +from api.schemas.security import User class NotificationType(ORMBase): diff --git a/api/schemas/ose.py b/api/schemas/ose.py new file mode 100644 index 00000000..c84a897f --- /dev/null +++ b/api/schemas/ose.py @@ -0,0 +1,50 @@ +from datetime import date, datetime, time + +from pydantic import BaseModel, Field + + +class MeterActivityPhotoDTO(BaseModel): + name: str + url: str + + +class ObservationDTO(BaseModel): + observation_time: time + observation_type: str + measurement: float + units: str + + +class 
ActivityDTO(BaseModel): + activity_id: int + ose_request_id: int | None = None + activity_start: datetime + activity_end: datetime + activity_type: str + well_ra_number: str | None + well_ose_tag: str | None + description: str + services: list[str] = Field(default_factory=list) + notes: list[str] = Field(default_factory=list) + parts_used: list[str] = Field(default_factory=list) + observations: list[ObservationDTO] = Field(default_factory=list) + meter_activity_photos: list[MeterActivityPhotoDTO] = Field(default_factory=list) + + +class MeterHistoryDTO(BaseModel): + serial_number: str + activities: list[ActivityDTO] = Field(default_factory=list) + + +class DateHistoryDTO(BaseModel): + date: date + meters: list[MeterHistoryDTO] = Field(default_factory=list) + + +class DisapprovalStatus(BaseModel): + ose_request_id: int + status: str + notes: str | None = None + disapproval_activity: ActivityDTO | None = None + new_activities: list[ActivityDTO] | None = None + diff --git a/api/schemas/part_schemas.py b/api/schemas/parts.py similarity index 97% rename from api/schemas/part_schemas.py rename to api/schemas/parts.py index deca5789..767819ad 100644 --- a/api/schemas/part_schemas.py +++ b/api/schemas/parts.py @@ -1,7 +1,8 @@ -from typing import List, Literal, Optional from datetime import date, datetime +from typing import List, Literal, Optional + from api.schemas.base import ORMBase -from api.schemas.meter_schemas import MeterTypeLU +from api.schemas.meter import MeterTypeLU class PartTypeLU(ORMBase): diff --git a/api/schemas/security_schemas.py b/api/schemas/security.py similarity index 91% rename from api/schemas/security_schemas.py rename to api/schemas/security.py index 27201f8d..b99bb69a 100644 --- a/api/schemas/security_schemas.py +++ b/api/schemas/security.py @@ -51,10 +51,11 @@ class User(ORMBase): avatar_img: str | None = None -class Token(BaseModel): - access_token: str - token_type: str - user: User +class Token(BaseModel): + access_token: str + token_type: 
str + user: User + session_identifier: str | None = None class TokenData(ORMBase): diff --git a/api/schemas/settings.py b/api/schemas/settings.py new file mode 100644 index 00000000..1968f9b2 --- /dev/null +++ b/api/schemas/settings.py @@ -0,0 +1,14 @@ +from api.schemas.base import ORMBase + + +class RedirectPageUpdate(ORMBase): + redirect_page: str + + +class DisplayNameUpdate(ORMBase): + display_name: str + + +class PasswordResetRequest(ORMBase): + current_password: str + new_password: str diff --git a/api/schemas/user_sessions.py b/api/schemas/user_sessions.py new file mode 100644 index 00000000..2381bef0 --- /dev/null +++ b/api/schemas/user_sessions.py @@ -0,0 +1,52 @@ +from datetime import datetime +from typing import Optional + +from api.schemas.base import ORMBase + + +class SessionSignOutRequest(ORMBase): + sign_out_reason_name: str + fingerprint_hash: Optional[str] = None + + +class ExpiredSessionSignOutRequest(SessionSignOutRequest): + session_identifier: str + + +class UserSessionSummary(ORMBase): + session_identifier: str + device_label: str | None = None + device_type: str | None = None + browser: str | None = None + operating_system: str | None = None + ip_address: str | None = None + signed_in_at: datetime + last_seen_at: datetime + signed_out_at: datetime | None = None + is_active: bool + sign_out_reason_name: str | None = None + is_current: bool + + +class KnownDeviceSummary(ORMBase): + device_key: str + device_label: str | None = None + device_type: str | None = None + browser: str | None = None + operating_system: str | None = None + session_count: int + active_session_count: int + signed_in_at_first: datetime + last_seen_at: datetime + is_current_device: bool + + +class UserSessionsResponse(ORMBase): + current_session_identifier: str | None = None + sessions: list[UserSessionSummary] + known_devices: list[KnownDeviceSummary] + + +class CurrentSessionStatusResponse(ORMBase): + session_identifier: str + is_active: bool diff --git 
a/api/schemas/well_schemas.py b/api/schemas/well.py similarity index 98% rename from api/schemas/well_schemas.py rename to api/schemas/well.py index e19cccd6..d77b3ff3 100644 --- a/api/schemas/well_schemas.py +++ b/api/schemas/well.py @@ -1,7 +1,7 @@ from datetime import datetime from pydantic import BaseModel from typing import List -from api.schemas.security_schemas import User +from api.schemas.security import User from api.schemas.base import ORMBase diff --git a/api/security.py b/api/security.py index ddb0fb2d..819e1651 100644 --- a/api/security.py +++ b/api/security.py @@ -10,16 +10,20 @@ from sqlalchemy.orm import joinedload, undefer, Session from sqlalchemy.sql import select -from api.models.main_models import Users, UserRoles, SecurityScopes -from api.schemas import security_schemas +from api.models.user import Users, UserRoles, SecurityScopes, UserSessions +from api.schemas import security as security_schema +from api.config import settings from api.session import get_db oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token") pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto") -SECRET_KEY = "09d25e194fbb6ca2556c818166b7a9563b93f7099f6f0f4caa6cf63b88e8d3e7" -ALGORITHM = "HS256" -ACCESS_TOKEN_EXPIRE_HOURS = 8 +SECRET_KEY = settings.JWT_SECRET_KEY +ALGORITHM = settings.JWT_ALGORITHM +ACCESS_TOKEN_EXPIRE_HOURS = settings.ACCESS_TOKEN_EXPIRE_HOURS + +if not SECRET_KEY: + raise RuntimeError("JWT_SECRET_KEY environment variable must be set.") invalid_credentials_exception = HTTPException( status_code=status.HTTP_401_UNAUTHORIZED, @@ -39,6 +43,12 @@ headers={"WWW-Authenticate": "Bearer"}, ) +inactive_session_exception = HTTPException( + status_code=440, + detail="Session is no longer active. 
Please login again.", + headers={"WWW-Authenticate": "Bearer"}, +) + # Return the current user if credentials were correct, False if not def authenticate_user(login_identifier: str, password: str, db: Session): @@ -110,26 +120,67 @@ def get_current_user( db: Annotated[Session, Depends(get_db)], ) -> Users: try: - payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM]) + payload = decode_access_token(token) username: str = payload.get("sub") if username is None: raise invalid_credentials_exception + session_identifier: str | None = payload.get("sid") + if session_identifier is None: + raise invalid_credentials_exception + user = get_user(username=username, db=db) if user is None: raise invalid_credentials_exception + session = ( + db.query(UserSessions) + .filter( + UserSessions.session_identifier == session_identifier, + UserSessions.user_id == user.id, + UserSessions.is_active.is_(True), + UserSessions.signed_out_at.is_(None), + ) + .first() + ) + if session is None: + raise inactive_session_exception + return user except ExpiredSignatureError: raise expired_token_exception + except HTTPException: + raise + except Exception: raise invalid_credentials_exception +def decode_access_token(token: str, verify_exp: bool = True) -> dict: + decode_options = None + if not verify_exp: + decode_options = {"verify_exp": False} + + return jwt.decode( + token, + SECRET_KEY, + algorithms=[ALGORITHM], + options=decode_options, + ) + + +def get_session_identifier_from_token( + token: str, verify_exp: bool = True +) -> str | None: + payload = decode_access_token(token, verify_exp=verify_exp) + session_identifier: str | None = payload.get("sid") + return session_identifier + + # Provide a list of scope_strings, recieve the current user if those scopes are present, raise auth exception if not def scoped_user(scopes): def get_user(current_user: Users = Security(get_current_user)): @@ -150,10 +201,10 @@ def get_user(current_user: Users = Security(get_current_user)): 
@authenticated_router.get( - "/users/me", response_model=security_schemas.User, tags=["Login"] + "/users/me", response_model=security_schema.User, tags=["Login"] ) def read_users_me( - current_user: security_schemas.User = Depends(get_current_user), + current_user: security_schema.User = Depends(get_current_user), ): return current_user diff --git a/api/services/__init__.py b/api/services/__init__.py new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/api/services/__init__.py @@ -0,0 +1 @@ + diff --git a/api/services/activities.py b/api/services/activities.py new file mode 100644 index 00000000..8c754bec --- /dev/null +++ b/api/services/activities.py @@ -0,0 +1,360 @@ +from datetime import datetime + +from fastapi import HTTPException +from sqlalchemy import select, text +from sqlalchemy.exc import IntegrityError +from sqlalchemy.orm import Session, joinedload, undefer + +from api.models.location import Locations +from api.models.meter import ( + ActivityTypeLU, + MeterActivities, + MeterObservations, + MeterStatusLU, + Meters, + NoteTypeLU, + ObservedPropertyTypeLU, + ServiceTypeLU, + Units, +) +from api.models.part import Parts +from api.models.user import Users +from api.models.well import Wells +from api.schemas import meter +from api.services import storage as storage_service + + +def _get_hq_location(db: Session): + return db.scalars(select(Locations).where(Locations.type_id == 1)).first() + + +async def create_activity( + db: Session, + activity_form: meter.ActivityForm, + user: Users, + photos, + max_photos_per_meter: int, +): + update_meter_state = True + user_level = user.user_role.name + + last_activity = db.scalars( + select(MeterActivities) + .where(MeterActivities.meter_id == activity_form.activity_details.meter_id) + .order_by(MeterActivities.timestamp_end.desc()) + .limit(1) + ).first() + + activity_date = activity_form.activity_details.date.date() + starttime = activity_form.activity_details.start_time.time().replace(second=0) + 
endtime = activity_form.activity_details.end_time.time().replace(second=0)
+    start_datetime = datetime.combine(activity_date, starttime)
+    end_datetime = datetime.combine(activity_date, endtime)
+
+    if last_activity and last_activity.timestamp_end > end_datetime:
+        update_meter_state = False
+        if user_level != "Admin":
+            raise HTTPException(
+                status_code=409,
+                detail="Submitted activity is older than the last activity.",
+            )
+
+    activity_meter = db.scalars(
+        select(Meters).where(activity_form.activity_details.meter_id == Meters.id)
+    ).first()
+    activity_type = db.scalars(
+        select(ActivityTypeLU).where(
+            activity_form.activity_details.activity_type_id == ActivityTypeLU.id
+        )
+    ).first()
+    hq_location = _get_hq_location(db)
+
+    activity_well = None
+    if activity_form.current_installation.well_id:
+        activity_well = db.scalars(
+            select(Wells).where(activity_form.current_installation.well_id == Wells.id)
+        ).first()
+        activity_location = activity_well.location.id
+    else:
+        activity_location = hq_location.id
+
+    meter_activity = MeterActivities(
+        timestamp_start=start_datetime,
+        timestamp_end=end_datetime,
+        description=activity_form.maintenance_repair.description,
+        submitting_user_id=activity_form.activity_details.user_id,
+        meter_id=activity_form.activity_details.meter_id,
+        activity_type_id=activity_form.activity_details.activity_type_id,
+        location_id=activity_location,
+        ose_share=activity_form.activity_details.share_ose,
+        water_users=activity_form.current_installation.water_users,
+    )
+    if activity_form.activity_details.work_order_id:
+        meter_activity.work_order_id = activity_form.activity_details.work_order_id
+
+    try:
+        db.add(meter_activity)
+        db.commit()
+        db.refresh(meter_activity)
+    except IntegrityError:
+        # Roll back the failed transaction before raising, otherwise the
+        # session is left in a failed state and every later use of `db`
+        # in this request would raise PendingRollbackError.
+        db.rollback()
+        raise HTTPException(
+            status_code=409, detail="Activity overlaps with existing activity."
+ ) + + db.flush() + + share_ose_observation = bool(activity_form.activity_details.share_ose) + for observation_form in activity_form.observations: + observation_datetime = datetime.combine( + activity_date, observation_form.time.time() + ) + db.add( + MeterObservations( + timestamp=observation_datetime, + value=observation_form.reading, + observed_property_type_id=observation_form.property_type_id, + unit_id=observation_form.unit_id, + submitting_user_id=activity_form.activity_details.user_id, + meter_id=activity_form.activity_details.meter_id, + location_id=activity_location, + ose_share=share_ose_observation, + ) + ) + + notes = db.scalars( + select(NoteTypeLU).where(NoteTypeLU.id.in_(activity_form.notes.selected_note_ids)) + ).all() + meter_activity.notes = notes + + status_note_type = db.scalars( + select(NoteTypeLU).where( + NoteTypeLU.slug == activity_form.notes.working_on_arrival_slug + ) + ).first() + meter_activity.notes.append(status_note_type) + + used_parts = db.scalars( + select(Parts).where(Parts.id.in_(activity_form.part_used_ids)) + ).all() + meter_activity.parts_used = used_parts + for used_part in used_parts: + used_part.count -= 1 + + services = db.scalars( + select(ServiceTypeLU).where( + ServiceTypeLU.id.in_(activity_form.maintenance_repair.service_type_ids) + ) + ).all() + meter_activity.services_performed = services + + db.commit() + + meter_statuses = { + status.status_name: status.id for status in db.scalars(select(MeterStatusLU)).all() + } + if update_meter_state: + if activity_type.name in ["Uninstall", "Uninstall and Hold"]: + activity_meter.location_id = hq_location.id + activity_meter.well_id = None + activity_meter.water_users = None + activity_meter.status_id = ( + meter_statuses["On Hold"] + if activity_type.name == "Uninstall and Hold" + else meter_statuses["Warehouse"] + ) + if activity_type.name == "Install": + activity_meter.well_id = activity_well.id + activity_meter.location_id = activity_location + activity_meter.status_id = 
meter_statuses["Installed"] + activity_meter.water_users = activity_form.current_installation.water_users + if activity_type.name == "Scrap": + activity_meter.well_id = None + activity_meter.location_id = None + activity_meter.status_id = meter_statuses["Scrapped"] + activity_meter.water_users = None + activity_meter.meter_owner = None + if activity_type.name == "Sell": + activity_meter.well_id = None + activity_meter.location_id = None + activity_meter.status_id = meter_statuses["Sold"] + activity_meter.water_users = None + activity_meter.meter_owner = activity_form.current_installation.meter_owner + if activity_type.name == "Change Water Users": + activity_meter.water_users = activity_form.current_installation.water_users + + if activity_type.name != "Uninstall": + activity_meter.contact_name = activity_form.current_installation.contact_name + activity_meter.contact_phone = activity_form.current_installation.contact_phone + activity_meter.notes = activity_form.current_installation.notes + + db.commit() + + if photos: + await storage_service.save_activity_photos( + db=db, + meter_activity=meter_activity, + photos=photos, + max_photos_per_meter=max_photos_per_meter, + ) + + return meter_activity + + +def patch_activity(db: Session, patch_activity_form: meter.PatchActivity): + activity = db.scalars( + select(MeterActivities).where( + MeterActivities.id == patch_activity_form.activity_id + ) + ).first() + + activity.timestamp_start = patch_activity_form.timestamp_start + activity.timestamp_end = patch_activity_form.timestamp_end + activity.description = patch_activity_form.description + activity.ose_share = patch_activity_form.ose_share + activity.water_users = patch_activity_form.water_users + activity.location_id = ( + _get_hq_location(db).id + if patch_activity_form.location_id is None + else patch_activity_form.location_id + ) + + delete_sql = text('DELETE FROM "Notes" WHERE meter_activity_id = :activity_id') + db.execute(delete_sql, {"activity_id": 
patch_activity_form.activity_id}) + if patch_activity_form.note_ids: + insert_sql = text( + 'INSERT INTO "Notes" (meter_activity_id, note_type_id) VALUES (:activity_id, :note_id)' + ) + for note_id in patch_activity_form.note_ids: + db.execute( + insert_sql, + {"activity_id": patch_activity_form.activity_id, "note_id": note_id}, + ) + + delete_sql = text('DELETE FROM "PartsUsed" WHERE meter_activity_id = :activity_id') + db.execute(delete_sql, {"activity_id": patch_activity_form.activity_id}) + if patch_activity_form.part_ids: + insert_sql = text( + 'INSERT INTO "PartsUsed" (meter_activity_id, part_id) VALUES (:activity_id, :part_id)' + ) + for part_id in patch_activity_form.part_ids: + db.execute( + insert_sql, + {"activity_id": patch_activity_form.activity_id, "part_id": part_id}, + ) + + delete_sql = text( + 'DELETE FROM "ServicesPerformed" WHERE meter_activity_id = :activity_id' + ) + db.execute(delete_sql, {"activity_id": patch_activity_form.activity_id}) + if patch_activity_form.service_ids: + insert_sql = text( + 'INSERT INTO "ServicesPerformed" (meter_activity_id, service_type_id) VALUES (:activity_id, :service_id)' + ) + for service_id in patch_activity_form.service_ids: + db.execute( + insert_sql, + { + "activity_id": patch_activity_form.activity_id, + "service_id": service_id, + }, + ) + + db.commit() + return {"status": "success"} + + +def delete_activity(db: Session, activity_id: int): + activity = db.scalars( + select(MeterActivities).where(MeterActivities.id == activity_id) + ).first() + if not activity: + raise HTTPException(status_code=404, detail="Activity not found.") + + storage_service.delete_activity_photos(db, activity_id) + for table_name in ["Notes", "ServicesPerformed", "PartsUsed"]: + sql = text(f'DELETE FROM "{table_name}" WHERE meter_activity_id = :activity_id') + db.execute(sql, {"activity_id": activity_id}) + + db.delete(activity) + db.commit() + return {"status": "success"} + + +def patch_observation( + db: Session, 
patch_observation_form: meter.PatchObservation +): + observation = db.scalars( + select(MeterObservations).where( + MeterObservations.id == patch_observation_form.observation_id + ) + ).first() + + observation.timestamp = patch_observation_form.timestamp + observation.value = patch_observation_form.value + observation.notes = patch_observation_form.notes + observation.observed_property_type_id = ( + patch_observation_form.observed_property_type_id + ) + observation.unit_id = patch_observation_form.unit_id + observation.meter_id = patch_observation_form.meter_id + observation.submitting_user_id = patch_observation_form.submitting_user_id + observation.ose_share = patch_observation_form.ose_share + observation.location_id = ( + _get_hq_location(db).id + if patch_observation_form.location_id is None + else patch_observation_form.location_id + ) + + db.commit() + return {"status": "success"} + + +def delete_observation(db: Session, observation_id: int): + observation = db.scalars( + select(MeterObservations).where(MeterObservations.id == observation_id) + ).first() + if not observation: + raise HTTPException(status_code=404, detail="Observation not found.") + db.delete(observation) + db.commit() + return {"status": "success"} + + +def get_activity_types(db: Session, user: Users): + if user.user_role.name not in ["Admin", "Technician"]: + return [] + + activities = db.scalars(select(ActivityTypeLU)).all() + if user.user_role.name != "Admin": + return [ + activity for activity in activities if activity.name not in ["Sell", "Scrap"] + ] + return activities + + +def get_users(db: Session): + return db.scalars( + select(Users).options(undefer(Users.user_role_id)).where(Users.disabled == False) + ).all() + + +def get_units(db: Session): + return db.scalars(select(Units)).all() + + +def get_observed_property_types(db: Session): + return ( + db.scalars( + select(ObservedPropertyTypeLU).options(joinedload(ObservedPropertyTypeLU.units)) + ) + .unique() + .all() + ) + + +def 
get_service_types(db: Session): + return db.scalars(select(ServiceTypeLU)).all() + + +def get_note_types(db: Session): + return db.scalars(select(NoteTypeLU)).all() diff --git a/api/services/meters.py b/api/services/meters.py new file mode 100644 index 00000000..61f22ce4 --- /dev/null +++ b/api/services/meters.py @@ -0,0 +1,98 @@ +from enum import Enum + +from sqlalchemy import select +from sqlalchemy.orm import Session, joinedload + +from api.models.meter import MeterActivities, MeterObservations +from api.models.part import Parts, PartsUsed +from api.models.well import Wells +from api.services.storage import create_signed_url + + +class HistoryType(Enum): + Activity = "Activity" + Observation = "Observation" + LocationChange = "LocationChange" + + +def get_meter_history(db: Session, meter_id: int): + activities = ( + db.scalars( + select(MeterActivities) + .options( + joinedload(MeterActivities.location), + joinedload(MeterActivities.submitting_user), + joinedload(MeterActivities.activity_type), + joinedload(MeterActivities.parts_used_links) + .joinedload(PartsUsed.part) + .joinedload(Parts.part_type), + joinedload(MeterActivities.notes), + joinedload(MeterActivities.services_performed), + ) + .filter(MeterActivities.meter_id == meter_id) + ) + .unique() + .all() + ) + + observations = db.scalars( + select(MeterObservations) + .options( + joinedload(MeterObservations.submitting_user), + joinedload(MeterObservations.observed_property), + joinedload(MeterObservations.unit), + joinedload(MeterObservations.location), + ) + .filter(MeterObservations.meter_id == meter_id) + ).all() + + formatted_history_items = [] + item_id = 0 + + for activity in activities: + activity.location.geom = None + activity_well = db.scalars( + select(Wells).where(Wells.location_id == activity.location_id) + ).first() + photos = [ + { + "id": photo.id, + "file_name": photo.file_name, + "url": create_signed_url(photo.gcs_path), + "uploaded_at": photo.uploaded_at, + } + for photo in 
activity.photos + ] + formatted_history_items.append( + { + "id": item_id, + "history_type": HistoryType.Activity, + "well": activity_well, + "location": activity.location, + "activity_type": activity.activity_type_id, + "date": activity.timestamp_start, + "history_item": activity, + "photos": photos, + } + ) + item_id += 1 + + for observation in observations: + observation.location.geom = None + observation_well = db.scalars( + select(Wells).where(Wells.location_id == observation.location_id) + ).first() + formatted_history_items.append( + { + "id": item_id, + "history_type": HistoryType.Observation, + "well": observation_well, + "location": observation.location, + "date": observation.timestamp, + "history_item": observation, + } + ) + item_id += 1 + + formatted_history_items.sort(key=lambda item: item["date"], reverse=True) + return formatted_history_items diff --git a/api/services/ose.py b/api/services/ose.py new file mode 100644 index 00000000..6cdc3750 --- /dev/null +++ b/api/services/ose.py @@ -0,0 +1,380 @@ +from datetime import datetime +import os + +from fastapi import HTTPException +from sqlalchemy import and_, select +from sqlalchemy.orm import Session, joinedload, selectinload + +from api.models.meter import ( + ActivityTypeLU, + MeterActivities, + MeterObservations, + MeterStatusLU, + Meters, + NoteTypeLU, + ObservedPropertyTypeLU, + ServiceTypeLU, + meterRegisters, +) +from api.models.well import Wells +from api.models.work_order import workOrders +from api.schemas import meter, ose + + +API_BASE_URL = os.getenv("API_BASE_URL", "") + + +def build_activity_photo_url(activity_id: int, photo_name: str) -> str: + return f"{API_BASE_URL}/activities/{activity_id}/photos/{photo_name}" + + +def _get_observations_for_activity( + activity_start: datetime, + activity_end: datetime, + meter_id: int, + observations: list[MeterObservations], +) -> list[ose.ObservationDTO]: + observation_list = [] + for observation in observations: + if ( + observation.timestamp >= 
activity_start + and observation.timestamp <= activity_end + and observation.meter_id == meter_id + ): + observation_list.append( + ose.ObservationDTO( + observation_time=observation.timestamp.time(), + observation_type=observation.observed_property.name, + measurement=observation.value, + units=observation.unit.name_short, + ) + ) + return observation_list + + +def _serialize_activity( + activity: MeterActivities, observations: list[MeterObservations] +) -> ose.ActivityDTO: + notes_strings = [note.note for note in activity.notes] + parts_used_strings = [ + f"{part.part_type.name} ({part.part_number})" for part in activity.parts_used_links + ] + services_performed_strings = [ + service.service_name for service in activity.services_performed + ] + activity_observations = _get_observations_for_activity( + activity.timestamp_start, + activity.timestamp_end, + activity.meter_id, + observations, + ) + well_ra_number = activity.well.ra_number if activity.well else None + well_ose_tag = activity.well.osetag if activity.well else None + meter_activity_photos = [ + ose.MeterActivityPhotoDTO( + name=photo.file_name, + url=build_activity_photo_url(activity.id, photo.file_name), + ) + for photo in (activity.photos or []) + ] + + return ose.ActivityDTO( + activity_id=activity.id, + ose_request_id=activity.work_order.ose_request_id if activity.work_order else None, + activity_type=activity.activity_type.name, + activity_start=activity.timestamp_start, + activity_end=activity.timestamp_end, + well_ra_number=well_ra_number, + well_ose_tag=well_ose_tag, + description=activity.description, + services=services_performed_strings, + notes=notes_strings, + parts_used=parts_used_strings, + observations=activity_observations, + meter_activity_photos=meter_activity_photos, + ) + + +def reorganize_history( + activities: list[MeterActivities], observations: list[MeterObservations] +) -> list[ose.DateHistoryDTO]: + history: dict[str, dict[str, list[MeterActivities]]] = {} + for activity in 
activities: + activity_date = activity.timestamp_start.strftime("%Y-%m-%d") + meter_serial = activity.meter.serial_number + history.setdefault(activity_date, {}).setdefault(meter_serial, []).append(activity) + + history_list: list[ose.DateHistoryDTO] = [] + for activity_date, meters in history.items(): + meter_history_list = [] + for meter_serial, meter_activities in meters.items(): + meter_history_list.append( + ose.MeterHistoryDTO( + serial_number=meter_serial, + activities=[ + _serialize_activity(activity, observations) + for activity in meter_activities + ], + ) + ) + history_list.append( + ose.DateHistoryDTO(date=activity_date, meters=meter_history_list) + ) + + return history_list + + +def get_shared_history( + db: Session, start_datetime: datetime, end_datetime: datetime +) -> list[ose.DateHistoryDTO]: + activities = ( + db.scalars( + select(MeterActivities) + .options( + joinedload(MeterActivities.activity_type), + joinedload(MeterActivities.parts_used_links), + joinedload(MeterActivities.meter), + joinedload(MeterActivities.work_order), + joinedload(MeterActivities.well), + selectinload(MeterActivities.photos), + ) + .filter( + and_( + MeterActivities.timestamp_end >= start_datetime, + MeterActivities.timestamp_end <= end_datetime, + MeterActivities.ose_share == True, + ) + ) + ) + .unique() + .all() + ) + + observations = ( + db.scalars( + select(MeterObservations) + .options( + joinedload(MeterObservations.observed_property), + joinedload(MeterObservations.unit), + joinedload(MeterObservations.meter), + ) + .filter( + and_( + MeterObservations.timestamp >= start_datetime, + MeterObservations.timestamp <= end_datetime, + MeterObservations.ose_share == True, + ) + ) + ) + .unique() + .all() + ) + + return reorganize_history(list(activities), list(observations)) + + +def get_maintenance_by_request_ids( + db: Session, ose_request_ids: list[int] | None +) -> list[ose.DateHistoryDTO]: + activities = ( + db.scalars( + select(MeterActivities) + .options( + 
joinedload(MeterActivities.activity_type),
+                # NOTE(review): _serialize_activity iterates
+                # activity.parts_used_links, so eager-load that relationship
+                # (not parts_used) or serialization lazy-loads per activity.
+                joinedload(MeterActivities.parts_used_links),
+                joinedload(MeterActivities.meter).joinedload(Meters.well),
+                joinedload(MeterActivities.work_order),
+                selectinload(MeterActivities.photos),
+            )
+            .join(workOrders)
+            .where(
+                and_(
+                    workOrders.ose_request_id.in_(ose_request_ids),
+                    MeterActivities.ose_share == True,
+                )
+            )
+        )
+        .unique()
+        .all()
+    )
+
+    activities_list = list(activities)
+    if not activities_list:
+        return []
+
+    activities_start_date = min(activity.timestamp_start for activity in activities_list)
+    activities_end_date = max(activity.timestamp_end for activity in activities_list)
+    observations = (
+        db.scalars(
+            select(MeterObservations)
+            .options(
+                joinedload(MeterObservations.observed_property),
+                joinedload(MeterObservations.unit),
+                joinedload(MeterObservations.meter),
+            )
+            .filter(
+                and_(
+                    MeterObservations.timestamp >= activities_start_date,
+                    MeterObservations.timestamp <= activities_end_date,
+                    MeterObservations.ose_share == True,
+                )
+            )
+        )
+        .unique()
+        .all()
+    )
+
+    return reorganize_history(activities_list, list(observations))
+
+
+def get_meter_information(db: Session, serial_number: str) -> meter.PublicMeter:
+    """Return the public-facing details for the meter with *serial_number*.
+
+    Raises HTTPException(404) when no meter matches.
+    """
+    query = select(Meters).options(
+        joinedload(Meters.meter_type),
+        joinedload(Meters.well).joinedload(Wells.location),
+        joinedload(Meters.status),
+        joinedload(Meters.meter_register).joinedload(meterRegisters.dial_units),
+        joinedload(Meters.meter_register).joinedload(meterRegisters.totalizer_units),
+    )
+    # Do not shadow the imported `meter` schema module: naming the ORM row
+    # `meter` made every `meter.PublicMeter(...)` reference below resolve
+    # against the row object and raise AttributeError at runtime.
+    meter_record = db.scalars(query.filter(Meters.serial_number == serial_number)).first()
+
+    if not meter_record:
+        raise HTTPException(status_code=404, detail="Meter not found")
+
+    return meter.PublicMeter(
+        serial_number=meter_record.serial_number,
+        status=meter_record.status.status_name,
+        well=meter.PublicMeter.PublicWell(
+            ra_number=meter_record.well.ra_number,
+            osetag=meter_record.well.osetag,
+            trss=meter_record.well.location.trss,
+            longitude=meter_record.well.location.longitude,
+            latitude=meter_record.well.location.latitude,
+        )
+        if meter_record.well
+        else None,
+        notes=meter_record.notes,
+        meter_type=meter.PublicMeter.MeterType(
+            brand=meter_record.meter_type.brand,
+            model=meter_record.meter_type.model,
+            size=meter_record.meter_type.size,
+        ),
+        meter_register=meter.PublicMeter.MeterRegister(
+            ratio=meter_record.meter_register.ratio,
+            number_of_digits=meter_record.meter_register.number_of_digits,
+            decimal_digits=meter_record.meter_register.decimal_digits,
+            dial_units=meter_record.meter_register.dial_units.name,
+            totalizer_units=meter_record.meter_register.totalizer_units.name,
+            multiplier=meter_record.meter_register.multiplier,
+        )
+        if meter_record.meter_register
+        else None,
+    )
+
+
+def get_disapproval_response(
+    db: Session, ose_request_id: int
+) -> ose.DisapprovalStatus:
+    work_order = db.scalars(
+        select(workOrders)
+        .options(joinedload(workOrders.status))
+        .where(workOrders.ose_request_id == ose_request_id)
+    ).first()
+
+    if not work_order or not work_order.title.startswith("OSE Data Issue"):
+        raise HTTPException(status_code=404, detail="Work order not found")
+
+    disapproval_activity = ose.ActivityDTO(
+        activity_id=99999,
+        activity_type="Disapproval",
+        activity_start=datetime.now(),
+        activity_end=datetime.now(),
+        well_ra_number=None,
+        well_ose_tag=None,
+        description="Not yet implemented, need activity ID in disapproval",
+        services=[],
+        notes=[],
+        parts_used=[],
+        observations=[],
+    )
+
+    new_activities = (
+        db.scalars(
+            select(MeterActivities)
+            .options(
+                joinedload(MeterActivities.activity_type),
+                # NOTE(review): same as above — load parts_used_links, the
+                # relationship _serialize_activity actually reads.
+                joinedload(MeterActivities.parts_used_links),
+                joinedload(MeterActivities.meter).joinedload(Meters.well),
+                joinedload(MeterActivities.work_order),
+                selectinload(MeterActivities.photos),
+            )
+            .where(MeterActivities.work_order_id == work_order.id)
+        )
+        .unique()
+        .all()
+    )
+
+    new_activities_dto = []
+    for activity in new_activities:
+        observations = (
+            db.scalars(
+                select(MeterObservations)
+                .options(
+                    joinedload(MeterObservations.observed_property),
+                    joinedload(MeterObservations.unit),
+                )
+                .filter(
+                    and_(
MeterObservations.timestamp >= activity.timestamp_start, + MeterObservations.timestamp <= activity.timestamp_end, + MeterObservations.meter_id == activity.meter_id, + MeterObservations.ose_share == True, + ) + ) + ) + .unique() + .all() + ) + new_activities_dto.append(_serialize_activity(activity, list(observations))) + + return ose.DisapprovalStatus( + ose_request_id=work_order.ose_request_id, + status=work_order.status.name, + notes=work_order.notes, + disapproval_activity=disapproval_activity, + new_activities=new_activities_dto, + ) + + +def get_db_types(db: Session) -> meter.DBTypesForOSE: + return meter.DBTypesForOSE( + activity_types=[ + meter.DBTypesForOSE.GeneralTypeInfo( + name=item.name, description=item.description + ) + for item in db.scalars(select(ActivityTypeLU)).all() + ], + observed_property_types=[ + meter.DBTypesForOSE.GeneralTypeInfo( + name=item.name, description=item.description + ) + for item in db.scalars(select(ObservedPropertyTypeLU)).all() + ], + service_types=[ + meter.DBTypesForOSE.GeneralTypeInfo( + name=item.service_name, description=item.description + ) + for item in db.scalars(select(ServiceTypeLU)).all() + ], + note_types=[ + meter.DBTypesForOSE.GeneralTypeInfo( + name=item.note, description=item.details + ) + for item in db.scalars(select(NoteTypeLU)).all() + ], + meter_status_types=[ + meter.DBTypesForOSE.GeneralTypeInfo( + name=item.status_name, description=item.description + ) + for item in db.scalars(select(MeterStatusLU)).all() + ], + ) diff --git a/api/services/parts.py b/api/services/parts.py new file mode 100644 index 00000000..fddccfc4 --- /dev/null +++ b/api/services/parts.py @@ -0,0 +1,346 @@ +from datetime import date, datetime, time +from io import BytesIO +from pathlib import Path +from typing import Optional + +from fastapi import HTTPException +from jinja2 import Environment, FileSystemLoader, select_autoescape +from sqlalchemy import func, literal, select, union_all +from sqlalchemy.orm import Session, 
selectinload +from weasyprint import HTML + +from api.models.meter import MeterActivities, meterRegisters +from api.models.part import Parts, PartsAdded, PartsUsed +from api.schemas import parts + + +TEMPLATES_DIR = Path(__file__).resolve().parent.parent / "templates" +templates = Environment( + loader=FileSystemLoader(TEMPLATES_DIR), + autoescape=select_autoescape(["html", "xml"]), +) + + +def _part_count_subqueries(): + used_subq = ( + select( + PartsUsed.part_id.label("part_id"), + func.coalesce(func.sum(PartsUsed.count), 0).label("used_sum"), + ) + .group_by(PartsUsed.part_id) + .subquery() + ) + added_subq = ( + select( + PartsAdded.part_id.label("part_id"), + func.coalesce(func.sum(PartsAdded.count), 0).label("added_sum"), + ) + .group_by(PartsAdded.part_id) + .subquery() + ) + current_count = ( + Parts.initial_count + + func.coalesce(added_subq.c.added_sum, 0) + - func.coalesce(used_subq.c.used_sum, 0) + ).label("current_count") + return used_subq, added_subq, current_count + + +def build_part_history_response( + part_id: int, db: Session +) -> parts.PartHistoryResponse: + part = db.scalars(select(Parts).where(Parts.id == part_id)).first() + if not part: + raise HTTPException(status_code=404, detail="Part not found") + + added_q = select( + PartsAdded.id.label("ref_id"), + PartsAdded.part_id.label("part_id"), + PartsAdded.date.label("event_date"), + literal("added").label("event_type"), + PartsAdded.note.label("note"), + PartsAdded.count.label("delta"), + literal(None).label("work_order_id"), + ).where(PartsAdded.part_id == part_id) + + used_q = ( + select( + PartsUsed.id.label("ref_id"), + PartsUsed.part_id.label("part_id"), + MeterActivities.timestamp_start.label("event_date"), + literal("used").label("event_type"), + func.nullif(func.trim(MeterActivities.description), "").label("note"), + (-PartsUsed.count).label("delta"), + MeterActivities.work_order_id.label("work_order_id"), + ) + .join(MeterActivities, MeterActivities.id == 
PartsUsed.meter_activity_id) + .where(PartsUsed.part_id == part_id) + ) + + events = union_all(added_q, used_q).subquery() + rows = db.execute( + select( + events.c.ref_id, + events.c.part_id, + events.c.event_date, + events.c.event_type, + events.c.note, + events.c.delta, + events.c.work_order_id, + ).order_by(events.c.event_date.asc(), events.c.ref_id.asc()) + ).all() + + running = int(part.initial_count) + history: list[parts.PartHistoryRow] = [ + parts.PartHistoryRow( + row_id=f"initial-{part_id}", + part_id=part_id, + event_date=datetime.min, + event_type="initial", + ref_id=None, + note="Initial count", + delta=0, + total_after=running, + work_order_id=None, + ) + ] + + for ref_id, pid, event_date, event_type, note, delta, work_order_id in rows: + if not isinstance(event_date, datetime): + event_date = datetime.combine(event_date, time.min) + running += int(delta) + history.append( + parts.PartHistoryRow( + row_id=f"{event_type}-{ref_id}", + part_id=pid, + event_date=event_date, + event_type=event_type, + ref_id=ref_id, + note=note, + delta=int(delta), + total_after=running, + work_order_id=work_order_id, + ) + ) + + return parts.PartHistoryResponse( + part_id=part.id, + part_number=part.part_number, + initial_count=part.initial_count, + current_count=running, + history=history, + ) + + +def list_parts(db: Session, in_use: Optional[bool] = None): + used_subq, added_subq, current_count = _part_count_subqueries() + stmt = ( + select(Parts, current_count) + .outerjoin(used_subq, used_subq.c.part_id == Parts.id) + .outerjoin(added_subq, added_subq.c.part_id == Parts.id) + .options(selectinload(Parts.part_type)) + ) + if in_use is not None: + stmt = stmt.where(Parts.in_use == in_use) + rows = db.execute(stmt).all() + results = [] + for part, curr in rows: + part.current_count = curr + results.append(part) + return results + + +def get_parts_used_summary(db: Session, from_date: date, to_date: date, parts: list[int]): + start_dt = datetime.combine(from_date, 
        datetime.min.time())
    end_dt = datetime.combine(to_date, datetime.max.time())
    # Total quantity used per part within [start_dt, end_dt], summed across
    # all meter activities in the window.
    usage_subq = (
        db.query(
            PartsUsed.part_id.label("used_part_id"),
            func.coalesce(func.sum(PartsUsed.count), 0).label("quantity"),
        )
        .join(MeterActivities, MeterActivities.id == PartsUsed.meter_activity_id)
        .filter(
            MeterActivities.timestamp_start >= start_dt,
            MeterActivities.timestamp_start <= end_dt,
            PartsUsed.part_id.in_(parts),
        )
        .group_by(PartsUsed.part_id)
        .subquery()
    )
    # Outer join so requested parts with no usage still appear with quantity 0.
    query = (
        db.query(
            Parts.id.label("id"),
            Parts.part_number,
            Parts.description,
            Parts.price,
            func.coalesce(usage_subq.c.quantity, 0).label("quantity"),
        )
        .outerjoin(usage_subq, Parts.id == usage_subq.c.used_part_id)
        .filter(Parts.id.in_(parts))
        .order_by(Parts.part_number)
    )
    results = []
    for row in query.all():
        price = float(row.price or 0)
        quantity = int(row.quantity or 0)
        results.append(
            {
                "id": row.id,
                "part_number": row.part_number,
                "description": row.description,
                "price": price,
                "quantity": quantity,
                "total": price * quantity,
            }
        )
    return results


def build_parts_used_pdf(db: Session, from_date: date, to_date: date, parts: list[int]):
    """Render the parts-used summary for the date window as a PDF.

    Returns a BytesIO positioned at the start of the PDF bytes.
    """
    results = get_parts_used_summary(db, from_date, to_date, parts)
    # Annotate each row with a cumulative total for the report's running column.
    running_total = 0.0
    for row in results:
        running_total += row["total"]
        row["running_total"] = running_total

    html_content = templates.get_template("parts_used_report.html").render(
        rows=results,
        from_date=from_date,
        to_date=to_date,
    )
    pdf_io = BytesIO()
    HTML(string=html_content).write_pdf(pdf_io)
    pdf_io.seek(0)
    return pdf_io


def get_part(db: Session, part_id: int):
    """Fetch a single part (with computed current_count), or None if missing.

    Register-type parts are returned as a parts.Register with their register
    settings attached; all other parts are returned as a plain parts.Part.
    """
    used_subq, added_subq, current_count = _part_count_subqueries()
    row = db.execute(
        select(Parts, current_count)
        .outerjoin(used_subq, used_subq.c.part_id == Parts.id)
        .outerjoin(added_subq, added_subq.c.part_id == Parts.id)
        .where(Parts.id == part_id)
        .options(selectinload(Parts.part_type), selectinload(Parts.meter_types))
    ).first()
    if not row:
        return None

    selected_part, curr = row
    selected_part.current_count = curr
    returned_part = parts.Part.model_validate(selected_part)

    if selected_part.part_type.name == "Register":
        register_details = db.scalars(
            select(meterRegisters).where(meterRegisters.part_id == selected_part.id)
        ).first()
        register_details_obj = None
        if register_details is not None:
            # NOTE(review): validating via the nested `register_details`
            # attribute of parts.Register -- confirm this is the intended
            # schema class (a standalone RegisterDetails schema seems likelier).
            register_details_obj = (
                parts.Register.register_details.model_validate(register_details)
            )
        returned_part = parts.Register(
            **returned_part.model_dump(exclude_unset=True),
            register_settings=register_details_obj,
        )
    return returned_part


def add_parts(db: Session, payload: parts.PartsAddRequest):
    """Record a stock addition for a part and return it with a refreshed count.

    Raises:
        HTTPException(404): when the part does not exist.
    """
    part = db.scalars(select(Parts).where(Parts.id == payload.part_id)).first()
    if not part:
        raise HTTPException(status_code=404, detail="Part not found")

    db.add(
        PartsAdded(
            part_id=payload.part_id,
            count=payload.count,
            date=payload.date,
            note=payload.note,
        )
    )
    db.commit()

    # Re-read the part so the returned object carries the recomputed count.
    used_subq, added_subq, current_count = _part_count_subqueries()
    row = db.execute(
        select(Parts, current_count)
        .outerjoin(used_subq, used_subq.c.part_id == Parts.id)
        .outerjoin(added_subq, added_subq.c.part_id == Parts.id)
        .where(Parts.id == payload.part_id)
        .options(selectinload(Parts.part_type), selectinload(Parts.meter_types))
    ).first()
    if not row:
        raise HTTPException(status_code=404, detail="Part not found")
    part_obj, curr = row
    part_obj.current_count = curr
    return part_obj


def patch_part_history(
    db: Session, part_id: int, payload: parts.PartHistoryUpdateRequest
):
    """Apply edits to a part's history rows (stock additions and work-order use).

    "added" rows must carry a positive delta; work-order rows a negative one.
    Raises HTTPException 404/422 on missing rows or invalid deltas.
    """
    part = db.scalars(select(Parts).where(Parts.id == part_id)).first()
    if not part:
        raise HTTPException(status_code=404, detail="Part not found")

    for row in payload.rows:
        # Normalize whitespace-only notes to None.
        normalized_note = row.note.strip() if row.note else None
        if normalized_note == "":
            normalized_note = None

        if row.event_type == "added":
            if row.delta <= 0:
                raise HTTPException(
                    status_code=422,
                    detail="Added parts rows must have a positive change.",
                )
            added_row = db.scalars(
                select(PartsAdded).where(
                    PartsAdded.id == row.ref_id,
                    PartsAdded.part_id == part_id,
                )
            ).first()
            if not added_row:
                raise HTTPException(status_code=404, detail="Parts added row not found.")
            added_row.count = row.delta
            added_row.date = row.event_date.date()
            added_row.note = normalized_note
            continue

        # Work-order ("used") rows must remove stock.
        if row.delta >= 0:
            raise HTTPException(
                status_code=422,
                detail="Work order rows must have a negative change.",
            )
        parts_used_row = db.scalars(
            select(PartsUsed).where(
                PartsUsed.id == row.ref_id,
                PartsUsed.part_id == part_id,
            )
        ).first()
        if not parts_used_row:
            raise HTTPException(status_code=404, detail="Parts used row not found.")
        activity = db.scalars(
            select(MeterActivities).where(
                MeterActivities.id == parts_used_row.meter_activity_id
            )
        ).first()
        if not activity:
            raise HTTPException(
                status_code=404,
                detail="Meter activity for parts used row not found.",
            )
        # Preserve the activity's original duration when shifting its start.
        duration = (
            activity.timestamp_end - activity.timestamp_start
            if activity.timestamp_end and activity.timestamp_start
            else None
        )
        parts_used_row.count = abs(row.delta)
        activity.timestamp_start = row.event_date
        # NOTE(review): this overwrites the activity description with the row
        # note, which may be None -- confirm clearing the description is intended.
        activity.description = normalized_note
        activity.timestamp_end = (
            row.event_date + duration if duration is not None else row.event_date
        )

    db.commit()
    return build_part_history_response(part_id, db)
diff --git a/api/services/storage.py b/api/services/storage.py
new file mode 100644
index 00000000..3cb2f9ff
--- /dev/null
+++ b/api/services/storage.py
@@ -0,0 +1,154 @@
import os
import uuid
from datetime import timedelta
from pathlib import Path

from fastapi import HTTPException, UploadFile
from google.auth import default, impersonated_credentials
from google.cloud import storage
from sqlalchemy import select
from sqlalchemy.orm import Session

from api.models.meter import MeterActivities, \
MeterActivityPhotos


# GCS bucket / object-prefix configuration; empty defaults mean "unconfigured".
BUCKET_NAME = os.getenv("GCP_BUCKET_NAME", "")
PHOTO_PREFIX = os.getenv("GCP_PHOTO_PREFIX", "")
# Lifetime of signed photo URLs, in seconds.
PHOTO_JWT_EXPIRE_SECONDS = 600
# Service account impersonated when signing URLs (see create_signed_url).
TARGET_SERVICE_ACCOUNT = (
    "pvacd-meterapp@waterdatainitiative-271000.iam.gserviceaccount.com"
)


def get_activity_photo_record(
    db: Session, activity_id: int, photo_file_name: str
) -> MeterActivityPhotos:
    """Look up the photo row for an activity by stored file name.

    Raises:
        HTTPException(404): when no matching photo record exists.
    """
    photo = (
        db.query(MeterActivityPhotos)
        .filter(
            MeterActivityPhotos.meter_activity_id == activity_id,
            MeterActivityPhotos.file_name == photo_file_name,
        )
        .first()
    )

    if not photo:
        raise HTTPException(status_code=404, detail="Photo not found for this activity")

    return photo


def open_activity_photo(photo: MeterActivityPhotos):
    """Open the photo's GCS blob for streaming.

    Returns (readable file-like, content_type, response headers).
    Raises HTTPException 404 if the blob is gone, 500 on any other failure.
    """
    try:
        client = storage.Client()
        bucket = client.bucket(BUCKET_NAME)
        blob = bucket.blob(photo.gcs_path)

        if not blob.exists(client=client):
            raise HTTPException(status_code=404, detail="Photo file missing from storage")

        # reload() populates blob metadata such as content_type.
        blob.reload(client=client)
        content_type = blob.content_type or "application/octet-stream"
        headers = {"Content-Disposition": f'inline; filename="{photo.file_name}"'}
        return blob.open("rb"), content_type, headers
    except HTTPException:
        raise
    except Exception:
        raise HTTPException(status_code=500, detail="Failed to retrieve photo")


async def save_activity_photos(
    db: Session,
    meter_activity: MeterActivities,
    photos: list[UploadFile],
    max_photos_per_meter: int,
):
    """Upload activity photos to GCS, record them, and enforce retention.

    Each upload gets a fresh UUID-based object name under
    PHOTO_PREFIX/<activity id>/. No-op when `photos` is empty.
    """
    if not photos:
        return

    bucket = storage.Client().bucket(BUCKET_NAME)

    for file in photos:
        # Fall back to .jpg when the client supplied no file extension.
        ext = Path(file.filename).suffix or ".jpg"
        unique_name = f"{uuid.uuid4()}{ext}"
        blob_path = f"{PHOTO_PREFIX}/{meter_activity.id}/{unique_name}"
        blob = bucket.blob(blob_path)
        contents = await file.read()
        blob.upload_from_string(contents, content_type=file.content_type)

        db.add(
            MeterActivityPhotos(
                meter_activity_id=meter_activity.id,
                file_name=unique_name,
                gcs_path=blob_path,
            )
        )

    db.commit()
    db.refresh(meter_activity)
    enforce_activity_photo_retention(
        db=db,
        meter_id=meter_activity.meter_id,
        max_photos_per_meter=max_photos_per_meter,
        bucket=bucket,
    )


def enforce_activity_photo_retention(
    db: Session,
    meter_id: int,
    max_photos_per_meter: int,
    bucket=None,
):
    """Keep at most `max_photos_per_meter` newest photos per meter.

    Older photos (by uploaded_at, newest first) are removed from both GCS
    and the database. GCS delete failures are logged and the DB row is
    still removed, so storage may retain orphaned blobs.
    """
    all_photos = (
        db.query(MeterActivityPhotos)
        .join(MeterActivities)
        .filter(MeterActivities.meter_id == meter_id)
        .order_by(MeterActivityPhotos.uploaded_at.desc())
        .all()
    )

    if len(all_photos) <= max_photos_per_meter:
        return

    bucket = bucket or storage.Client().bucket(BUCKET_NAME)
    for photo in all_photos[max_photos_per_meter:]:
        try:
            bucket.blob(photo.gcs_path).delete()
        except Exception as exc:
            print(f"Warning: failed to delete {photo.gcs_path} from GCS: {exc}")
        db.delete(photo)

    db.commit()


def delete_activity_photos(db: Session, activity_id: int):
    """Best-effort delete of all GCS blobs for an activity's photos.

    NOTE(review): only the storage blobs are deleted here -- the
    MeterActivityPhotos rows are neither deleted nor committed. Presumably
    the caller removes the activity and DB rows go via cascade; confirm,
    otherwise rows will point at missing blobs.
    """
    photos = db.scalars(
        select(MeterActivityPhotos).where(
            MeterActivityPhotos.meter_activity_id == activity_id
        )
    ).all()

    bucket = storage.Client().bucket(BUCKET_NAME)
    for photo in photos:
        try:
            bucket.blob(photo.gcs_path).delete()
        except Exception as exc:
            print(f"Failed to delete {photo.gcs_path} from bucket: {exc}")


def create_signed_url(blob_path: str) -> str:
    """Build a short-lived V4 signed GET URL for a blob.

    Uses impersonated credentials for TARGET_SERVICE_ACCOUNT so signing
    works without a local private key; URL expires after
    PHOTO_JWT_EXPIRE_SECONDS.
    """
    source_creds, _ = default()
    creds = impersonated_credentials.Credentials(
        source_credentials=source_creds,
        target_principal=TARGET_SERVICE_ACCOUNT,
        target_scopes=["https://www.googleapis.com/auth/devstorage.read_only"],
        lifetime=3600,
    )
    storage_client = storage.Client(credentials=creds)
    blob = storage_client.bucket(BUCKET_NAME).blob(blob_path)
    return blob.generate_signed_url(
        version="v4",
        expiration=timedelta(seconds=PHOTO_JWT_EXPIRE_SECONDS),
        method="GET",
    )
diff --git a/api/services/well_measurements.py b/api/services/well_measurements.py
new file mode 100644
index 00000000..a170706c
--- /dev/null
+++ b/api/services/well_measurements.py
@@ -0,0 +1,467 @@
from base64 import \
b64encode
from collections import defaultdict
from datetime import date, datetime
from io import BytesIO
from pathlib import Path
from typing import Any, Dict, Optional
from zoneinfo import ZoneInfo
import calendar
import json
import os
import re
import zlib

import matplotlib
from google.cloud import storage
from jinja2 import Environment, FileSystemLoader, select_autoescape
from matplotlib.pyplot import close, figure
from sqlalchemy import and_, func, select
from sqlalchemy.orm import Session, joinedload
from weasyprint import HTML

from api.models.meter import ObservedPropertyTypeLU
from api.models.well import WellMeasurements, Wells
from api.schemas import well


# Headless backend: charts are rendered to PNG buffers, never to a display.
matplotlib.use("Agg")

WOODPECKER_BUCKET_NAME = os.getenv("GCP_WOODPECKER_BUCKET_NAME", "")
TEMPLATES_DIR = Path(__file__).resolve().parent.parent / "templates"

templates = Environment(
    loader=FileSystemLoader(TEMPLATES_DIR),
    autoescape=select_autoescape(["html", "xml"]),
)

# The only well whose readings live in the Woodpecker bucket.
SP_JOHNSON_WELL_ID = 2599
DEPTH_TO_WATER_SENSOR_NAME = "Depth to Water"
# Wells.use_type_id value identifying monitoring wells (used for year averages).
MONITORING_USE_TYPE_ID = 11


def read_woodpecker_waterlevels(
    well_id: int,
) -> list[well.WellMeasurementDTO]:
    """Load depth-to-water readings for the SP Johnson well from GCS.

    Scans every *.json blob in the Woodpecker bucket, keeps only the
    "Depth to Water" sensor stream, deduplicates by timestamp, and returns
    DTOs sorted ascending by timestamp.

    Raises:
        ValueError: for any well id other than SP_JOHNSON_WELL_ID.
    """
    if well_id != SP_JOHNSON_WELL_ID:
        raise ValueError("Invalid well ID")

    results: list[well.WellMeasurementDTO] = []
    seen_timestamps: set[str] = set()

    client = storage.Client()
    bucket = client.bucket(WOODPECKER_BUCKET_NAME)

    for blob in bucket.list_blobs():
        if not blob.name.endswith(".json"):
            continue

        payload = json.loads(blob.download_as_text())
        device_attributes = payload.get("deviceAttributes") or {}
        tz_name = device_attributes.get("timeZone") or "UTC"
        ra_number = device_attributes.get("wellId") or ""

        sensor_data = payload.get("sensorData") or []
        # Only the depth-to-water stream is of interest.
        depth_sensor = next(
            (
                sensor
                for sensor in sensor_data
                if (sensor.get("sensorName") or "").strip() == DEPTH_TO_WATER_SENSOR_NAME
            ),
            None,
        )
        if not depth_sensor:
            continue

        for measurement in depth_sensor.get("measurements") or []:
            raw_ts = measurement.get("timestamp")
            if not raw_ts:
                continue

            ts = _parse_woodpecker_timestamp(raw_ts, tz_name)
            ts_key = ts.isoformat()
            if ts_key in seen_timestamps:
                continue
            seen_timestamps.add(ts_key)

            raw_value = measurement.get("data")
            # abs() normalizes the sign -- presumably the feed can report
            # depth as a negative number; verify against the device docs.
            value = abs(raw_value) if raw_value is not None else None

            results.append(
                well.WellMeasurementDTO(
                    id=_make_measurement_id(well_id, ts, value),
                    timestamp=ts,
                    value=value,
                    submitting_user=well.WellMeasurementDTO.UserDTO(
                        full_name="Woodpeckers"
                    ),
                    well=well.WellMeasurementDTO.WellDTO(ra_number=ra_number),
                )
            )

    results.sort(key=lambda item: item.timestamp)
    return results


def _parse_woodpecker_timestamp(ts: str, tz_name: str) -> datetime:
    """Parse a DD/MM/YYYY HH:MM:SS timestamp, attaching tz_name when valid.

    Falls back to a naive datetime when the zone name is unknown.
    """
    dt_naive = datetime.strptime(ts, "%d/%m/%Y %H:%M:%S")
    try:
        tz = ZoneInfo(tz_name)
    except Exception:
        return dt_naive
    return dt_naive.replace(tzinfo=tz)


def _make_measurement_id(well_id: int, ts: datetime, value: Optional[float]) -> int:
    """Derive a stable synthetic id from (well, timestamp, value) via CRC32."""
    key = f"{well_id}|{ts.isoformat()}|{value if value is not None else 'null'}"
    return zlib.crc32(key.encode("utf-8"))


def _group_and_average(
    measurements: list[well.WellMeasurementDTO],
    group_by_label: str,
    ra_number: str,
    synthetic_id_counter: int,
) -> tuple[list[well.WellMeasurementDTO], int]:
    """Average measurements per month ("month") or per day (any other label).

    Returns the averaged DTOs (labelled with ra_number, submitted by
    "System") plus the next available synthetic id (ids count downward).
    NOTE(review): assumes measurement.value is never None here -- a None
    would make sum() raise; confirm upstream filtering.
    """
    grouped: dict[str, list[float]] = defaultdict(list)
    for measurement in measurements:
        key = measurement.timestamp.strftime(
            "%Y-%m" if group_by_label == "month" else "%Y-%m-%d"
        )
        grouped[key].append(measurement.value)

    results: list[well.WellMeasurementDTO] = []
    for time_str, values in sorted(grouped.items()):
        dt = datetime.strptime(
            time_str,
            "%Y-%m" if group_by_label == "month" else "%Y-%m-%d",
        )
        results.append(
            well.WellMeasurementDTO(
                id=synthetic_id_counter,
                timestamp=dt,
                value=sum(values) / len(values),
                submitting_user={"full_name": "System"},
                well={"ra_number": ra_number},
            )
        )
        synthetic_id_counter -= 1

    return results, synthetic_id_counter


def _get_measurements_by_ids(
    db: Session,
    well_ids: list[int],
    start: Optional[date],
    end: Optional[date],
):
    """Fetch depth-to-water rows for the given wells within [start, end].

    The end date is inclusive (extended to end-of-day). Results are ordered
    by well then timestamp, with submitting user and well eagerly loaded.
    """
    filters = [
        ObservedPropertyTypeLU.name == "Depth to water",
        WellMeasurements.well_id.in_(well_ids),
    ]
    if start:
        filters.append(WellMeasurements.timestamp >= start)
    if end:
        filters.append(WellMeasurements.timestamp <= datetime.combine(end, datetime.max.time()))

    stmt = (
        select(WellMeasurements)
        .options(
            joinedload(WellMeasurements.submitting_user),
            joinedload(WellMeasurements.well),
        )
        .join(ObservedPropertyTypeLU)
        .where(and_(*filters))
        .order_by(WellMeasurements.well_id, WellMeasurements.timestamp)
    )
    return db.scalars(stmt).all()


def read_waterlevels(
    *,
    db: Session,
    well_ids: list[int],
    from_date: Optional[date],
    to_date: Optional[date],
    is_averaging_all_wells: bool,
    is_comparing_to_1970_average: bool,
    comparison_year: Optional[str],
) -> list[well.WellMeasurementDTO]:
    """Assemble water-level series for the requested wells and comparisons.

    Optionally averages all selected wells together, and/or appends
    monthly-average series for 1970 and/or a caller-supplied comparison
    year (computed over monitoring wells only). Synthetic series use
    negative ids counting down from -1.

    Raises:
        ValueError: when comparison_year is not a valid year in
            [1900, current year].
    """
    synthetic_id_counter = -1

    # A window of a year or more is bucketed per month, otherwise per day.
    group_by = None
    if from_date and to_date:
        group_by = "month" if (to_date - from_date).days >= 365 else "day"

    if not well_ids and not is_comparing_to_1970_average and not comparison_year:
        return []

    response_data: list[well.WellMeasurementDTO] = []

    if is_averaging_all_wells and well_ids:
        current_measurements = _get_measurements_by_ids(db, well_ids, from_date, to_date)
        averaged, synthetic_id_counter = _group_and_average(
            current_measurements,
            group_by or "day",
            "Average of wells",
            synthetic_id_counter,
        )
        response_data.extend(averaged)

    if not is_averaging_all_wells and well_ids:
        response_data.extend(_get_measurements_by_ids(db, well_ids, from_date, to_date))

    def add_year_average(year: int, label: str):
        # Append a monthly-average series for `year` over monitoring wells.
        nonlocal synthetic_id_counter
        if from_date and to_date and (to_date - from_date).days >= 365:
            start = date(year, 1, 1)
            end = date(year, 12, 31)
        elif from_date and to_date:
            # Mirror the requested month window onto the comparison year.
            start = date(year, from_date.month, 1)
            end = date(year, to_date.month, calendar.monthrange(year, to_date.month)[1])
        else:
            start = date(year, 1, 1)
            end = date(year, 12, 31)

        monitoring_ids = [
            row[0]
            for row in db.execute(
                select(Wells.id).where(Wells.use_type_id == MONITORING_USE_TYPE_ID)
            ).all()
        ]
        year_measurements = _get_measurements_by_ids(db, monitoring_ids, start, end)
        averaged, synthetic_id_counter = _group_and_average(
            year_measurements,
            "month",
            label,
            synthetic_id_counter,
        )
        response_data.extend(averaged)

    if is_comparing_to_1970_average:
        add_year_average(1970, "1970 Average")

    if comparison_year:
        try:
            year_int = int(comparison_year)
        except ValueError:
            raise ValueError("comparisonYear must be a 4-digit year")

        current_year = datetime.now().year
        if year_int < 1900 or year_int > current_year:
            raise ValueError(f"comparisonYear must be between 1900 and {current_year}")

        # Avoid emitting the 1970 series twice.
        if not (is_comparing_to_1970_average and year_int == 1970):
            add_year_average(year_int, f"{year_int} Average")

    return response_data


def get_waterlevel_report_averages(
    *,
    well_ids: list[int],
    from_date: Optional[date],
    to_date: Optional[date],
    db: Session,
) -> Dict[str, Any]:
    """Compute per-well and combined depth-to-water averages for the report.

    Buckets by year when the window spans a year or more, else by month.
    Returns {"bucket": unit, "per_well": [...], "all_wells": [...]};
    empty when no wells or no dates are supplied.
    """
    if not well_ids:
        return {"bucket": None, "per_well": [], "all_wells": []}

    if from_date is None and to_date is None:
        return {"bucket": None, "per_well": [], "all_wells": []}

    start_dt = datetime.combine(from_date, datetime.min.time()) if from_date else None
    end_dt = datetime.combine(to_date, datetime.max.time()) if to_date else None

    if from_date and to_date:
        bucket_unit = "year" if (to_date - from_date).days >= 365 else "month"
    else:
        bucket_unit = "month"

    bucket = func.date_trunc(bucket_unit, WellMeasurements.timestamp).label("period_start")
    base_filters = [
        ObservedPropertyTypeLU.name == "Depth to water",
        WellMeasurements.well_id.in_(well_ids),
    ]
    if start_dt:
        base_filters.append(WellMeasurements.timestamp >= start_dt)
    if end_dt:
        base_filters.append(WellMeasurements.timestamp <= end_dt)

    per_well_stmt = (
        select(
            WellMeasurements.well_id.label("well_id"),
            Wells.ra_number.label("ra_number"),
            bucket,
            func.avg(WellMeasurements.value).label("avg_value"),
        )
        .join(Wells, Wells.id == WellMeasurements.well_id)
        .join(
            ObservedPropertyTypeLU,
            ObservedPropertyTypeLU.id == WellMeasurements.observed_property_id,
        )
        .where(and_(*base_filters))
        .group_by(WellMeasurements.well_id, Wells.ra_number, bucket)
        .order_by(Wells.ra_number, bucket)
    )

    all_wells_stmt = (
        select(bucket, func.avg(WellMeasurements.value).label("avg_value"))
        .join(
            ObservedPropertyTypeLU,
            ObservedPropertyTypeLU.id == WellMeasurements.observed_property_id,
        )
        .where(and_(*base_filters))
        .group_by(bucket)
        .order_by(bucket)
    )

    return {
        "bucket": bucket_unit,
        "per_well": [
            {
                "well_id": row.well_id,
                "ra_number": row.ra_number,
                "period_start": row.period_start,
                "avg_value": float(row.avg_value) if row.avg_value is not None else None,
            }
            for row in db.execute(per_well_stmt).all()
        ],
        "all_wells": [
            {
                "period_start": row.period_start,
                "avg_value": float(row.avg_value) if row.avg_value is not None else None,
            }
            for row in db.execute(all_wells_stmt).all()
        ],
    }


def build_waterlevels_pdf(
    *,
    db: Session,
    well_ids: list[int],
    from_date: date,
    to_date: date,
    is_averaging_all_wells: bool,
    is_comparing_to_1970_average: bool,
    comparison_year: Optional[str],
) -> BytesIO:
    """Render the water-levels report (table + chart) as a PDF.

    Comparison-year series are re-plotted at the report's start year so
    they overlay the current data on the chart.

    Raises:
        LookupError: when no measurements match the request.
    """
    data = read_waterlevels(
        db=db,
        well_ids=well_ids,
        from_date=from_date,
        to_date=to_date,
        is_averaging_all_wells=is_averaging_all_wells,
        is_comparing_to_1970_average=is_comparing_to_1970_average,
        comparison_year=comparison_year,
    )
    if not data:
        raise LookupError("No water-level data found")

    from_year = from_date.year
    # Years whose averaged series should be time-shifted onto from_year.
    shift_years = set()
    if is_comparing_to_1970_average:
        shift_years.add(1970)
    if comparison_year:
        try:
            shift_years.add(int(comparison_year))
        except ValueError:
            pass

    rows = []
    data_by_well = defaultdict(list)
    for measurement in data:
        ts = measurement.timestamp
        value = measurement.value
        # Synthetic averages carry dict payloads; DB rows carry ORM objects.
        ra_number = (
            measurement.well["ra_number"]
            if isinstance(measurement.well, dict)
            else measurement.well.ra_number
        )

        rows.append(
            {
                "timestamp": ts.strftime("%Y-%m-%d %H:%M"),
                "depth_to_water": value,
                "well_ra_number": ra_number,
            }
        )

        chart_ts = ts
        if from_year:
            # Series labelled "<year> Average" are shifted to overlay from_year.
            match = re.match(r"^(\d{4}) Average$", ra_number)
            if match:
                year = int(match.group(1))
                if year in shift_years:
                    chart_ts = _shift_year_safe(ts, from_year)

        data_by_well[ra_number].append((chart_ts, value))

    chart_b64 = _make_line_chart(data_by_well, "Depth of Water over Time")
    report_subtext = None
    if is_averaging_all_wells:
        num_wells = len(well_ids)
        well_word = "WELL" if num_wells == 1 else "WELLS"
        report_subtext = (
            f"MONTHLY AVERAGE WATER LEVEL WITHIN {num_wells} PVACD RECORDER {well_word}\n"
            "AVERAGES TAKEN FROM STEEL TAPE MEASUREMENTS MADE\n"
            "ON OR NEAR THE 5TH, 15TH AND 25TH OF EACH MONTH"
        )

    averages = get_waterlevel_report_averages(
        well_ids=well_ids,
        from_date=from_date,
        to_date=to_date,
        db=db,
    )
    html = templates.get_template("waterlevels_report.html").render(
        from_date=from_date,
        to_date=to_date,
        observation_chart=chart_b64,
        rows=rows,
        report_title="ROSWELL ARTESIAN BASIN",
        report_subtext=report_subtext,
        averages=averages,
    )

    pdf_io = BytesIO()
    HTML(string=html).write_pdf(pdf_io)
    pdf_io.seek(0)
    return pdf_io


def _shift_year_safe(dt: datetime, new_year: int):
    """Replace dt's year, clamping the day for short months (e.g. Feb 29)."""
    try:
        return dt.replace(year=new_year)
    except ValueError:
        last_day = calendar.monthrange(new_year, dt.month)[1]
        return dt.replace(year=new_year, day=min(dt.day, last_day))


def _make_line_chart(data: dict, title: str):
    """Plot one line per series and return the PNG as base64 ("" when empty)."""
    if not data:
        return ""
    fig = figure(figsize=(10, 6))
+ ax = fig.add_subplot(111) + for ra_label, measurements in data.items(): + sorted_measurements = sorted(measurements, key=lambda item: item[0]) + timestamps = [ts for ts, _ in sorted_measurements] + values = [val for _, val in sorted_measurements] + ax.plot(timestamps, values, label=ra_label, marker="o") + ax.set_title(title) + ax.set_xlabel("Time") + ax.set_ylabel("Depth to Water") + ax.invert_yaxis() + fig.subplots_adjust(right=0.78) + ax.legend( + loc="center left", + bbox_to_anchor=(1.02, 0.5), + borderaxespad=0.0, + frameon=True, + ) + fig.autofmt_xdate() + buf = BytesIO() + fig.savefig(buf, format="png", bbox_inches="tight") + close(fig) + return b64encode(buf.getvalue()).decode("utf-8") diff --git a/api/services/work_orders.py b/api/services/work_orders.py new file mode 100644 index 00000000..37bdb32f --- /dev/null +++ b/api/services/work_orders.py @@ -0,0 +1,254 @@ +from datetime import datetime + +from fastapi import HTTPException +from sqlalchemy import or_, select +from sqlalchemy.exc import IntegrityError +from sqlalchemy.orm import Session, joinedload + +from api.models.meter import Meters, MeterActivities +from api.models.user import Users +from api.models.work_order import workOrders, workOrderStatusLU +from api.schemas import meter + + +def _work_order_query(): + return ( + select(workOrders) + .options( + joinedload(workOrders.status), + joinedload(workOrders.meter), + joinedload(workOrders.assigned_user), + ) + ) + + +def _load_associated_activities(db: Session, work_order_ids: list[int]): + if not work_order_ids: + return {} + + relevant_activities = db.scalars( + select(MeterActivities) + .options(joinedload(MeterActivities.location)) + .where(MeterActivities.work_order_id.in_(work_order_ids)) + ).all() + + activities_by_work_order = {} + for activity in relevant_activities: + activities_by_work_order.setdefault(activity.work_order_id, []).append( + { + "id": activity.id, + "timestamp_start": activity.timestamp_start, + "timestamp_end": 
activity.timestamp_end, + "description": activity.description, + "submitting_user_id": activity.submitting_user_id, + "meter_id": activity.meter_id, + "activity_type_id": activity.activity_type_id, + "location_id": activity.location_id, + "location_name": activity.location.name if activity.location else None, + "ose_share": activity.ose_share, + "water_users": activity.water_users, + } + ) + + return activities_by_work_order + + +def _serialize_work_order( + work_order: workOrders, + associated_activities: list[dict] | list[MeterActivities] | None = None, +) -> meter.WorkOrder: + return meter.WorkOrder( + work_order_id=work_order.id, + ose_request_id=work_order.ose_request_id, + date_created=work_order.date_created, + creator=work_order.creator, + meter_id=work_order.meter.id, + meter_serial=work_order.meter.serial_number, + title=work_order.title, + description=work_order.description, + status=work_order.status.name, + notes=work_order.notes, + assigned_user_id=work_order.assigned_user_id, + assigned_user=work_order.assigned_user.username + if work_order.assigned_user + else None, + associated_activities=associated_activities, + ) + + +def list_work_orders( + db: Session, + filter_by_status: list[str], + start_date: datetime, + work_order_id: list[int] | None = None, + assigned_user_id: int | None = None, + q: str | None = None, +): + stmt = ( + _work_order_query() + .join(workOrderStatusLU) + .where(workOrderStatusLU.name.in_(filter_by_status)) + .where(workOrders.date_created >= start_date) + ) + + if work_order_id: + stmt = stmt.where(workOrders.id.in_(work_order_id)) + + if assigned_user_id: + stmt = stmt.where(workOrders.assigned_user_id == assigned_user_id) + + if q: + q_like = f"%{q.strip()}%" + stmt = stmt.where( + or_( + workOrders.title.ilike(q_like), + workOrders.description.ilike(q_like), + workOrders.creator.ilike(q_like), + workOrders.notes.ilike(q_like), + workOrders.meter.has(Meters.serial_number.ilike(q_like)), + ) + ) + + work_order_rows = 
db.scalars(stmt).all() + activities_by_work_order = _load_associated_activities( + db, [work_order.id for work_order in work_order_rows] + ) + + return [ + { + "work_order_id": work_order.id, + "ose_request_id": work_order.ose_request_id, + "date_created": work_order.date_created, + "creator": work_order.creator, + "meter_id": work_order.meter.id, + "meter_serial": work_order.meter.serial_number, + "title": work_order.title, + "description": work_order.description, + "status": work_order.status.name, + "notes": work_order.notes, + "assigned_user_id": work_order.assigned_user_id, + "assigned_user": work_order.assigned_user.username + if work_order.assigned_user + else None, + "associated_activities": activities_by_work_order.get(work_order.id, []), + } + for work_order in work_order_rows + ] + + +def create_work_order( + db: Session, new_work_order: meter.CreateWorkOrder +) -> meter.WorkOrder: + open_status = db.scalars( + select(workOrderStatusLU).where(workOrderStatusLU.name == "Open") + ).first() + + work_order = workOrders( + date_created=new_work_order.date_created, + meter_id=new_work_order.meter_id, + title=new_work_order.title, + status_id=open_status.id, + ) + + if new_work_order.description: + work_order.description = new_work_order.description + if new_work_order.notes: + work_order.notes = new_work_order.notes + if new_work_order.assigned_user_id: + work_order.assigned_user_id = new_work_order.assigned_user_id + if new_work_order.creator: + work_order.creator = new_work_order.creator + if new_work_order.ose_request_id: + work_order.ose_request_id = new_work_order.ose_request_id + + try: + db.add(work_order) + db.commit() + except IntegrityError: + raise HTTPException( + status_code=409, detail="Title empty or already exists for this meter." 
+ ) + + work_order = db.scalars(_work_order_query().where(workOrders.id == work_order.id)).first() + return _serialize_work_order(work_order) + + +def update_work_order( + db: Session, + patch_work_order_form: meter.PatchWorkOrder, + user: Users, +) -> meter.WorkOrder: + comparison_work_order = meter.PatchWorkOrder( + work_order_id=patch_work_order_form.work_order_id, + status=patch_work_order_form.status, + notes=patch_work_order_form.notes, + ) + + update_scope = "Technician" if comparison_work_order == patch_work_order_form else "Admin" + + if user.user_role.name not in [update_scope, "Admin"]: + raise HTTPException( + status_code=403, + detail="User does not have permission to update this work order.", + ) + + work_order = db.scalars( + _work_order_query().where(workOrders.id == patch_work_order_form.work_order_id) + ).first() + + if user.user_role.name == "Technician" and work_order.assigned_user_id != user.id: + raise HTTPException( + status_code=403, + detail="User does not have permission to update this work order.", + ) + + if patch_work_order_form.title == "": + raise HTTPException(status_code=422, detail="Title cannot be empty.") + + if patch_work_order_form.title: + work_order.title = patch_work_order_form.title + if patch_work_order_form.description: + work_order.description = patch_work_order_form.description + if patch_work_order_form.status: + new_status = db.scalars( + select(workOrderStatusLU).where( + workOrderStatusLU.name == patch_work_order_form.status + ) + ).first() + work_order.status_id = new_status.id + if patch_work_order_form.notes: + work_order.notes = patch_work_order_form.notes + if patch_work_order_form.creator: + work_order.creator = patch_work_order_form.creator + if patch_work_order_form.assigned_user_id: + work_order.assigned_user_id = patch_work_order_form.assigned_user_id + + try: + db.commit() + except IntegrityError: + raise HTTPException(status_code=409, detail="Title already exists for this meter.") + + work_order = 
db.scalars( + _work_order_query() + .join(workOrderStatusLU) + .where(workOrders.id == patch_work_order_form.work_order_id) + ).first() + associated_activities = db.scalars( + select(MeterActivities).where(MeterActivities.work_order_id == work_order.id) + ).all() + + return _serialize_work_order(work_order, associated_activities=list(associated_activities)) + + +def delete_work_order(db: Session, work_order_id: int): + work_order = db.scalars( + select(workOrders).where(workOrders.id == work_order_id) + ).first() + + if not work_order: + raise HTTPException(status_code=404, detail="Work order not found.") + + db.delete(work_order) + db.commit() + + return {"status": "success"} diff --git a/api/templates/chlorides_report.html b/api/templates/chlorides_report.html index 748c9e01..3e8472ce 100644 --- a/api/templates/chlorides_report.html +++ b/api/templates/chlorides_report.html @@ -46,39 +46,39 @@