From bc236d3fb86a85e5a0557e4fedd8982ef5c69af1 Mon Sep 17 00:00:00 2001 From: Tyler Adam Martinez Date: Wed, 18 Mar 2026 23:35:30 -0500 Subject: [PATCH 01/22] feat(user_session): Add tracking --- api/main.py | 45 +- api/models/main_models.py | 54 + api/routes/user_sessions.py | 250 +++ api/schemas/security_schemas.py | 9 +- api/security.py | 23 +- api/session_tracking.py | 219 +++ frontend/src/components/CustomCardHeader.tsx | 16 +- frontend/src/components/Topbar.tsx | 6 +- frontend/src/hooks/useFetchWithAuth.ts | 4 + frontend/src/interfaces/UserSessions.ts | 33 + frontend/src/interfaces/index.ts | 1 + frontend/src/service/ApiServiceNew.ts | 4 + frontend/src/utils/SessionTracking.ts | 281 +++ frontend/src/views/Login.tsx | 22 +- frontend/src/views/Settings.tsx | 1513 ++++++++++------- ...070000_create_user_sessions_table.down.sql | 12 + ...18070000_create_user_sessions_table.up.sql | 70 + 17 files changed, 1960 insertions(+), 602 deletions(-) create mode 100644 api/routes/user_sessions.py create mode 100644 api/session_tracking.py create mode 100644 frontend/src/interfaces/UserSessions.ts create mode 100644 frontend/src/utils/SessionTracking.ts create mode 100644 migrations/20260318070000_create_user_sessions_table.down.sql create mode 100644 migrations/20260318070000_create_user_sessions_table.up.sql diff --git a/api/main.py b/api/main.py index 7c6edd7c..f90acc6e 100644 --- a/api/main.py +++ b/api/main.py @@ -1,5 +1,5 @@ from datetime import timedelta -from fastapi import FastAPI, Depends, HTTPException +from fastapi import FastAPI, Depends, HTTPException, Request from fastapi.security import OAuth2PasswordRequestForm from fastapi_pagination import add_pagination from fastapi.middleware.cors import CORSMiddleware @@ -18,6 +18,7 @@ from api.routes.OSE import ose_router from api.routes.parts import part_router from api.routes.settings import settings_router +from api.routes.user_sessions import user_sessions_router from api.routes.well_measurements import ( 
authenticated_well_measurement_router, public_well_measurement_router, @@ -28,8 +29,10 @@ create_access_token, ACCESS_TOKEN_EXPIRE_HOURS, authenticated_router, + get_session_identifier_from_token, ) -from api.session import get_db +from api.session import get_db, SessionLocal +from api.session_tracking import create_user_session, touch_user_session from sqlalchemy.orm import Session tags_metadata = [ @@ -85,7 +88,9 @@ @app.post("/token", response_model=security_schemas.Token, tags=["Login"]) def login_for_access_token( - form_data: OAuth2PasswordRequestForm = Depends(), db: Session = Depends(get_db) + request: Request, + form_data: OAuth2PasswordRequestForm = Depends(), + db: Session = Depends(get_db), ): user: Users = authenticate_user(form_data.username, form_data.password, db) if not user: @@ -102,9 +107,12 @@ def login_for_access_token( headers={"WWW-Authenticate": "Bearer"}, ) + user_session = create_user_session(db=db, user=user, request=request) + access_token = create_access_token( data={ "sub": user.username, + "sid": user_session.session_identifier, "scopes": list( map(lambda scope: scope.scope_string, user.user_role.security_scopes) ), @@ -112,8 +120,36 @@ def login_for_access_token( expires_delta=timedelta(hours=ACCESS_TOKEN_EXPIRE_HOURS), ) user_response = security_schemas.User(**user.__dict__) + db.commit() + + return { + "access_token": access_token, + "token_type": "bearer", + "user": user_response, + "session_identifier": user_session.session_identifier, + } + + +@app.middleware("http") +async def update_user_session_last_seen(request: Request, call_next): + authorization_header = request.headers.get("authorization") + if authorization_header and authorization_header.startswith("Bearer "): + token = authorization_header.removeprefix("Bearer ").strip() + session_identifier = None + + try: + session_identifier = get_session_identifier_from_token(token) + except Exception: + session_identifier = None + + if session_identifier: + db = SessionLocal() + 
try: + touch_user_session(db, session_identifier) + finally: + db.close() - return {"access_token": access_token, "token_type": "bearer", "user": user_response} + return await call_next(request) # ======================================= @@ -138,4 +174,5 @@ def login_for_access_token( app.include_router(public_chlorides_router) app.include_router(public_maintenance_router) app.include_router(public_well_measurement_router) +app.include_router(user_sessions_router) app.include_router(authenticated_router) diff --git a/api/models/main_models.py b/api/models/main_models.py index c289ced4..87c6b822 100644 --- a/api/models/main_models.py +++ b/api/models/main_models.py @@ -483,6 +483,60 @@ class Users(Base): back_populates="creator", foreign_keys="Notifications.created_by", ) + user_sessions: Mapped[List["UserSessions"]] = relationship( + "UserSessions", + back_populates="user", + cascade="all, delete-orphan", + ) + + +class SignOutReasonTypeLU(Base): + __tablename__ = "sign_out_reason_type_lu" + + name: Mapped[str] = mapped_column(String(50), nullable=False, unique=True) + description: Mapped[Optional[str]] = mapped_column(String) + + user_sessions: Mapped[List["UserSessions"]] = relationship( + "UserSessions", back_populates="sign_out_reason_type" + ) + + +class UserSessions(Base): + __tablename__ = "user_sessions" + + user_id: Mapped[int] = mapped_column( + Integer, ForeignKey("Users.id", ondelete="CASCADE", onupdate="CASCADE"), index=True + ) + session_identifier: Mapped[str] = mapped_column( + String(36), nullable=False, unique=True, index=True + ) + ip_address: Mapped[Optional[str]] = mapped_column(String(255)) + user_agent: Mapped[Optional[str]] = mapped_column(String) + device_label: Mapped[Optional[str]] = mapped_column(String(255)) + device_type: Mapped[Optional[str]] = mapped_column(String(100)) + browser: Mapped[Optional[str]] = mapped_column(String(100)) + operating_system: Mapped[Optional[str]] = mapped_column(String(100)) + fingerprint_hash: 
Mapped[Optional[str]] = mapped_column(String(128), index=True) + signed_in_at: Mapped[DateTime] = mapped_column( + DateTime, nullable=False, server_default=func.now(), index=True + ) + last_seen_at: Mapped[DateTime] = mapped_column( + DateTime, nullable=False, server_default=func.now(), index=True + ) + signed_out_at: Mapped[Optional[DateTime]] = mapped_column(DateTime, index=True) + is_active: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True, index=True) + sign_out_reason_type_id: Mapped[Optional[int]] = mapped_column( + Integer, + ForeignKey( + "sign_out_reason_type_lu.id", ondelete="RESTRICT", onupdate="CASCADE" + ), + index=True, + ) + + user: Mapped["Users"] = relationship("Users", back_populates="user_sessions") + sign_out_reason_type: Mapped[Optional["SignOutReasonTypeLU"]] = relationship( + "SignOutReasonTypeLU", back_populates="user_sessions" + ) class NotificationTypeLU(Base): diff --git a/api/routes/user_sessions.py b/api/routes/user_sessions.py new file mode 100644 index 00000000..69fc7bec --- /dev/null +++ b/api/routes/user_sessions.py @@ -0,0 +1,250 @@ +from collections import defaultdict +from datetime import datetime +from typing import Optional + +from fastapi import APIRouter, Depends, HTTPException +from sqlalchemy.orm import Session +from starlette import status + +from api.models.main_models import UserSessions, Users +from api.schemas.base import ORMBase +from api.security import get_current_user, get_session_identifier_from_token, oauth2_scheme +from api.session import get_db +from api.session_tracking import mark_session_signed_out + +user_sessions_router = APIRouter(tags=["Login"]) + + +class SessionSignOutRequest(ORMBase): + sign_out_reason_name: str + fingerprint_hash: Optional[str] = None + + +class ExpiredSessionSignOutRequest(SessionSignOutRequest): + session_identifier: str + + +class UserSessionSummary(ORMBase): + session_identifier: str + device_label: str | None = None + device_type: str | None = None + browser: 
str | None = None + operating_system: str | None = None + ip_address: str | None = None + signed_in_at: datetime + last_seen_at: datetime + signed_out_at: datetime | None = None + is_active: bool + sign_out_reason_name: str | None = None + is_current: bool + + +class KnownDeviceSummary(ORMBase): + device_key: str + device_label: str | None = None + device_type: str | None = None + browser: str | None = None + operating_system: str | None = None + session_count: int + active_session_count: int + signed_in_at_first: datetime + last_seen_at: datetime + is_current_device: bool + + +class UserSessionsResponse(ORMBase): + current_session_identifier: str | None = None + sessions: list[UserSessionSummary] + known_devices: list[KnownDeviceSummary] + + +def serialize_session( + session: UserSessions, + *, + current_session_identifier: str | None, +) -> UserSessionSummary: + return UserSessionSummary( + session_identifier=session.session_identifier, + device_label=session.device_label, + device_type=session.device_type, + browser=session.browser, + operating_system=session.operating_system, + ip_address=session.ip_address, + signed_in_at=session.signed_in_at, + last_seen_at=session.last_seen_at, + signed_out_at=session.signed_out_at, + is_active=session.is_active, + sign_out_reason_name=( + session.sign_out_reason_type.name if session.sign_out_reason_type else None + ), + is_current=session.session_identifier == current_session_identifier, + ) + + +def get_known_device_key(session: UserSessions) -> str: + if session.fingerprint_hash: + return f"fingerprint:{session.fingerprint_hash}" + + fallback_parts = [ + session.device_label or "unknown-device", + session.browser or "unknown-browser", + session.operating_system or "unknown-os", + session.device_type or "unknown-type", + ] + return f"derived:{'|'.join(fallback_parts)}" + + +@user_sessions_router.get( + "/user-sessions", + response_model=UserSessionsResponse, +) +def list_user_sessions( + db: Session = Depends(get_db), + 
current_user: Users = Depends(get_current_user), + token: str = Depends(oauth2_scheme), +): + current_session_identifier = get_session_identifier_from_token(token) + sessions = ( + db.query(UserSessions) + .filter(UserSessions.user_id == current_user.id) + .order_by(UserSessions.last_seen_at.desc(), UserSessions.signed_in_at.desc()) + .all() + ) + + serialized_sessions = [ + serialize_session( + session, + current_session_identifier=current_session_identifier, + ) + for session in sessions + ] + + grouped_sessions: dict[str, list[UserSessions]] = defaultdict(list) + for session in sessions: + grouped_sessions[get_known_device_key(session)].append(session) + + known_devices: list[KnownDeviceSummary] = [] + for device_key, device_sessions in grouped_sessions.items(): + ordered_sessions = sorted( + device_sessions, + key=lambda session: (session.last_seen_at, session.signed_in_at), + reverse=True, + ) + newest_session = ordered_sessions[0] + known_devices.append( + KnownDeviceSummary( + device_key=device_key, + device_label=newest_session.device_label, + device_type=newest_session.device_type, + browser=newest_session.browser, + operating_system=newest_session.operating_system, + session_count=len(device_sessions), + active_session_count=sum( + 1 for session in device_sessions if session.is_active + ), + signed_in_at_first=min( + session.signed_in_at for session in device_sessions + ), + last_seen_at=max(session.last_seen_at for session in device_sessions), + is_current_device=any( + session.session_identifier == current_session_identifier + for session in device_sessions + ), + ) + ) + + known_devices.sort( + key=lambda device: (device.is_current_device, device.last_seen_at), + reverse=True, + ) + + return UserSessionsResponse( + current_session_identifier=current_session_identifier, + sessions=serialized_sessions, + known_devices=known_devices, + ) + + +@user_sessions_router.delete("/user-sessions/{session_identifier}") +def revoke_user_session( + 
session_identifier: str, + db: Session = Depends(get_db), + current_user: Users = Depends(get_current_user), + token: str = Depends(oauth2_scheme), +): + current_session_identifier = get_session_identifier_from_token(token) + if session_identifier == current_session_identifier: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="The current session cannot be closed from this endpoint", + ) + + session = ( + db.query(UserSessions) + .filter( + UserSessions.session_identifier == session_identifier, + UserSessions.user_id == current_user.id, + ) + .first() + ) + if not session: + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Session not found") + + mark_session_signed_out( + db, + session_identifier=session_identifier, + reason_name="forced_logout", + ) + db.commit() + + return { + "message": "Session closed", + "session_identifier": session_identifier, + } + + +@user_sessions_router.post("/logout") +def logout_current_session( + payload: SessionSignOutRequest, + db: Session = Depends(get_db), + _: Users = Depends(get_current_user), + token: str = Depends(oauth2_scheme), +): + session_identifier = get_session_identifier_from_token(token) + if not session_identifier: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Session identifier is missing from token", + ) + + session = mark_session_signed_out( + db, + session_identifier=session_identifier, + reason_name=payload.sign_out_reason_name, + fingerprint_hash=payload.fingerprint_hash, + ) + if not session: + raise HTTPException(status_code=404, detail="Session not found") + + db.commit() + + return {"message": "Session signed out", "session_identifier": session.session_identifier} + + +@user_sessions_router.post("/logout/expired") +def logout_expired_session( + payload: ExpiredSessionSignOutRequest, + db: Session = Depends(get_db), +): + session = mark_session_signed_out( + db, + session_identifier=payload.session_identifier, + 
reason_name=payload.sign_out_reason_name, + fingerprint_hash=payload.fingerprint_hash, + ) + if not session: + raise HTTPException(status_code=404, detail="Session not found") + + db.commit() + + return {"message": "Expired session recorded", "session_identifier": session.session_identifier} diff --git a/api/schemas/security_schemas.py b/api/schemas/security_schemas.py index 27201f8d..b99bb69a 100644 --- a/api/schemas/security_schemas.py +++ b/api/schemas/security_schemas.py @@ -51,10 +51,11 @@ class User(ORMBase): avatar_img: str | None = None -class Token(BaseModel): - access_token: str - token_type: str - user: User +class Token(BaseModel): + access_token: str + token_type: str + user: User + session_identifier: str | None = None class TokenData(ORMBase): diff --git a/api/security.py b/api/security.py index ddb0fb2d..ca0de900 100644 --- a/api/security.py +++ b/api/security.py @@ -110,7 +110,7 @@ def get_current_user( db: Annotated[Session, Depends(get_db)], ) -> Users: try: - payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM]) + payload = decode_access_token(token) username: str = payload.get("sub") if username is None: @@ -130,6 +130,27 @@ def get_current_user( raise invalid_credentials_exception +def decode_access_token(token: str, verify_exp: bool = True) -> dict: + decode_options = None + if not verify_exp: + decode_options = {"verify_exp": False} + + return jwt.decode( + token, + SECRET_KEY, + algorithms=[ALGORITHM], + options=decode_options, + ) + + +def get_session_identifier_from_token( + token: str, verify_exp: bool = True +) -> str | None: + payload = decode_access_token(token, verify_exp=verify_exp) + session_identifier: str | None = payload.get("sid") + return session_identifier + + # Provide a list of scope_strings, recieve the current user if those scopes are present, raise auth exception if not def scoped_user(scopes): def get_user(current_user: Users = Security(get_current_user)): diff --git a/api/session_tracking.py 
b/api/session_tracking.py new file mode 100644 index 00000000..aa26be5f --- /dev/null +++ b/api/session_tracking.py @@ -0,0 +1,219 @@ +from __future__ import annotations + +from datetime import datetime, timedelta +from typing import Optional +from uuid import uuid4 + +from fastapi import Request +from sqlalchemy.orm import Session + +from api.models.main_models import SignOutReasonTypeLU, UserSessions, Users + +LAST_SEEN_UPDATE_INTERVAL = timedelta(minutes=5) + + +def normalize_header_value(value: Optional[str]) -> Optional[str]: + if value is None: + return None + + normalized = value.strip() + return normalized or None + + +def extract_client_ip(request: Request) -> Optional[str]: + forwarded_for = normalize_header_value(request.headers.get("x-forwarded-for")) + if forwarded_for: + return forwarded_for.split(",")[0].strip() + + real_ip = normalize_header_value(request.headers.get("x-real-ip")) + if real_ip: + return real_ip + + if request.client: + return request.client.host + + return None + + +def parse_browser(user_agent: Optional[str]) -> Optional[str]: + if not user_agent: + return None + + browser_patterns = [ + ("Edg/", "Microsoft Edge"), + ("OPR/", "Opera"), + ("Opera", "Opera"), + ("SamsungBrowser/", "Samsung Internet"), + ("CriOS/", "Chrome (iOS)"), + ("Chrome/", "Chrome"), + ("Chromium/", "Chromium"), + ("FxiOS/", "Firefox (iOS)"), + ("Firefox/", "Firefox"), + ("Version/", "Safari"), + ("MSIE ", "Internet Explorer"), + ("Trident/", "Internet Explorer"), + ] + + for token, browser_name in browser_patterns: + if token in user_agent: + return browser_name + + return "Unknown Browser" + + +def parse_operating_system(user_agent: Optional[str]) -> Optional[str]: + if not user_agent: + return None + + os_patterns = [ + ("Windows NT", "Windows"), + ("Android", "Android"), + ("iPhone", "iOS"), + ("iPad", "iPadOS"), + ("Mac OS X", "macOS"), + ("CrOS", "ChromeOS"), + ("Linux", "Linux"), + ] + + for token, os_name in os_patterns: + if token in user_agent: + 
return os_name + + return "Unknown OS" + + +def parse_device_type(user_agent: Optional[str]) -> Optional[str]: + if not user_agent: + return None + + lowered_user_agent = user_agent.lower() + if "ipad" in lowered_user_agent or "tablet" in lowered_user_agent: + return "Tablet" + if "mobile" in lowered_user_agent or "iphone" in lowered_user_agent: + return "Mobile" + + return "Desktop" + + +def build_device_label( + browser: Optional[str], operating_system: Optional[str], device_type: Optional[str] +) -> Optional[str]: + if browser and operating_system: + return f"{browser} on {operating_system}" + if browser and device_type: + return f"{browser} ({device_type})" + return browser or operating_system or device_type + + +def create_user_session(db: Session, user: Users, request: Request) -> UserSessions: + user_agent = normalize_header_value(request.headers.get("user-agent")) + browser = normalize_header_value(request.headers.get("x-browser")) or parse_browser( + user_agent + ) + operating_system = normalize_header_value( + request.headers.get("x-operating-system") + ) or parse_operating_system(user_agent) + device_type = normalize_header_value(request.headers.get("x-device-type")) or parse_device_type( + user_agent + ) + device_label = normalize_header_value(request.headers.get("x-device-label")) or build_device_label( + browser, operating_system, device_type + ) + fingerprint_hash = normalize_header_value( + request.headers.get("x-device-fingerprint") + ) + + session = UserSessions( + user_id=user.id, + session_identifier=str(uuid4()), + ip_address=extract_client_ip(request), + user_agent=user_agent, + device_label=device_label, + device_type=device_type, + browser=browser, + operating_system=operating_system, + fingerprint_hash=fingerprint_hash, + signed_in_at=datetime.utcnow(), + last_seen_at=datetime.utcnow(), + is_active=True, + ) + + db.add(session) + db.flush() + + return session + + +def get_sign_out_reason( + db: Session, reason_name: Optional[str] +) -> 
Optional[SignOutReasonTypeLU]: + normalized_reason_name = normalize_header_value(reason_name) or "unknown" + sign_out_reason = ( + db.query(SignOutReasonTypeLU) + .filter(SignOutReasonTypeLU.name == normalized_reason_name) + .first() + ) + + if sign_out_reason: + return sign_out_reason + + return ( + db.query(SignOutReasonTypeLU) + .filter(SignOutReasonTypeLU.name == "unknown") + .first() + ) + + +def mark_session_signed_out( + db: Session, + session_identifier: str, + reason_name: Optional[str], + fingerprint_hash: Optional[str] = None, +) -> Optional[UserSessions]: + session = ( + db.query(UserSessions) + .filter(UserSessions.session_identifier == session_identifier) + .first() + ) + if not session: + return None + + if fingerprint_hash and session.fingerprint_hash and session.fingerprint_hash != fingerprint_hash: + return None + + if session.signed_out_at is not None: + return session + + sign_out_reason = get_sign_out_reason(db, reason_name) + + session.signed_out_at = datetime.utcnow() + session.last_seen_at = session.signed_out_at + session.is_active = False + session.sign_out_reason_type_id = sign_out_reason.id if sign_out_reason else None + db.add(session) + + return session + + +def touch_user_session(db: Session, session_identifier: Optional[str]) -> None: + if not session_identifier: + return + + session = ( + db.query(UserSessions) + .filter( + UserSessions.session_identifier == session_identifier, + UserSessions.is_active.is_(True), + ) + .first() + ) + if not session: + return + + now = datetime.utcnow() + if session.last_seen_at and now - session.last_seen_at < LAST_SEEN_UPDATE_INTERVAL: + return + + session.last_seen_at = now + db.add(session) + db.commit() diff --git a/frontend/src/components/CustomCardHeader.tsx b/frontend/src/components/CustomCardHeader.tsx index 24667416..eb9df1db 100644 --- a/frontend/src/components/CustomCardHeader.tsx +++ b/frontend/src/components/CustomCardHeader.tsx @@ -24,10 +24,9 @@ export const CustomCardHeader: React.FC 
= ({ = ({ fontSize: "1.1rem", }} > - - {title} - {Icon && ( )} + + {title} + } sx={{ diff --git a/frontend/src/components/Topbar.tsx b/frontend/src/components/Topbar.tsx index 8f5a6fba..a5cef4c5 100644 --- a/frontend/src/components/Topbar.tsx +++ b/frontend/src/components/Topbar.tsx @@ -36,6 +36,7 @@ import { import { BgColor } from "@/constants"; import { useIsActiveRoute } from "@/hooks"; import { useGetUnreadNotificationCount } from "@/service"; +import { clearTrackedSession, notifyTrackedLogout } from "@/utils/SessionTracking"; export const Topbar = ({ open, @@ -108,9 +109,12 @@ export const Topbar = ({ handlePublicMenuOpen(event); }; - const fullSignOut = () => { + const fullSignOut = async () => { + await notifyTrackedLogout("manual_logout"); navigate({ to: "/", search: {} }); localStorage.removeItem("loggedIn"); + localStorage.removeItem("_auth"); + clearTrackedSession(); signOut(); }; diff --git a/frontend/src/hooks/useFetchWithAuth.ts b/frontend/src/hooks/useFetchWithAuth.ts index 7fb702bc..8d654608 100644 --- a/frontend/src/hooks/useFetchWithAuth.ts +++ b/frontend/src/hooks/useFetchWithAuth.ts @@ -4,6 +4,7 @@ import { formatQueryParams } from "@/utils"; import { enqueueSnackbar } from "notistack"; import { HttpStatus } from "@/enums"; import { API_URL } from "@/config"; +import { clearTrackedSession, notifyTrackedLogout } from "@/utils/SessionTracking"; export const useFetchWithAuth = () => { const authHeader = useAuthHeader(); @@ -50,7 +51,10 @@ export const useFetchWithAuth = () => { response.status === HttpStatus.LOGIN_TIMEOUT && localStorage.getItem("loggedIn") ) { + void notifyTrackedLogout("session_expired"); localStorage.removeItem("loggedIn"); + localStorage.removeItem("_auth"); + clearTrackedSession(); navigate({ to: "/" }); signOut(); enqueueSnackbar("Session expired. 
Please log in to continue.", { diff --git a/frontend/src/interfaces/UserSessions.ts b/frontend/src/interfaces/UserSessions.ts new file mode 100644 index 00000000..0b755ff5 --- /dev/null +++ b/frontend/src/interfaces/UserSessions.ts @@ -0,0 +1,33 @@ +export interface UserSessionSummary { + session_identifier: string; + device_label?: string | null; + device_type?: string | null; + browser?: string | null; + operating_system?: string | null; + ip_address?: string | null; + signed_in_at: string; + last_seen_at: string; + signed_out_at?: string | null; + is_active: boolean; + sign_out_reason_name?: string | null; + is_current: boolean; +} + +export interface KnownDeviceSummary { + device_key: string; + device_label?: string | null; + device_type?: string | null; + browser?: string | null; + operating_system?: string | null; + session_count: number; + active_session_count: number; + signed_in_at_first: string; + last_seen_at: string; + is_current_device: boolean; +} + +export interface UserSessionsResponse { + current_session_identifier?: string | null; + sessions: UserSessionSummary[]; + known_devices: KnownDeviceSummary[]; +} diff --git a/frontend/src/interfaces/index.ts b/frontend/src/interfaces/index.ts index b671acd0..2252e04a 100644 --- a/frontend/src/interfaces/index.ts +++ b/frontend/src/interfaces/index.ts @@ -66,6 +66,7 @@ export * from "./Unit"; export * from "./UpdatedUserPassword"; export * from "./User"; export * from "./UserRole"; +export * from "./UserSessions"; export * from "./WaterLevelQueryParams"; export * from "./WaterSource"; export * from "./Well"; diff --git a/frontend/src/service/ApiServiceNew.ts b/frontend/src/service/ApiServiceNew.ts index fb8597cc..12fa8b60 100644 --- a/frontend/src/service/ApiServiceNew.ts +++ b/frontend/src/service/ApiServiceNew.ts @@ -62,6 +62,7 @@ import { IncreaseQuantityPayload } from "@/interfaces"; import { WorkOrderStatus } from "@/enums"; import { API_URL } from "@/config"; import { useNavigate } from 
"@tanstack/react-router"; +import { clearTrackedSession, notifyTrackedLogout } from "@/utils/SessionTracking"; import { PartHistoryResponse, UpdatePartHistoryPayload, @@ -209,7 +210,10 @@ async function GETFetch( if (!response.ok) { // If backend indicates that user's token is expired, log them out and notify if (response.status == 440 && localStorage.getItem("loggedIn")) { + void notifyTrackedLogout("session_expired"); localStorage.removeItem("loggedIn"); + localStorage.removeItem("_auth"); + clearTrackedSession(); navigate({ to: "/" }); signOut(); enqueueSnackbar("Your session has expired, please login again.", { diff --git a/frontend/src/utils/SessionTracking.ts b/frontend/src/utils/SessionTracking.ts new file mode 100644 index 00000000..5ed48494 --- /dev/null +++ b/frontend/src/utils/SessionTracking.ts @@ -0,0 +1,281 @@ +import { API_URL } from "@/config"; + +const SESSION_IDENTIFIER_STORAGE_KEY = "wmdb_session_identifier"; +const SESSION_FINGERPRINT_STORAGE_KEY = "wmdb_session_fingerprint"; + +export type SessionTrackingMetadata = { + browser: string; + operatingSystem: string; + deviceType: string; + deviceLabel: string; + fingerprintHash: string; +}; + +let sessionTrackingMetadataPromise: Promise | null = null; + +function parseBrowser(userAgent: string) { + const browserMatchers: Array<[RegExp, string]> = [ + [/Edg\//, "Microsoft Edge"], + [/OPR\//, "Opera"], + [/SamsungBrowser\//, "Samsung Internet"], + [/CriOS\//, "Chrome (iOS)"], + [/Chrome\//, "Chrome"], + [/Chromium\//, "Chromium"], + [/FxiOS\//, "Firefox (iOS)"], + [/Firefox\//, "Firefox"], + [/Version\/.*Safari\//, "Safari"], + ]; + + return browserMatchers.find(([pattern]) => pattern.test(userAgent))?.[1] ?? 
"Unknown Browser"; +} + +function parseOperatingSystem(userAgent: string) { + const operatingSystemMatchers: Array<[RegExp, string]> = [ + [/Windows NT/i, "Windows"], + [/Android/i, "Android"], + [/iPhone/i, "iOS"], + [/iPad/i, "iPadOS"], + [/Mac OS X/i, "macOS"], + [/CrOS/i, "ChromeOS"], + [/Linux/i, "Linux"], + ]; + + return ( + operatingSystemMatchers.find(([pattern]) => pattern.test(userAgent))?.[1] ?? + "Unknown OS" + ); +} + +function parseDeviceType(userAgent: string) { + if (/iPad|Tablet/i.test(userAgent)) return "Tablet"; + if (/Mobile|iPhone|Android/i.test(userAgent)) return "Mobile"; + return "Desktop"; +} + +function buildDeviceLabel(browser: string, operatingSystem: string) { + return `${browser} on ${operatingSystem}`; +} + +function toHex(buffer: ArrayBuffer) { + return Array.from(new Uint8Array(buffer)) + .map((value) => value.toString(16).padStart(2, "0")) + .join(""); +} + +async function hashString(value: string) { + if (!window.crypto?.subtle) { + return Array.from(value) + .map((character) => character.charCodeAt(0).toString(16).padStart(2, "0")) + .join("") + .slice(0, 128); + } + + const encodedValue = new TextEncoder().encode(value); + const digest = await window.crypto.subtle.digest("SHA-256", encodedValue); + return toHex(digest); +} + +function getCanvasFingerprint() { + try { + const canvas = document.createElement("canvas"); + const context = canvas.getContext("2d"); + if (!context) return "canvas-unavailable"; + + context.textBaseline = "top"; + context.font = "14px Arial"; + context.fillStyle = "#1b4d89"; + context.fillRect(2, 2, 80, 20); + context.fillStyle = "#f5f5f5"; + context.fillText("WaterManagerDB", 4, 4); + context.strokeStyle = "#ff7a59"; + context.arc(60, 30, 20, 0, Math.PI * 2); + context.stroke(); + + return canvas.toDataURL(); + } catch { + return "canvas-error"; + } +} + +function getWebGLFingerprint() { + try { + const canvas = document.createElement("canvas"); + const gl = + canvas.getContext("webgl") || 
canvas.getContext("experimental-webgl"); + + if (!gl || !(gl instanceof WebGLRenderingContext)) { + return { + vendor: "webgl-unavailable", + renderer: "webgl-unavailable", + }; + } + + const debugInfo = gl.getExtension("WEBGL_debug_renderer_info"); + return { + vendor: debugInfo + ? gl.getParameter(debugInfo.UNMASKED_VENDOR_WEBGL) + : "vendor-unavailable", + renderer: debugInfo + ? gl.getParameter(debugInfo.UNMASKED_RENDERER_WEBGL) + : "renderer-unavailable", + }; + } catch { + return { + vendor: "webgl-error", + renderer: "webgl-error", + }; + } +} + +async function computeFingerprintHash() { + const webglFingerprint = getWebGLFingerprint(); + const navigatorWithDeviceMemory = navigator as Navigator & { + deviceMemory?: number; + }; + + const fingerprintPayload = { + userAgent: navigator.userAgent, + language: navigator.language, + languages: navigator.languages, + platform: navigator.platform, + vendor: navigator.vendor, + hardwareConcurrency: navigator.hardwareConcurrency, + deviceMemory: navigatorWithDeviceMemory.deviceMemory ?? null, + maxTouchPoints: navigator.maxTouchPoints, + cookieEnabled: navigator.cookieEnabled, + doNotTrack: navigator.doNotTrack, + webdriver: navigator.webdriver, + timezone: Intl.DateTimeFormat().resolvedOptions().timeZone, + timezoneOffset: new Date().getTimezoneOffset(), + screen: { + width: window.screen.width, + height: window.screen.height, + availWidth: window.screen.availWidth, + availHeight: window.screen.availHeight, + colorDepth: window.screen.colorDepth, + pixelDepth: window.screen.pixelDepth, + }, + colorSchemeDark: window.matchMedia?.("(prefers-color-scheme: dark)").matches ?? null, + reducedMotion: window.matchMedia?.("(prefers-reduced-motion: reduce)").matches ?? null, + localStorage: typeof window.localStorage !== "undefined", + sessionStorage: typeof window.sessionStorage !== "undefined", + indexedDb: typeof window.indexedDB !== "undefined", + plugins: Array.from(navigator.plugins ?? 
[]).map((plugin) => plugin.name), + mimeTypes: Array.from(navigator.mimeTypes ?? []).map((mimeType) => mimeType.type), + canvas: getCanvasFingerprint(), + webgl: webglFingerprint, + }; + + return hashString(JSON.stringify(fingerprintPayload)); +} + +export async function collectSessionTrackingMetadata(): Promise { + if (sessionTrackingMetadataPromise) { + return sessionTrackingMetadataPromise; + } + + sessionTrackingMetadataPromise = (async () => { + try { + const browser = parseBrowser(navigator.userAgent); + const operatingSystem = parseOperatingSystem(navigator.userAgent); + const deviceType = parseDeviceType(navigator.userAgent); + const deviceLabel = buildDeviceLabel(browser, operatingSystem); + const fingerprintHash = await computeFingerprintHash(); + + return { + browser, + operatingSystem, + deviceType, + deviceLabel, + fingerprintHash, + }; + } catch { + return { + browser: "Unknown Browser", + operatingSystem: "Unknown OS", + deviceType: "Unknown Device", + deviceLabel: "Unknown Browser on Unknown OS", + fingerprintHash: "fingerprint-unavailable", + }; + } + })(); + + return sessionTrackingMetadataPromise; +} + +export function buildSessionTrackingHeaders(metadata: SessionTrackingMetadata) { + return { + "X-Device-Fingerprint": metadata.fingerprintHash, + "X-Browser": metadata.browser, + "X-Operating-System": metadata.operatingSystem, + "X-Device-Type": metadata.deviceType, + "X-Device-Label": metadata.deviceLabel, + }; +} + +export function persistTrackedSession( + sessionIdentifier: string, + fingerprintHash: string, +) { + window.localStorage.setItem(SESSION_IDENTIFIER_STORAGE_KEY, sessionIdentifier); + window.localStorage.setItem(SESSION_FINGERPRINT_STORAGE_KEY, fingerprintHash); +} + +export function clearTrackedSession() { + window.localStorage.removeItem(SESSION_IDENTIFIER_STORAGE_KEY); + window.localStorage.removeItem(SESSION_FINGERPRINT_STORAGE_KEY); +} + +export function getTrackedSession() { + const sessionIdentifier = 
window.localStorage.getItem( + SESSION_IDENTIFIER_STORAGE_KEY, + ); + const fingerprintHash = window.localStorage.getItem( + SESSION_FINGERPRINT_STORAGE_KEY, + ); + + if (!sessionIdentifier) return null; + + return { + sessionIdentifier, + fingerprintHash, + }; +} + +export async function notifyTrackedLogout(reasonName: string) { + const trackedSession = getTrackedSession(); + const authToken = window.localStorage.getItem("_auth"); + + try { + if (reasonName !== "session_expired" && authToken) { + await fetch(`${API_URL}/logout`, { + method: "POST", + headers: { + Authorization: `Bearer ${authToken}`, + "Content-Type": "application/json", + }, + body: JSON.stringify({ + sign_out_reason_name: reasonName, + fingerprint_hash: trackedSession?.fingerprintHash ?? null, + }), + }); + return; + } + + if (trackedSession) { + await fetch(`${API_URL}/logout/expired`, { + method: "POST", + headers: { + "Content-Type": "application/json", + }, + body: JSON.stringify({ + session_identifier: trackedSession.sessionIdentifier, + sign_out_reason_name: reasonName, + fingerprint_hash: trackedSession.fingerprintHash ?? null, + }), + }); + } + } catch { + // Best effort only. Local sign-out should continue even if audit logging fails. 
+ } +} diff --git a/frontend/src/views/Login.tsx b/frontend/src/views/Login.tsx index 6651bf33..c5551451 100644 --- a/frontend/src/views/Login.tsx +++ b/frontend/src/views/Login.tsx @@ -22,6 +22,11 @@ import { enqueueSnackbar } from "notistack"; import { SecurityScope } from "@/interfaces"; import { API_URL } from "@/config"; import { CustomCardHeader } from "@/components"; +import { + buildSessionTrackingHeaders, + collectSessionTrackingMetadata, + persistTrackedSession, +} from "@/utils/SessionTracking"; export const Login = () => { const [loginIdentifier, setLoginIdentifier] = useState(""); @@ -34,15 +39,21 @@ export const Login = () => { const authUser = useAuthUser(); const navigate = useNavigate(); - const handleSubmit = (event: React.FormEvent) => { + const handleSubmit = async (event: React.FormEvent) => { event.preventDefault(); const body = new FormData(); body.append("username", loginIdentifier); body.append("password", password); - fetch(`${API_URL}/token`, { method: "POST", body }) - .then(handleLogin) + const sessionTrackingMetadata = await collectSessionTrackingMetadata(); + + fetch(`${API_URL}/token`, { + method: "POST", + body, + headers: buildSessionTrackingHeaders(sessionTrackingMetadata), + }) + .then((res) => handleLogin(res, sessionTrackingMetadata.fingerprintHash)) .catch((_) => { setError( "Unable to connect to the server. Please check your internet connection and try again. If the issue persists, contact support.", @@ -56,7 +67,7 @@ export const Login = () => { } }, [isAuthenticated, navigate]); - function handleLogin(res: Response) { + function handleLogin(res: Response, fingerprintHash: string) { if (res.status === 200) { res.json().then((data) => { if ( @@ -80,6 +91,9 @@ export const Login = () => { ) { localStorage.setItem("_auth", data.access_token); localStorage.setItem("loggedIn", "true"); + if (data.session_identifier) { + persistTrackedSession(data.session_identifier, fingerprintHash); + } navigate({ to: data.user.redirect_page ?? 
"/" }); } else { setError("Invalid username, email, or password. Please try again."); diff --git a/frontend/src/views/Settings.tsx b/frontend/src/views/Settings.tsx index f03bfdbe..50ea875d 100644 --- a/frontend/src/views/Settings.tsx +++ b/frontend/src/views/Settings.tsx @@ -1,33 +1,45 @@ -import { useEffect, useState } from "react"; +import { useEffect, useMemo, useState } from "react"; import * as yup from "yup"; import { enqueueSnackbar } from "notistack"; import { yupResolver } from "@hookform/resolvers/yup"; -import { useForm, Controller } from "react-hook-form"; +import { Controller, useForm } from "react-hook-form"; import { + Alert, + Avatar, + Box, + Button, Card, CardContent, + Chip, Divider, - Typography, - Box, - MenuItem, - TextField, Grid, + IconButton, + InputAdornment, ListItemIcon, - Chip, - Accordion, - AccordionSummary, - AccordionDetails, - Button, - ListSubheader, + MenuItem, Skeleton, - IconButton, Stack, - InputAdornment, + TextField, + Typography, } from "@mui/material"; -import SettingsIcon from "@mui/icons-material/Settings"; +import { alpha } from "@mui/material/styles"; +import { + Check, + CheckCircleOutline, + DeleteOutline, + DevicesRounded, + Edit, + HistoryRounded, + LaptopMacRounded, + PhoneIphoneRounded, + Settings as SettingsIcon, + SettingsApplications, + ShieldOutlined, + TabletMacRounded, + Visibility, + VisibilityOff, +} from "@mui/icons-material"; import { useAuthUser, useSignIn } from "react-auth-kit"; -import { Check, Close, Delete, Edit, ExpandMore } from "@mui/icons-material"; -import { Visibility, VisibilityOff } from "@mui/icons-material"; import { useMutation, useQuery, useQueryClient } from "react-query"; import { BackgroundBox, @@ -38,14 +50,14 @@ import { } from "@/components"; import { navConfig } from "@/constants"; import { useFetchWithAuth } from "@/hooks"; -import { SecurityScope } from "@/interfaces"; +import { + KnownDeviceSummary, + SecurityScope, + UserSessionSummary, + UserSessionsResponse, +} from 
"@/interfaces"; import { clearSavedQueryLocalStorage } from "@/service"; - -const redirectOptions = { - public: navConfig.filter((item) => !item.role), - technician: navConfig.filter((item) => item.role === "Technician"), - admin: navConfig.filter((item) => item.role === "Admin"), -}; +import { getTrackedSession } from "@/utils/SessionTracking"; const redirectSchema = yup.object().shape({ redirect_page: yup.string().required("Please select a redirect page"), @@ -63,11 +75,347 @@ const passwordSchema = yup.object().shape({ .required("Please confirm new password"), }); +function formatDateTime(value?: string | null) { + if (!value) return "Not available"; + + return new Intl.DateTimeFormat("en-US", { + dateStyle: "medium", + timeStyle: "short", + }).format(new Date(value)); +} + +function formatRelativeTime(value?: string | null) { + if (!value) return "Unknown"; + + const timestamp = new Date(value).getTime(); + const diffMs = timestamp - Date.now(); + const absMinutes = Math.round(Math.abs(diffMs) / (1000 * 60)); + + if (absMinutes < 1) return "Just now"; + if (absMinutes < 60) { + return `${absMinutes} minute${absMinutes === 1 ? "" : "s"} ${ + diffMs >= 0 ? "from now" : "ago" + }`; + } + + const absHours = Math.round(absMinutes / 60); + if (absHours < 24) { + return `${absHours} hour${absHours === 1 ? "" : "s"} ${ + diffMs >= 0 ? "from now" : "ago" + }`; + } + + const absDays = Math.round(absHours / 24); + return `${absDays} day${absDays === 1 ? "" : "s"} ${ + diffMs >= 0 ? 
"from now" : "ago" + }`; +} + +function formatReasonLabel(value?: string | null) { + if (!value) return ""; + return value.split("_").join(" "); +} + +function getDeviceIcon(deviceType?: string | null) { + switch (deviceType) { + case "Mobile": + return PhoneIphoneRounded; + case "Tablet": + return TabletMacRounded; + default: + return LaptopMacRounded; + } +} + +function InfoTile({ + label, + value, + compact = false, +}: { + label: string; + value: React.ReactNode; + compact?: boolean; +}) { + return ( + + + {label} + + {value} + + ); +} + +function SectionCard({ + title, + description, + icon: Icon, + children, +}: { + title: string; + description: string; + icon: typeof ShieldOutlined; + children: React.ReactNode; +}) { + return ( + + + + + + + + + + {title} + + + {description} + + + + {children} + + + + ); +} + +function SessionRow({ + session, + onCloseSession, + isClosing, +}: { + session: UserSessionSummary; + onCloseSession: (sessionIdentifier: string) => void; + isClosing: boolean; +}) { + const DeviceIcon = getDeviceIcon(session.device_type); + const statusColor = session.is_active ? "success" : "default"; + + return ( + + + + + + + + + + {session.device_label || "Unknown device"} + + + {[session.browser, session.operating_system, session.ip_address] + .filter(Boolean) + .join(" • ") || "No device details available"} + + + + + {session.is_current ? ( + } + label="Current device" + /> + ) : null} + + + + + + + + Signed in + + {formatDateTime(session.signed_in_at)} + + + + Last seen + + + {formatDateTime(session.last_seen_at)} ({formatRelativeTime(session.last_seen_at)}) + + + + + Sign-out status + + + {session.signed_out_at + ? `${formatDateTime(session.signed_out_at)}${ + session.sign_out_reason_name + ? 
` • ${formatReasonLabel(session.sign_out_reason_name)}` + : "" + }` + : "Still active"} + + + + + + + + + + ); +} + +function KnownDeviceRow({ device }: { device: KnownDeviceSummary }) { + const DeviceIcon = getDeviceIcon(device.device_type); + + return ( + + + + + + + + + + {device.device_label || "Unknown device"} + + + {[device.browser, device.operating_system, device.device_type] + .filter(Boolean) + .join(" • ") || "No device details available"} + + + + {device.is_current_device ? ( + } label="Current device" /> + ) : null} + + + + + + Sessions + + {device.session_count} + + + + Active now + + {device.active_session_count} + + + + First seen + + + {formatDateTime(device.signed_in_at_first)} + + + + + Last seen + + {formatDateTime(device.last_seen_at)} + + + + + ); +} + export const Settings = () => { const authUser = useAuthUser(); const user = authUser(); const signIn = useSignIn(); const fetchWithAuth = useFetchWithAuth(); + const queryClient = useQueryClient(); + const trackedSession = getTrackedSession(); + const scopes: Set = new Set( authUser()?.user_role?.security_scopes?.map( (scope: SecurityScope) => scope.scope_string, @@ -76,6 +424,16 @@ export const Settings = () => { const hasReadScope = scopes.has("read"); const hasAdminScope = scopes.has("admin"); + const redirectOptions = useMemo( + () => + navConfig.filter((item) => { + if (!item.role) return true; + if (item.role === "Technician") return hasReadScope; + if (item.role === "Admin") return hasAdminScope; + return false; + }), + [hasAdminScope, hasReadScope], + ); const [isEditing, setIsEditing] = useState(false); const [avatarFiles, setAvatarFiles] = useState([]); @@ -94,30 +452,30 @@ export const Settings = () => { }); const displayNameMutation = useMutation({ - mutationFn: async (data: { display_name: string }) => { - return await fetchWithAuth({ + mutationFn: async (data: { display_name: string }) => + fetchWithAuth({ method: "POST", route: "/settings/display_name", body: data, - }); - }, - 
onSuccess: (responseJson: any) => { + }), + onSuccess: (responseJson: { display_name: string }) => { enqueueSnackbar("Display name updated successfully.", { variant: "success", }); - // Grab the current auth state & update it if (user) { signIn({ - token: localStorage.getItem("_auth")!, // reuse current token - expiresIn: 300, // reuse the expiry window you want + token: localStorage.getItem("_auth")!, + expiresIn: 300, tokenType: "bearer", authState: { ...user, - display_name: responseJson.display_name, // overwrite just this field + display_name: responseJson.display_name, }, }); } + + setIsEditing(false); }, onError: () => { enqueueSnackbar("Failed to update display name.", { variant: "error" }); @@ -128,7 +486,6 @@ export const Settings = () => { displayNameMutation.mutate({ display_name }); }; - const queryClient = useQueryClient(); const getRedirectPageQuery = useQuery({ queryKey: ["redirectPage"], queryFn: async () => @@ -139,28 +496,26 @@ export const Settings = () => { }); const redirectMutation = useMutation({ - mutationFn: async (data: { redirect_page: string }) => { - return await fetchWithAuth({ + mutationFn: async (data: { redirect_page: string }) => + fetchWithAuth({ method: "POST", route: "/settings/redirect_page", body: data, - }); - }, + }), onSuccess: (responseJson: { message: string; redirect_page: string }) => { enqueueSnackbar("Redirect page updated successfully.", { variant: "success", }); queryClient.invalidateQueries(["redirectPage"]); - // Grab the current auth state & update it if (user) { signIn({ - token: localStorage.getItem("_auth")!, // reuse current token - expiresIn: 300, // reuse the expiry window you want + token: localStorage.getItem("_auth")!, + expiresIn: 300, tokenType: "bearer", authState: { ...user, - redirect_page: responseJson.redirect_page, // overwrite just this field + redirect_page: responseJson.redirect_page, }, }); } @@ -174,12 +529,13 @@ export const Settings = () => { control: redirectControl, handleSubmit: 
handleRedirectSubmit, reset: redirectReset, + watch: watchRedirectPage, } = useForm({ resolver: yupResolver(redirectSchema), defaultValues: { redirect_page: getRedirectPageQuery?.data?.redirect_page ?? "/", }, - values: { redirect_page: getRedirectPageQuery?.data?.redirect_page ?? "/" }, // react-hook-form v7 pattern for sync + values: { redirect_page: getRedirectPageQuery?.data?.redirect_page ?? "/" }, }); useEffect(() => { @@ -188,24 +544,28 @@ export const Settings = () => { } }, [getRedirectPageQuery.data, redirectReset]); - const onRedirectSubmit = (data: any) => { + const onRedirectSubmit = (data: { redirect_page: string }) => { redirectMutation.mutate(data); }; + const currentRedirectPage = getRedirectPageQuery.data?.redirect_page ?? "/"; + const selectedRedirectPage = watchRedirectPage("redirect_page"); + const isRedirectSelectionUnchanged = + selectedRedirectPage === currentRedirectPage; + const passwordMutation = useMutation({ mutationFn: async (data: { currentPassword: string; newPassword: string; - }) => { - return await fetchWithAuth({ + }) => + fetchWithAuth({ method: "POST", route: "/settings/password_reset", body: { current_password: data.currentPassword, new_password: data.newPassword, }, - }); - }, + }), onSuccess: () => { enqueueSnackbar("Password updated successfully.", { variant: "success", @@ -240,7 +600,10 @@ export const Settings = () => { }, }); - const onPasswordSubmit = (data: any) => { + const onPasswordSubmit = (data: { + currentPassword: string; + newPassword: string; + }) => { passwordMutation.mutate({ currentPassword: data.currentPassword, newPassword: data.newPassword, @@ -252,7 +615,7 @@ export const Settings = () => { const formData = new FormData(); formData.append("avatar", file); - return await fetchWithAuth({ + return fetchWithAuth({ method: "POST", route: "/settings/avatar", body: formData, @@ -283,12 +646,11 @@ export const Settings = () => { }); const clearAvatarMutation = useMutation({ - mutationFn: async () => { - return 
await fetchWithAuth({ + mutationFn: async () => + fetchWithAuth({ method: "DELETE", route: "/settings/avatar", - }); - }, + }), onSuccess: () => { enqueueSnackbar("Avatar removed successfully.", { variant: "success", @@ -325,6 +687,50 @@ export const Settings = () => { avatarMutation.mutate(file); }; + const userSessionsQuery = useQuery({ + queryKey: ["userSessions"], + queryFn: async () => + fetchWithAuth({ + method: "GET", + route: "/user-sessions", + }), + }); + + const closeSessionMutation = useMutation({ + mutationFn: async (sessionIdentifier: string) => + fetchWithAuth({ + method: "DELETE", + route: `/user-sessions/${sessionIdentifier}`, + }), + onSuccess: () => { + enqueueSnackbar("Session closed successfully.", { + variant: "success", + }); + queryClient.invalidateQueries(["userSessions"]); + }, + onError: (error: Error) => { + enqueueSnackbar(error.message || "Failed to close session.", { + variant: "error", + }); + }, + }); + + const sessions = useMemo( + () => + (userSessionsQuery.data?.sessions ?? []).map((session) => ({ + ...session, + is_current: + session.is_current || + session.session_identifier === trackedSession?.sessionIdentifier, + })), + [trackedSession?.sessionIdentifier, userSessionsQuery.data?.sessions], + ); + + const knownDevices = useMemo( + () => userSessionsQuery.data?.known_devices ?? [], + [userSessionsQuery.data?.known_devices], + ); + const handleClearCachedData = () => { setIsClearingCachedData(true); @@ -346,558 +752,503 @@ export const Settings = () => { return ( - + - {/* User Info */} - - - User Information - - - - + + + - Full Name: - - - - Email: - - - - Username: - - - - {!isEditing ? ( - <> - Display Name: - + + + {user?.full_name ?? "N/A"} + + } /> - setIsEditing(true)} - > - - - - ) : ( - <> - ( - - )} + + + + {user?.email ?? "N/A"} + + } /> - - { - displayNameReset({ - display_name: user?.display_name ?? 
"", - }); - setIsEditing(false); - }} - > - - - - - - - - )} - - - Role: - - - - Active: - - - - - - - Preferences - - - - - }> - Avatar Configuration - - - + + + {user?.username ?? "N/A"} + + } + /> + + + } + /> + + + } + /> + + + + + {hasAdminScope ? ( + + ) : null} + + } + /> + + + + + + + - - - - - + + + Display name + + + This is how your name appears across the application. + + + + {!isEditing ? ( + + + + ) : ( + + ( + + )} + /> - - - - - - - }> - - Redirect Page After Login - - - - - -
- - - { - // flatten all available paths - const availablePaths = [ - ...redirectOptions.public.map((o) => o.path), - ...(hasReadScope - ? redirectOptions.technician.map( - (o) => o.path, - ) - : []), - ...(hasAdminScope - ? redirectOptions.admin.map((o) => o.path) - : []), - ]; - - // guard: if no options available yet, render empty select - if ( - getRedirectPageQuery.isFetching && - availablePaths.length === 0 - ) { - return ( - - ); - } - - const safeValue = availablePaths.includes( - field.value, - ) - ? field.value - : "/"; - - return ( - field.onChange(e)} - > - {redirectOptions.public.length > 0 && [ - - Pages - , - ...redirectOptions.public.map( - (option) => { - const Icon = option.icon; - return ( - - - - - - {option.label} - - - ); - }, - ), - ]} - {hasReadScope && - redirectOptions.technician.length > 0 && [ - - Pages - , - ...redirectOptions.technician.map( - (option) => { - const Icon = option.icon; - return ( - - - - - - {option.label} - {option.parent === "reports" - ? " Report" - : null} - - - ); - }, - ), - ]} - {hasAdminScope && - redirectOptions.admin.length > 0 && [ - - Pages - , - ...redirectOptions.admin.map( - (option) => { - const Icon = option.icon; - return ( - - - - - - {option.label} - - - ); - }, - ), - ]} - - ); - }} - /> - - - - - -
-
-
-
-
- - }> - Password Reset - - - - -
- - - ( - - - setShowCurrentPassword( - (show) => !show, - ) - } - > - {showCurrentPassword ? ( - - ) : ( - - )} - - - ), - }} - /> - )} - /> - - - ( - - - setShowNewPassword((show) => !show) - } - > - {showNewPassword ? ( - - ) : ( - - )} - - - ), - }} - /> - )} - /> - - - ( - - - setShowConfirmPassword( - (show) => !show, - ) - } - > - {showConfirmPassword ? ( - - ) : ( - - )} - - - ), - }} - /> - )} - /> - - - + + + )} + + + + + + + + Avatar + + + Upload or replace your account image. + + + + + + + + - -
+ + +
-
- - - - - - Cached Data - - - - - }> - Clear Saved Cache - - + + + + + + + + + + Default landing page + + + Choose where the app should take you after you sign in. + + + {getRedirectPageQuery.isLoading ? ( + + ) : ( + ( + + {redirectOptions.map((route) => { + const RouteIcon = route.icon; + + return ( + + + + + {route.label} + + ); + })} + + )} + /> + )} + + + + + + + + + + + Cached map data + + + Clear saved client-side caches if the app feels out of sync. + + + + + + + + + - - Clears stored app data so pages load fresh information the - next time you open them. + + Change password - + ( + + + setShowCurrentPassword((current) => !current) + } + edge="end" + > + {showCurrentPassword ? : } + + + ), + }} + /> + )} + /> + ( + + setShowNewPassword((current) => !current)} + edge="end" + > + {showNewPassword ? : } + + + ), + }} + /> + )} + /> + ( + + + setShowConfirmPassword((current) => !current) + } + edge="end" + > + {showConfirmPassword ? : } + + + ), + }} + /> + )} + /> + - - - + + + + + + + + + {userSessionsQuery.isLoading ? ( + + + + + + ) : userSessionsQuery.isError ? ( + + Unable to load session history right now. + + ) : sessions.length === 0 ? ( + No recorded sessions were found. + ) : ( + + {sessions.map((session) => ( + + closeSessionMutation.mutate(sessionIdentifier) + } + /> + ))} + + )} + + + + {userSessionsQuery.isLoading ? ( + + + + + ) : userSessionsQuery.isError ? ( + + Unable to load known devices right now. + + ) : knownDevices.length === 0 ? ( + No known devices were found. + ) : ( + + {knownDevices.map((device) => ( + + ))} + + )} + + +
diff --git a/migrations/20260318070000_create_user_sessions_table.down.sql b/migrations/20260318070000_create_user_sessions_table.down.sql new file mode 100644 index 00000000..1dded3bc --- /dev/null +++ b/migrations/20260318070000_create_user_sessions_table.down.sql @@ -0,0 +1,12 @@ +DROP INDEX IF EXISTS public.ix_user_sessions_sign_out_reason_type_id; +DROP INDEX IF EXISTS public.ix_user_sessions_fingerprint_hash; +DROP INDEX IF EXISTS public.ix_user_sessions_signed_out_at; +DROP INDEX IF EXISTS public.ix_user_sessions_signed_in_at; +DROP INDEX IF EXISTS public.ix_user_sessions_last_seen_at; +DROP INDEX IF EXISTS public.ix_user_sessions_is_active; +DROP INDEX IF EXISTS public.ix_user_sessions_session_identifier; +DROP INDEX IF EXISTS public.ix_user_sessions_user_id; +DROP INDEX IF EXISTS public.ix_user_sessions_id; + +DROP TABLE IF EXISTS public.user_sessions; +DROP TABLE IF EXISTS public.sign_out_reason_type_lu; diff --git a/migrations/20260318070000_create_user_sessions_table.up.sql b/migrations/20260318070000_create_user_sessions_table.up.sql new file mode 100644 index 00000000..1e6ea780 --- /dev/null +++ b/migrations/20260318070000_create_user_sessions_table.up.sql @@ -0,0 +1,70 @@ +CREATE TABLE public.sign_out_reason_type_lu ( + id serial4 NOT NULL, + "name" varchar(50) NOT NULL, + description text NULL, + CONSTRAINT sign_out_reason_type_lu_pkey PRIMARY KEY (id), + CONSTRAINT sign_out_reason_type_lu_name_key UNIQUE ("name") +); + +INSERT INTO public.sign_out_reason_type_lu ("name", description) VALUES + ('manual_logout', 'The user explicitly signed out of the application'), + ('session_expired', 'The client session expired before an authenticated logout could be completed'), + ('forced_logout', 'The session was invalidated administratively or due to a security event'), + ('unknown', 'The session ended without a known sign-out reason'); + +CREATE TABLE public.user_sessions ( + id serial4 NOT NULL, + user_id int4 NOT NULL, + session_identifier varchar(36) NOT 
NULL, + ip_address varchar(255) NULL, + user_agent text NULL, + device_label varchar(255) NULL, + device_type varchar(100) NULL, + browser varchar(100) NULL, + operating_system varchar(100) NULL, + fingerprint_hash varchar(128) NULL, + signed_in_at timestamp NOT NULL DEFAULT now(), + last_seen_at timestamp NOT NULL DEFAULT now(), + signed_out_at timestamp NULL, + is_active bool NOT NULL DEFAULT true, + sign_out_reason_type_id int4 NULL, + CONSTRAINT user_sessions_pkey PRIMARY KEY (id), + CONSTRAINT user_sessions_session_identifier_key UNIQUE (session_identifier), + CONSTRAINT fk_user_sessions_user + FOREIGN KEY (user_id) + REFERENCES public."Users"(id) + ON DELETE CASCADE + ON UPDATE CASCADE, + CONSTRAINT fk_user_sessions_sign_out_reason_type + FOREIGN KEY (sign_out_reason_type_id) + REFERENCES public.sign_out_reason_type_lu(id) + ON DELETE RESTRICT + ON UPDATE CASCADE +); + +CREATE INDEX ix_user_sessions_id + ON public.user_sessions USING btree (id); + +CREATE INDEX ix_user_sessions_user_id + ON public.user_sessions USING btree (user_id); + +CREATE INDEX ix_user_sessions_session_identifier + ON public.user_sessions USING btree (session_identifier); + +CREATE INDEX ix_user_sessions_is_active + ON public.user_sessions USING btree (is_active); + +CREATE INDEX ix_user_sessions_last_seen_at + ON public.user_sessions USING btree (last_seen_at); + +CREATE INDEX ix_user_sessions_signed_in_at + ON public.user_sessions USING btree (signed_in_at); + +CREATE INDEX ix_user_sessions_signed_out_at + ON public.user_sessions USING btree (signed_out_at); + +CREATE INDEX ix_user_sessions_fingerprint_hash + ON public.user_sessions USING btree (fingerprint_hash); + +CREATE INDEX ix_user_sessions_sign_out_reason_type_id + ON public.user_sessions USING btree (sign_out_reason_type_id); From feeb4dab2cc180e4b38cb89adc8d63186149f6c1 Mon Sep 17 00:00:00 2001 From: Tyler Adam Martinez Date: Thu, 19 Mar 2026 09:37:29 -0500 Subject: [PATCH 02/22] feat(Improve Settings UI & CustomHeader) --- 
frontend/src/AppLayout.tsx | 2 +- frontend/src/components/BackgroundBox.tsx | 1 - frontend/src/components/CustomCardHeader.tsx | 40 +- frontend/src/views/Settings.tsx | 1013 +++++++++--------- 4 files changed, 524 insertions(+), 532 deletions(-) diff --git a/frontend/src/AppLayout.tsx b/frontend/src/AppLayout.tsx index 67ec5ab9..3787512e 100644 --- a/frontend/src/AppLayout.tsx +++ b/frontend/src/AppLayout.tsx @@ -99,7 +99,7 @@ export const AppLayout = ({ children }: { children: JSX.Element }) => { sx={{ minHeight: "100vh", ml: shouldShowDesktopSidebar ? `${effectiveSidebarWidth}px` : 0, - mt: "4rem", + mt: "3.75rem", width: shouldShowDesktopSidebar ? `calc(100% - ${effectiveSidebarWidth}px)` : "100%", diff --git a/frontend/src/components/BackgroundBox.tsx b/frontend/src/components/BackgroundBox.tsx index 4c02ed9a..b9f852dc 100644 --- a/frontend/src/components/BackgroundBox.tsx +++ b/frontend/src/components/BackgroundBox.tsx @@ -12,7 +12,6 @@ export const BackgroundBox: React.FC = ({ mx: "auto", // center horizontally maxWidth: "2xl", height: "fit-content", - pb: 6, ...sx, }} {...rest} diff --git a/frontend/src/components/CustomCardHeader.tsx b/frontend/src/components/CustomCardHeader.tsx index eb9df1db..2d67e3c2 100644 --- a/frontend/src/components/CustomCardHeader.tsx +++ b/frontend/src/components/CustomCardHeader.tsx @@ -1,4 +1,3 @@ -import React from "react"; import { CardHeader, CardHeaderProps, @@ -14,6 +13,7 @@ type CustomCardHeaderProps = Omit & { export const CustomCardHeader: React.FC = ({ title, + subheader, icon: Icon = null, sx, ...rest @@ -24,11 +24,10 @@ export const CustomCardHeader: React.FC = ({ = ({ fontSize: "1.1rem", }} > + + {title} + {Icon && ( )} - - {title} - } + subheader={ + subheader && ( + + + {subheader} + + + ) + } sx={{ mb: 0, - pb: 0, + p: 0, ...sx, }} {...rest} diff --git a/frontend/src/views/Settings.tsx b/frontend/src/views/Settings.tsx index 50ea875d..ec3f12a0 100644 --- a/frontend/src/views/Settings.tsx +++ 
b/frontend/src/views/Settings.tsx @@ -32,7 +32,6 @@ import { HistoryRounded, LaptopMacRounded, PhoneIphoneRounded, - Settings as SettingsIcon, SettingsApplications, ShieldOutlined, TabletMacRounded, @@ -45,7 +44,6 @@ import { BackgroundBox, CustomCardHeader, ImageUploadWithPreview, - IsTrueChip, RoleChip, } from "@/components"; import { navConfig } from "@/constants"; @@ -170,41 +168,9 @@ function SectionCard({ children: React.ReactNode; }) { return ( - - - - - - - - - - {title} - - - {description} - - - - {children} - - + + + {children} ); } @@ -263,13 +229,6 @@ function SessionRow({ - {session.is_current ? ( - } - label="Current device" - /> - ) : null} Signed in - {formatDateTime(session.signed_in_at)} + + {formatDateTime(session.signed_in_at)} + Last seen - {formatDateTime(session.last_seen_at)} ({formatRelativeTime(session.last_seen_at)}) + {formatDateTime(session.last_seen_at)} ( + {formatRelativeTime(session.last_seen_at)}) @@ -371,7 +333,11 @@ function KnownDeviceRow({ device }: { device: KnownDeviceSummary }) { {device.is_current_device ? ( - } label="Current device" /> + } + label="Current device" + /> ) : null} @@ -386,7 +352,9 @@ function KnownDeviceRow({ device }: { device: KnownDeviceSummary }) { Active now - {device.active_session_count} + + {device.active_session_count} + @@ -400,7 +368,9 @@ function KnownDeviceRow({ device }: { device: KnownDeviceSummary }) { Last seen - {formatDateTime(device.last_seen_at)} + + {formatDateTime(device.last_seen_at)} + @@ -751,507 +721,514 @@ export const Settings = () => { return ( - - - - - - - - - - - {user?.full_name ?? "N/A"} - - } - /> - - - - {user?.email ?? "N/A"} - - } - /> - - - - {user?.username ?? "N/A"} - - } - /> - - - } - /> - - - } - /> - - - - - {hasAdminScope ? ( - - ) : null} - - } - /> - + + + + + + + + {user?.full_name ?? "N/A"} + + } + /> - - - - - - - - Display name + + + {user?.email ?? "N/A"} - - This is how your name appears across the application. + } + /> + + + + {user?.username ?? 
"N/A"} - - - {!isEditing ? ( - - - - - ) : ( - - ( - - )} - /> - - - - )} - - + } + /> + + + } + /> + + - - - - - Avatar - - - Upload or replace your account image. - - - - - - - - - - - - - - - - + - - - - - - Default landing page - - - Choose where the app should take you after you sign in. - - - {getRedirectPageQuery.isLoading ? ( - - ) : ( + + + Display name + + + This is how your name appears across the application. + + + + {!isEditing ? ( + + + + + ) : ( + ( - - {redirectOptions.map((route) => { - const RouteIcon = route.icon; - - return ( - - - - - {route.label} - - ); - })} - + )} /> - )} - + - - - + + )} + + - - - - - Cached map data - - - Clear saved client-side caches if the app feels out of sync. - - - - - - + Upload or replace your account image. + + + + + + + + + + + + + + + + - + - - + + - Change password + Default landing page + + Choose where the app should take you after you sign in. + + + {getRedirectPageQuery.isLoading ? ( + + ) : ( ( - - - setShowCurrentPassword((current) => !current) - } - edge="end" - > - {showCurrentPassword ? : } - - - ), - }} - /> - )} - /> - ( - - setShowNewPassword((current) => !current)} - edge="end" - > - {showNewPassword ? : } - - - ), - }} - /> - )} - /> - ( - - setShowConfirmPassword((current) => !current) - } - edge="end" + select + fullWidth + label="Redirect page" + > + {redirectOptions.map((route) => { + const RouteIcon = route.icon; + + return ( + + - {showConfirmPassword ? : } - - - ), - }} - /> + + + {route.label} + + ); + })} + )} /> - - - - - - - - + )} + + + + + - - - - {userSessionsQuery.isLoading ? ( - - - - - - ) : userSessionsQuery.isError ? ( - - Unable to load session history right now. - - ) : sessions.length === 0 ? ( - No recorded sessions were found. - ) : ( - - {sessions.map((session) => ( - - closeSessionMutation.mutate(sessionIdentifier) - } - /> - ))} - - )} - + + + + Cached map data + + + Clear saved client-side caches if the app feels out of + sync. 
+ + + + + + - + - {userSessionsQuery.isLoading ? ( - - - - - ) : userSessionsQuery.isError ? ( - - Unable to load known devices right now. - - ) : knownDevices.length === 0 ? ( - No known devices were found. - ) : ( - - {knownDevices.map((device) => ( - - ))} - - )} - - - - - - + + + Change password + + ( + + + setShowCurrentPassword((current) => !current) + } + edge="end" + > + {showCurrentPassword ? ( + + ) : ( + + )} + + + ), + }} + /> + )} + /> + ( + + + setShowNewPassword((current) => !current) + } + edge="end" + > + {showNewPassword ? ( + + ) : ( + + )} + + + ), + }} + /> + )} + /> + ( + + + setShowConfirmPassword((current) => !current) + } + edge="end" + > + {showConfirmPassword ? ( + + ) : ( + + )} + + + ), + }} + /> + )} + /> + + + + + + + + + + + + + {userSessionsQuery.isLoading ? ( + + + + + + ) : userSessionsQuery.isError ? ( + + Unable to load session history right now. + + ) : sessions.length === 0 ? ( + No recorded sessions were found. + ) : ( + + {sessions.map((session) => ( + + closeSessionMutation.mutate(sessionIdentifier) + } + /> + ))} + + )} + + + + {userSessionsQuery.isLoading ? ( + + + + + ) : userSessionsQuery.isError ? ( + + Unable to load known devices right now. + + ) : knownDevices.length === 0 ? ( + No known devices were found. 
+ ) : ( + + {knownDevices.map((device) => ( + + ))} + + )} + + + + ); }; From d05288d9afdc9a47eac4c7faf0e70e10f1b2baba Mon Sep 17 00:00:00 2001 From: Tyler Adam Martinez Date: Fri, 20 Mar 2026 15:16:56 -0500 Subject: [PATCH 03/22] feat(UserSession): Add user session --- api/routes/user_sessions.py | 26 ++ api/security.py | 28 +- frontend/src/AppLayout.tsx | 2 + frontend/src/components/SectionCard.tsx | 21 ++ .../src/components/SessionStatusPoller.tsx | 67 ++++ frontend/src/components/Topbar.tsx | 12 +- frontend/src/components/index.ts | 1 + frontend/src/hooks/useFetchWithAuth.ts | 14 +- frontend/src/service/ApiServiceNew.ts | 20 +- frontend/src/utils/AuthSession.ts | 33 ++ frontend/src/views/Settings.tsx | 346 ++++++++---------- 11 files changed, 339 insertions(+), 231 deletions(-) create mode 100644 frontend/src/components/SectionCard.tsx create mode 100644 frontend/src/components/SessionStatusPoller.tsx create mode 100644 frontend/src/utils/AuthSession.ts diff --git a/api/routes/user_sessions.py b/api/routes/user_sessions.py index 69fc7bec..466c96af 100644 --- a/api/routes/user_sessions.py +++ b/api/routes/user_sessions.py @@ -58,6 +58,11 @@ class UserSessionsResponse(ORMBase): known_devices: list[KnownDeviceSummary] +class CurrentSessionStatusResponse(ORMBase): + session_identifier: str + is_active: bool + + def serialize_session( session: UserSessions, *, @@ -94,6 +99,27 @@ def get_known_device_key(session: UserSessions) -> str: return f"derived:{'|'.join(fallback_parts)}" +@user_sessions_router.get( + "/user-sessions/current/status", + response_model=CurrentSessionStatusResponse, +) +def get_current_session_status( + _: Users = Depends(get_current_user), + token: str = Depends(oauth2_scheme), +): + current_session_identifier = get_session_identifier_from_token(token) + if not current_session_identifier: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Session identifier is missing from token", + ) + + return 
CurrentSessionStatusResponse( + session_identifier=current_session_identifier, + is_active=True, + ) + + @user_sessions_router.get( "/user-sessions", response_model=UserSessionsResponse, diff --git a/api/security.py b/api/security.py index ca0de900..1728b8a2 100644 --- a/api/security.py +++ b/api/security.py @@ -10,7 +10,7 @@ from sqlalchemy.orm import joinedload, undefer, Session from sqlalchemy.sql import select -from api.models.main_models import Users, UserRoles, SecurityScopes +from api.models.main_models import Users, UserRoles, SecurityScopes, UserSessions from api.schemas import security_schemas from api.session import get_db @@ -39,6 +39,12 @@ headers={"WWW-Authenticate": "Bearer"}, ) +inactive_session_exception = HTTPException( + status_code=440, + detail="Session is no longer active. Please login again.", + headers={"WWW-Authenticate": "Bearer"}, +) + # Return the current user if credentials were correct, False if not def authenticate_user(login_identifier: str, password: str, db: Session): @@ -116,16 +122,36 @@ def get_current_user( if username is None: raise invalid_credentials_exception + session_identifier: str | None = payload.get("sid") + if session_identifier is None: + raise invalid_credentials_exception + user = get_user(username=username, db=db) if user is None: raise invalid_credentials_exception + session = ( + db.query(UserSessions) + .filter( + UserSessions.session_identifier == session_identifier, + UserSessions.user_id == user.id, + UserSessions.is_active.is_(True), + UserSessions.signed_out_at.is_(None), + ) + .first() + ) + if session is None: + raise inactive_session_exception + return user except ExpiredSignatureError: raise expired_token_exception + except HTTPException: + raise + except Exception: raise invalid_credentials_exception diff --git a/frontend/src/AppLayout.tsx b/frontend/src/AppLayout.tsx index 3787512e..dd72ff1d 100644 --- a/frontend/src/AppLayout.tsx +++ b/frontend/src/AppLayout.tsx @@ -1,6 +1,7 @@ import { useEffect, 
useState } from "react"; import { Box, useMediaQuery, useTheme } from "@mui/material"; import { Topbar } from "@/components"; +import { SessionStatusPoller } from "@/components/SessionStatusPoller"; import { DESKTOP_COLLAPSED_WIDTH, SidebarInset } from "@/components/ui/sidebar"; import Sidenav from "./sidenav"; import { useAuthUser } from "react-auth-kit"; @@ -74,6 +75,7 @@ export const AppLayout = ({ children }: { children: JSX.Element }) => { bgcolor: "#a5adb5", }} > + {isLoggedIn ? : null} + + {children} + + ); +} diff --git a/frontend/src/components/SessionStatusPoller.tsx b/frontend/src/components/SessionStatusPoller.tsx new file mode 100644 index 00000000..46d67c4c --- /dev/null +++ b/frontend/src/components/SessionStatusPoller.tsx @@ -0,0 +1,67 @@ +import { useEffect } from "react"; +import { useNavigate } from "@tanstack/react-router"; +import { useAuthHeader, useSignOut } from "react-auth-kit"; +import { API_URL } from "@/config"; +import { handleExpiredSession } from "@/utils/AuthSession"; + +const SESSION_STATUS_POLL_INTERVAL_MS = 15000; + +export function SessionStatusPoller() { + const authHeader = useAuthHeader(); + const signOut = useSignOut(); + const navigate = useNavigate(); + + useEffect(() => { + let isMounted = true; + + const pollSessionStatus = async () => { + const authorization = authHeader(); + if (!authorization || document.hidden) { + return; + } + + try { + const response = await fetch(`${API_URL}/user-sessions/current/status`, { + headers: { + Authorization: authorization, + }, + }); + + if ( + response.status === 440 && + localStorage.getItem("loggedIn") && + isMounted + ) { + handleExpiredSession({ + signOut, + navigate, + }); + } + } catch { + // Ignore transient network errors and try again on the next interval. 
+ } + }; + + void pollSessionStatus(); + + const intervalId = window.setInterval(() => { + void pollSessionStatus(); + }, SESSION_STATUS_POLL_INTERVAL_MS); + + const handleVisibilityChange = () => { + if (!document.hidden) { + void pollSessionStatus(); + } + }; + + document.addEventListener("visibilitychange", handleVisibilityChange); + + return () => { + isMounted = false; + window.clearInterval(intervalId); + document.removeEventListener("visibilitychange", handleVisibilityChange); + }; + }, [authHeader, navigate, signOut]); + + return null; +} diff --git a/frontend/src/components/Topbar.tsx b/frontend/src/components/Topbar.tsx index a5cef4c5..69ae03f9 100644 --- a/frontend/src/components/Topbar.tsx +++ b/frontend/src/components/Topbar.tsx @@ -36,7 +36,10 @@ import { import { BgColor } from "@/constants"; import { useIsActiveRoute } from "@/hooks"; import { useGetUnreadNotificationCount } from "@/service"; -import { clearTrackedSession, notifyTrackedLogout } from "@/utils/SessionTracking"; +import { + clearTrackedSession, + notifyTrackedLogout, +} from "@/utils/SessionTracking"; export const Topbar = ({ open, @@ -453,16 +456,13 @@ export const Topbar = ({ ) : ( - - ) : ( - - ( - - )} - /> - - - - )} - - + This is how your name appears across the application. + + - - - - - Avatar - - - Upload or replace your account image. - - - - - - - - + {!isEditing ? ( + + + + ) : ( + + ( + + )} + /> + + )} + + + + + + + Avatar + + + Upload or replace your account image. 
+ + + + + + + + + + + + - - - + + + Date: Fri, 20 Mar 2026 15:33:21 -0500 Subject: [PATCH 04/22] refactor(/components): Reorganize components into logical folders --- frontend/src/AppLayout.tsx | 2 +- frontend/src/components/AvatarPicker.tsx | 141 ------------------ frontend/src/components/DMSentry.tsx | 47 ------ frontend/src/components/LinkBehavior.tsx | 17 --- frontend/src/components/MeterTypeSelect.tsx | 39 ----- .../src/components/ModalBackgroundBox.tsx | 33 ---- .../{ => Modals}/MergeWellModal.tsx | 2 +- .../components/RHControlled/NSPChipSelect.tsx | 91 ----------- frontend/src/components/ReportsNavItem.tsx | 73 --------- .../{ => display}/DirectionCard.tsx | 0 .../{ => display}/EventTypeChip.tsx | 0 .../components/{ => display}/IsTrueChip.tsx | 0 .../{ => display}/PlotContextMenu.tsx | 0 .../src/components/{ => display}/RoleChip.tsx | 0 .../src/components/{ => display}/StatCell.tsx | 0 .../{ => display}/StyledToggleButton.tsx | 0 .../src/components/{ => display}/TabPanel.tsx | 0 .../{ => display}/TristateToggle.tsx | 0 .../src/components/{ => forms}/ChipSelect.tsx | 0 .../{ => forms}/MeterRegisterSelect.tsx | 0 .../components/{ => forms}/MeterSelection.tsx | 0 .../components/{ => forms}/UserSelection.tsx | 0 .../components/{ => forms}/WellSelection.tsx | 0 .../{ => forms}/WorkOrderSelect.tsx | 0 .../controlled}/ControlledActivitySelect.tsx | 0 .../controlled}/ControlledAutocomplete.tsx | 0 .../controlled}/ControlledCheckbox.tsx | 0 .../controlled}/ControlledDMS.tsx | 0 .../controlled}/ControlledDatepicker.tsx | 0 .../ControlledMeterRegisterSelect.tsx | 0 .../controlled}/ControlledMeterSelection.tsx | 0 .../ControlledMeterStatusTypeSelect.tsx | 0 .../controlled}/ControlledMeterTypeSelect.tsx | 0 .../controlled}/ControlledPartTypeSelect.tsx | 0 .../controlled}/ControlledSelect.tsx | 0 .../controlled}/ControlledTextbox.tsx | 0 .../controlled}/ControlledTimepicker.tsx | 0 .../controlled}/ControlledUserSelect.tsx | 2 +- 
.../controlled}/ControlledWellSelection.tsx | 0 .../controlled}/ControlledWorkOrderSelect.tsx | 0 .../controlled}/NotesChipSelect.tsx | 0 .../controlled}/PartsChipSelect.tsx | 0 .../controlled}/ServicesChipSelect.tsx | 0 .../controlled}/index.ts | 1 - frontend/src/components/index.ts | 74 +++++---- .../components/{ => layout}/BackgroundBox.tsx | 0 .../{ => layout}/CustomCardHeader.tsx | 0 .../{ => layout}/GridFooterWithButton.tsx | 0 .../components/{ => layout}/SectionCard.tsx | 0 .../src/components/{ => layout}/Topbar.tsx | 0 .../{ => layout}/TopbarUserButton.tsx | 2 +- .../{ => maps}/MapFullscreenToggle.tsx | 0 .../components/{ => maps}/MapUrlStateSync.tsx | 0 .../{ => maps}/MeterMapColorLegend.tsx | 0 .../components/{ => maps}/WellMapLegend.tsx | 4 +- .../{MapIcons => maps/icons}/Black.tsx | 2 +- .../{MapIcons => maps/icons}/Blue.tsx | 0 .../{MapIcons => maps/icons}/Red.tsx | 2 +- .../{MapIcons => maps/icons}/index.ts | 0 .../layers}/BoundariesLayer.tsx | 0 .../layers}/OpenStreetMapLayer.tsx | 0 .../layers}/SatelliteLayer.tsx | 0 .../layers}/SoutheastGuideLayer.tsx | 0 .../layers}/TransportationLayer.tsx | 0 .../{Layers => maps/layers}/index.ts | 0 .../components/{ => media}/ImageDialog.tsx | 0 .../{ => media}/ImagePreviewGrid.tsx | 0 .../{ => media}/ImageUploadWithPreview.tsx | 5 +- .../src/components/{ => media}/UserAvatar.tsx | 0 .../ManageBreadcrumbTitle.tsx | 0 .../components/{ => navigation}/NavLink.tsx | 0 .../ReportBreadcrumbTitle.tsx | 0 .../{ => session}/SessionStatusPoller.tsx | 0 .../src/views/Chlorides/ChloridesPlot.tsx | 2 +- .../MonitoringWells/MonitoringWellsPlot.tsx | 2 +- frontend/src/views/Parts/PartsHistory.tsx | 5 +- .../src/views/Reports/Chlorides/index.tsx | 2 +- frontend/src/views/Settings.tsx | 1 - .../views/WellManagement/WellSelectionMap.tsx | 2 +- 79 files changed, 52 insertions(+), 499 deletions(-) delete mode 100644 frontend/src/components/AvatarPicker.tsx delete mode 100644 frontend/src/components/DMSentry.tsx delete mode 100644 
frontend/src/components/LinkBehavior.tsx delete mode 100644 frontend/src/components/MeterTypeSelect.tsx delete mode 100644 frontend/src/components/ModalBackgroundBox.tsx rename frontend/src/components/{ => Modals}/MergeWellModal.tsx (98%) delete mode 100644 frontend/src/components/RHControlled/NSPChipSelect.tsx delete mode 100644 frontend/src/components/ReportsNavItem.tsx rename frontend/src/components/{ => display}/DirectionCard.tsx (100%) rename frontend/src/components/{ => display}/EventTypeChip.tsx (100%) rename frontend/src/components/{ => display}/IsTrueChip.tsx (100%) rename frontend/src/components/{ => display}/PlotContextMenu.tsx (100%) rename frontend/src/components/{ => display}/RoleChip.tsx (100%) rename frontend/src/components/{ => display}/StatCell.tsx (100%) rename frontend/src/components/{ => display}/StyledToggleButton.tsx (100%) rename frontend/src/components/{ => display}/TabPanel.tsx (100%) rename frontend/src/components/{ => display}/TristateToggle.tsx (100%) rename frontend/src/components/{ => forms}/ChipSelect.tsx (100%) rename frontend/src/components/{ => forms}/MeterRegisterSelect.tsx (100%) rename frontend/src/components/{ => forms}/MeterSelection.tsx (100%) rename frontend/src/components/{ => forms}/UserSelection.tsx (100%) rename frontend/src/components/{ => forms}/WellSelection.tsx (100%) rename frontend/src/components/{ => forms}/WorkOrderSelect.tsx (100%) rename frontend/src/components/{RHControlled => forms/controlled}/ControlledActivitySelect.tsx (100%) rename frontend/src/components/{RHControlled => forms/controlled}/ControlledAutocomplete.tsx (100%) rename frontend/src/components/{RHControlled => forms/controlled}/ControlledCheckbox.tsx (100%) rename frontend/src/components/{RHControlled => forms/controlled}/ControlledDMS.tsx (100%) rename frontend/src/components/{RHControlled => forms/controlled}/ControlledDatepicker.tsx (100%) rename frontend/src/components/{RHControlled => forms/controlled}/ControlledMeterRegisterSelect.tsx 
(100%) rename frontend/src/components/{RHControlled => forms/controlled}/ControlledMeterSelection.tsx (100%) rename frontend/src/components/{RHControlled => forms/controlled}/ControlledMeterStatusTypeSelect.tsx (100%) rename frontend/src/components/{RHControlled => forms/controlled}/ControlledMeterTypeSelect.tsx (100%) rename frontend/src/components/{RHControlled => forms/controlled}/ControlledPartTypeSelect.tsx (100%) rename frontend/src/components/{RHControlled => forms/controlled}/ControlledSelect.tsx (100%) rename frontend/src/components/{RHControlled => forms/controlled}/ControlledTextbox.tsx (100%) rename frontend/src/components/{RHControlled => forms/controlled}/ControlledTimepicker.tsx (100%) rename frontend/src/components/{RHControlled => forms/controlled}/ControlledUserSelect.tsx (99%) rename frontend/src/components/{RHControlled => forms/controlled}/ControlledWellSelection.tsx (100%) rename frontend/src/components/{RHControlled => forms/controlled}/ControlledWorkOrderSelect.tsx (100%) rename frontend/src/components/{RHControlled => forms/controlled}/NotesChipSelect.tsx (100%) rename frontend/src/components/{RHControlled => forms/controlled}/PartsChipSelect.tsx (100%) rename frontend/src/components/{RHControlled => forms/controlled}/ServicesChipSelect.tsx (100%) rename frontend/src/components/{RHControlled => forms/controlled}/index.ts (95%) rename frontend/src/components/{ => layout}/BackgroundBox.tsx (100%) rename frontend/src/components/{ => layout}/CustomCardHeader.tsx (100%) rename frontend/src/components/{ => layout}/GridFooterWithButton.tsx (100%) rename frontend/src/components/{ => layout}/SectionCard.tsx (100%) rename frontend/src/components/{ => layout}/Topbar.tsx (100%) rename frontend/src/components/{ => layout}/TopbarUserButton.tsx (93%) rename frontend/src/components/{ => maps}/MapFullscreenToggle.tsx (100%) rename frontend/src/components/{ => maps}/MapUrlStateSync.tsx (100%) rename frontend/src/components/{ => maps}/MeterMapColorLegend.tsx 
(100%) rename frontend/src/components/{ => maps}/WellMapLegend.tsx (98%) rename frontend/src/components/{MapIcons => maps/icons}/Black.tsx (79%) rename frontend/src/components/{MapIcons => maps/icons}/Blue.tsx (100%) rename frontend/src/components/{MapIcons => maps/icons}/Red.tsx (80%) rename frontend/src/components/{MapIcons => maps/icons}/index.ts (100%) rename frontend/src/components/{Layers => maps/layers}/BoundariesLayer.tsx (100%) rename frontend/src/components/{Layers => maps/layers}/OpenStreetMapLayer.tsx (100%) rename frontend/src/components/{Layers => maps/layers}/SatelliteLayer.tsx (100%) rename frontend/src/components/{Layers => maps/layers}/SoutheastGuideLayer.tsx (100%) rename frontend/src/components/{Layers => maps/layers}/TransportationLayer.tsx (100%) rename frontend/src/components/{Layers => maps/layers}/index.ts (100%) rename frontend/src/components/{ => media}/ImageDialog.tsx (100%) rename frontend/src/components/{ => media}/ImagePreviewGrid.tsx (100%) rename frontend/src/components/{ => media}/ImageUploadWithPreview.tsx (96%) rename frontend/src/components/{ => media}/UserAvatar.tsx (100%) rename frontend/src/components/{ => navigation}/ManageBreadcrumbTitle.tsx (100%) rename frontend/src/components/{ => navigation}/NavLink.tsx (100%) rename frontend/src/components/{ => navigation}/ReportBreadcrumbTitle.tsx (100%) rename frontend/src/components/{ => session}/SessionStatusPoller.tsx (100%) diff --git a/frontend/src/AppLayout.tsx b/frontend/src/AppLayout.tsx index dd72ff1d..ab783bbb 100644 --- a/frontend/src/AppLayout.tsx +++ b/frontend/src/AppLayout.tsx @@ -1,7 +1,7 @@ import { useEffect, useState } from "react"; import { Box, useMediaQuery, useTheme } from "@mui/material"; import { Topbar } from "@/components"; -import { SessionStatusPoller } from "@/components/SessionStatusPoller"; +import { SessionStatusPoller } from "@/components/session/SessionStatusPoller"; import { DESKTOP_COLLAPSED_WIDTH, SidebarInset } from "@/components/ui/sidebar"; 
import Sidenav from "./sidenav"; import { useAuthUser } from "react-auth-kit"; diff --git a/frontend/src/components/AvatarPicker.tsx b/frontend/src/components/AvatarPicker.tsx deleted file mode 100644 index 4540e823..00000000 --- a/frontend/src/components/AvatarPicker.tsx +++ /dev/null @@ -1,141 +0,0 @@ -import { useState, useEffect } from "react"; -import { Grid, Box, Card, CardActionArea, Button, Typography } from "@mui/material"; -import { createAvatar } from "@dicebear/core"; -import { loreleiNeutral, initials } from "@dicebear/collection"; - -type AvatarPickerProps = { - onSelect: (avatar: string) => void; - initialSeed?: string; - display_name: string; -}; - -export default function AvatarPicker({ - onSelect, - initialSeed, - display_name, -}: AvatarPickerProps) { - const [avatars, setAvatars] = useState([]); - const [selected, setSelected] = useState(null); - - const generateAvatars = () => { - // Lorelei batch: random seed - const batchLorelei = Array.from({ length: 10 }, () => { - const seed = Math.random().toString(36).substring(2, 10); - return createAvatar(loreleiNeutral, { - size: 64, - seed, - }).toDataUri(); - }); - - // Initials batch: always use display_name, but vary style - const batchInitials = Array.from({ length: 2 }, () => { - const size = 64 + Math.floor(Math.random() * 20) - 10; // vary ±10 - const bgColors = [ - // Greys (dark enough for white contrast) - "424242", "616161", "757575", "546e7a", "455a64", - - // Blues (pair with pink secondary) - "1565c0", "1976d2", "1e88e5", "283593", "303f9f", - - // Teals & Cyans - "00838f", "0097a7", "00695c", "00796b", - - // Greens - "2e7d32", "388e3c", "43a047", "1b5e20", - - // Yellows & Ambers (pick deeper tones so white is readable) - "f57f17", "f9a825", "ff8f00", "ff6f00", - - // Oranges - "e65100", "ef6c00", "f4511e", "d84315", - - // Reds / Pinks (echo secondary) - "ad1457", "c2185b", "d81b60", "b71c1c", "c62828", - - // Purples (complement to indigo) - "6a1b9a", "7b1fa2", "8e24aa", "512da8", 
"5e35b1", - - // Indigo (close to primary but a bit varied) - "283593", "3949ab", "303f9f" - ]; - const backgroundColor = - bgColors[Math.floor(Math.random() * bgColors.length)]; - - return createAvatar(initials, { - size, - seed: display_name, // 👈 initials come from display_name - backgroundColor: [backgroundColor], - }).toDataUri(); - }); - - // Shuffle them together for variety - const mixed = [...batchLorelei, ...batchInitials].sort(() => Math.random() - 0.5); - - setAvatars(mixed); - setSelected(null); - }; - - // Generate initial batch on mount - useEffect(() => { - generateAvatars(); - }, []); - - // If initialSeed provided, generate that avatar as the selected one - useEffect(() => { - if (initialSeed) { - const avatar = createAvatar(loreleiNeutral, { - size: 64, - seed: initialSeed, - }).toDataUri(); - setSelected(avatar); - } - }, [initialSeed]); - - const handleSelect = (avatar: string) => { - setSelected(avatar); - onSelect(avatar); - }; - - return ( - - - Choose Your Avatar - - - {avatars.map((avatar, i) => ( - - - handleSelect(avatar)}> - - - - - ))} - - - - - - - ); -} diff --git a/frontend/src/components/DMSentry.tsx b/frontend/src/components/DMSentry.tsx deleted file mode 100644 index 7a9c23ee..00000000 --- a/frontend/src/components/DMSentry.tsx +++ /dev/null @@ -1,47 +0,0 @@ -/* - This component is a UI that allows a user to enter a latitude or longitude using - degrees, minutes, and seconds. -*/ - -import { useEffect, useState } from "react"; - -// Create a component that takes a label (latitude or longitude) and a callback function -// That will be called when the user enters a valid latitude or longitude. 
-export default function DMSentry({ dms_label, onChange }: any) { - const [degrees, setDegrees] = useState(0); - const [minutes, setMinutes] = useState(0); - const [seconds, setSeconds] = useState(0); - const [decimal_degrees, setDecimalDegrees] = useState(0); - - const calculateDecimalDegrees = (d: number, m: number, s: number) => - d + m / 60 + s / 3600; - - useEffect(() => { - setDecimalDegrees(calculateDecimalDegrees(degrees, minutes, seconds)); - onChange(decimal_degrees); - }, [degrees, minutes, seconds]); - - return ( -
- {dms_label}: - setDegrees(Number(e.target.value))} - type="number" - /> - ° - setMinutes(Number(e.target.value))} - type="number" - /> - ′ - setSeconds(Number(e.target.value))} - type="number" - /> - ″″ -
- ); -} diff --git a/frontend/src/components/LinkBehavior.tsx b/frontend/src/components/LinkBehavior.tsx deleted file mode 100644 index e32b008f..00000000 --- a/frontend/src/components/LinkBehavior.tsx +++ /dev/null @@ -1,17 +0,0 @@ -import { Link as RouterLink, createLink } from "@tanstack/react-router"; -import { Link as MuiLink, type LinkProps as MuiLinkProps } from "@mui/material"; -import { forwardRef } from "react"; - -// MUI expects the component to forwardRef to an element -export const LinkBehavior = forwardRef< - HTMLAnchorElement, - React.ComponentProps ->(function LinkBehavior(props, ref) { - return ; -}); - -const MUILinkComponent = forwardRef( - (props, ref) => , -); - -export const RouterMuiLink = createLink(MUILinkComponent); diff --git a/frontend/src/components/MeterTypeSelect.tsx b/frontend/src/components/MeterTypeSelect.tsx deleted file mode 100644 index d2be1e64..00000000 --- a/frontend/src/components/MeterTypeSelect.tsx +++ /dev/null @@ -1,39 +0,0 @@ -import { FormControl, InputLabel, MenuItem, Select } from "@mui/material"; -import { useGetMeterTypeList } from "@/service"; -import { MeterTypeLU } from "@/interfaces"; - -export default function MeterTypeSelect({ - selectedMeterTypeID, - setSelectedMeterTypeID, - ...childProps -}: any) { - const meterTypeList = useGetMeterTypeList(); - - return ( - - Meter Type - - - ); -} diff --git a/frontend/src/components/ModalBackgroundBox.tsx b/frontend/src/components/ModalBackgroundBox.tsx deleted file mode 100644 index 23681499..00000000 --- a/frontend/src/components/ModalBackgroundBox.tsx +++ /dev/null @@ -1,33 +0,0 @@ -import React from "react"; -import { Box, BoxProps } from "@mui/material"; - -export const ModalBackgroundBox: React.FC = ({ - children, - sx, - ...rest -}) => { - return ( - - {children} - - ); -}; diff --git a/frontend/src/components/MergeWellModal.tsx b/frontend/src/components/Modals/MergeWellModal.tsx similarity index 98% rename from frontend/src/components/MergeWellModal.tsx rename 
to frontend/src/components/Modals/MergeWellModal.tsx index 0afaf310..69eb3466 100644 --- a/frontend/src/components/MergeWellModal.tsx +++ b/frontend/src/components/Modals/MergeWellModal.tsx @@ -3,7 +3,7 @@ import { useState, useEffect } from "react"; import { useMergeWells } from "@/service"; import { Well } from "@/interfaces"; -import WellSelection from "./WellSelection"; +import WellSelection from "../forms/WellSelection"; export function MergeWellModal({ isWellMergeModalOpen, diff --git a/frontend/src/components/RHControlled/NSPChipSelect.tsx b/frontend/src/components/RHControlled/NSPChipSelect.tsx deleted file mode 100644 index 10bba9ba..00000000 --- a/frontend/src/components/RHControlled/NSPChipSelect.tsx +++ /dev/null @@ -1,91 +0,0 @@ -import { Controller } from "react-hook-form"; -import { NoteTypeLU, ServiceTypeLU, PartTypeLU } from "@/interfaces"; -import { - useGetNoteTypes, - useGetServiceTypes, - useGetPartTypeList, -} from "@/service"; - -import ChipSelect from "../ChipSelect"; - -type SelectType = "Notes" | "Services" | "Parts"; - -export default function NSPChipSelect({ - name, - control, - select_type, -}: { - name: string; - control: any; - select_type: SelectType; -}) { - function getOptions() { - switch (select_type) { - case "Notes": - return useGetNoteTypes(); - case "Services": - return useGetServiceTypes(); - case "Parts": - return useGetPartTypeList(); - } - } - - //Function that takes an item from the options list and converts it to a chip item - //Chip select item is simply {id: number, name: string} - function convertChipItems(items: any) { - switch (select_type) { - case "Notes": - return items.map((note: NoteTypeLU) => ({ - id: note.id, - name: note.note, - })); - case "Services": - return items.map((service: ServiceTypeLU) => ({ - id: service.id, - name: service.service_name, - })); - case "Parts": - return items.map((part: PartTypeLU) => ({ - id: part.id, - name: part.name, - })); - } - } - - //Find an option in the option list by id 
- function findOptionById(id: number, optionsList: any) { - return optionsList.data?.find((option: any) => option.id === id); - } - - const optionsList = getOptions(); - - return ( - { - console.log(field); - return ( - { - field.onChange([ - ...field.value, - findOptionById(selected_id, optionsList), - ]); - }} - onDelete={(delete_id) => { - field.onChange( - field.value.filter((option: any) => option.id !== delete_id), - ); - }} - /> - ); - }} - /> - ); -} diff --git a/frontend/src/components/ReportsNavItem.tsx b/frontend/src/components/ReportsNavItem.tsx deleted file mode 100644 index 7b899b81..00000000 --- a/frontend/src/components/ReportsNavItem.tsx +++ /dev/null @@ -1,73 +0,0 @@ -import { Dispatch, SetStateAction, useState } from "react"; -import { - ListItem, - ListItemButton, - ListItemIcon, - ListItemText, -} from "@mui/material"; -import { Assessment, ExpandLess, ExpandMore } from "@mui/icons-material"; -import { useNavigate } from "@tanstack/react-router"; -import { useIsActiveRoute } from "@/hooks"; - -export function ReportsNavItem({ - open, - setOpen, -}: { - open: boolean; - setOpen: Dispatch>; -}) { - const navigate = useNavigate(); - const [clickTimer, setClickTimer] = useState(null); - const isActive = useIsActiveRoute("/reports"); - - const handleClick = () => { - if (clickTimer) { - clearTimeout(clickTimer); - setClickTimer(null); - } - const timer = setTimeout(() => { - setOpen((prev) => !prev); - setClickTimer(null); - }, 200); - setClickTimer(timer); - }; - - const handleDoubleClick = (e: React.MouseEvent) => { - if (clickTimer) { - clearTimeout(clickTimer); - setClickTimer(null); - } - e.stopPropagation(); - setOpen(false); - navigate({ to: "/reports", search: {} }); - }; - - return ( - - - - - - - {open ? 
: } - - - ); -} diff --git a/frontend/src/components/DirectionCard.tsx b/frontend/src/components/display/DirectionCard.tsx similarity index 100% rename from frontend/src/components/DirectionCard.tsx rename to frontend/src/components/display/DirectionCard.tsx diff --git a/frontend/src/components/EventTypeChip.tsx b/frontend/src/components/display/EventTypeChip.tsx similarity index 100% rename from frontend/src/components/EventTypeChip.tsx rename to frontend/src/components/display/EventTypeChip.tsx diff --git a/frontend/src/components/IsTrueChip.tsx b/frontend/src/components/display/IsTrueChip.tsx similarity index 100% rename from frontend/src/components/IsTrueChip.tsx rename to frontend/src/components/display/IsTrueChip.tsx diff --git a/frontend/src/components/PlotContextMenu.tsx b/frontend/src/components/display/PlotContextMenu.tsx similarity index 100% rename from frontend/src/components/PlotContextMenu.tsx rename to frontend/src/components/display/PlotContextMenu.tsx diff --git a/frontend/src/components/RoleChip.tsx b/frontend/src/components/display/RoleChip.tsx similarity index 100% rename from frontend/src/components/RoleChip.tsx rename to frontend/src/components/display/RoleChip.tsx diff --git a/frontend/src/components/StatCell.tsx b/frontend/src/components/display/StatCell.tsx similarity index 100% rename from frontend/src/components/StatCell.tsx rename to frontend/src/components/display/StatCell.tsx diff --git a/frontend/src/components/StyledToggleButton.tsx b/frontend/src/components/display/StyledToggleButton.tsx similarity index 100% rename from frontend/src/components/StyledToggleButton.tsx rename to frontend/src/components/display/StyledToggleButton.tsx diff --git a/frontend/src/components/TabPanel.tsx b/frontend/src/components/display/TabPanel.tsx similarity index 100% rename from frontend/src/components/TabPanel.tsx rename to frontend/src/components/display/TabPanel.tsx diff --git a/frontend/src/components/TristateToggle.tsx 
b/frontend/src/components/display/TristateToggle.tsx similarity index 100% rename from frontend/src/components/TristateToggle.tsx rename to frontend/src/components/display/TristateToggle.tsx diff --git a/frontend/src/components/ChipSelect.tsx b/frontend/src/components/forms/ChipSelect.tsx similarity index 100% rename from frontend/src/components/ChipSelect.tsx rename to frontend/src/components/forms/ChipSelect.tsx diff --git a/frontend/src/components/MeterRegisterSelect.tsx b/frontend/src/components/forms/MeterRegisterSelect.tsx similarity index 100% rename from frontend/src/components/MeterRegisterSelect.tsx rename to frontend/src/components/forms/MeterRegisterSelect.tsx diff --git a/frontend/src/components/MeterSelection.tsx b/frontend/src/components/forms/MeterSelection.tsx similarity index 100% rename from frontend/src/components/MeterSelection.tsx rename to frontend/src/components/forms/MeterSelection.tsx diff --git a/frontend/src/components/UserSelection.tsx b/frontend/src/components/forms/UserSelection.tsx similarity index 100% rename from frontend/src/components/UserSelection.tsx rename to frontend/src/components/forms/UserSelection.tsx diff --git a/frontend/src/components/WellSelection.tsx b/frontend/src/components/forms/WellSelection.tsx similarity index 100% rename from frontend/src/components/WellSelection.tsx rename to frontend/src/components/forms/WellSelection.tsx diff --git a/frontend/src/components/WorkOrderSelect.tsx b/frontend/src/components/forms/WorkOrderSelect.tsx similarity index 100% rename from frontend/src/components/WorkOrderSelect.tsx rename to frontend/src/components/forms/WorkOrderSelect.tsx diff --git a/frontend/src/components/RHControlled/ControlledActivitySelect.tsx b/frontend/src/components/forms/controlled/ControlledActivitySelect.tsx similarity index 100% rename from frontend/src/components/RHControlled/ControlledActivitySelect.tsx rename to frontend/src/components/forms/controlled/ControlledActivitySelect.tsx diff --git 
a/frontend/src/components/RHControlled/ControlledAutocomplete.tsx b/frontend/src/components/forms/controlled/ControlledAutocomplete.tsx similarity index 100% rename from frontend/src/components/RHControlled/ControlledAutocomplete.tsx rename to frontend/src/components/forms/controlled/ControlledAutocomplete.tsx diff --git a/frontend/src/components/RHControlled/ControlledCheckbox.tsx b/frontend/src/components/forms/controlled/ControlledCheckbox.tsx similarity index 100% rename from frontend/src/components/RHControlled/ControlledCheckbox.tsx rename to frontend/src/components/forms/controlled/ControlledCheckbox.tsx diff --git a/frontend/src/components/RHControlled/ControlledDMS.tsx b/frontend/src/components/forms/controlled/ControlledDMS.tsx similarity index 100% rename from frontend/src/components/RHControlled/ControlledDMS.tsx rename to frontend/src/components/forms/controlled/ControlledDMS.tsx diff --git a/frontend/src/components/RHControlled/ControlledDatepicker.tsx b/frontend/src/components/forms/controlled/ControlledDatepicker.tsx similarity index 100% rename from frontend/src/components/RHControlled/ControlledDatepicker.tsx rename to frontend/src/components/forms/controlled/ControlledDatepicker.tsx diff --git a/frontend/src/components/RHControlled/ControlledMeterRegisterSelect.tsx b/frontend/src/components/forms/controlled/ControlledMeterRegisterSelect.tsx similarity index 100% rename from frontend/src/components/RHControlled/ControlledMeterRegisterSelect.tsx rename to frontend/src/components/forms/controlled/ControlledMeterRegisterSelect.tsx diff --git a/frontend/src/components/RHControlled/ControlledMeterSelection.tsx b/frontend/src/components/forms/controlled/ControlledMeterSelection.tsx similarity index 100% rename from frontend/src/components/RHControlled/ControlledMeterSelection.tsx rename to frontend/src/components/forms/controlled/ControlledMeterSelection.tsx diff --git a/frontend/src/components/RHControlled/ControlledMeterStatusTypeSelect.tsx 
b/frontend/src/components/forms/controlled/ControlledMeterStatusTypeSelect.tsx similarity index 100% rename from frontend/src/components/RHControlled/ControlledMeterStatusTypeSelect.tsx rename to frontend/src/components/forms/controlled/ControlledMeterStatusTypeSelect.tsx diff --git a/frontend/src/components/RHControlled/ControlledMeterTypeSelect.tsx b/frontend/src/components/forms/controlled/ControlledMeterTypeSelect.tsx similarity index 100% rename from frontend/src/components/RHControlled/ControlledMeterTypeSelect.tsx rename to frontend/src/components/forms/controlled/ControlledMeterTypeSelect.tsx diff --git a/frontend/src/components/RHControlled/ControlledPartTypeSelect.tsx b/frontend/src/components/forms/controlled/ControlledPartTypeSelect.tsx similarity index 100% rename from frontend/src/components/RHControlled/ControlledPartTypeSelect.tsx rename to frontend/src/components/forms/controlled/ControlledPartTypeSelect.tsx diff --git a/frontend/src/components/RHControlled/ControlledSelect.tsx b/frontend/src/components/forms/controlled/ControlledSelect.tsx similarity index 100% rename from frontend/src/components/RHControlled/ControlledSelect.tsx rename to frontend/src/components/forms/controlled/ControlledSelect.tsx diff --git a/frontend/src/components/RHControlled/ControlledTextbox.tsx b/frontend/src/components/forms/controlled/ControlledTextbox.tsx similarity index 100% rename from frontend/src/components/RHControlled/ControlledTextbox.tsx rename to frontend/src/components/forms/controlled/ControlledTextbox.tsx diff --git a/frontend/src/components/RHControlled/ControlledTimepicker.tsx b/frontend/src/components/forms/controlled/ControlledTimepicker.tsx similarity index 100% rename from frontend/src/components/RHControlled/ControlledTimepicker.tsx rename to frontend/src/components/forms/controlled/ControlledTimepicker.tsx diff --git a/frontend/src/components/RHControlled/ControlledUserSelect.tsx b/frontend/src/components/forms/controlled/ControlledUserSelect.tsx 
similarity index 99% rename from frontend/src/components/RHControlled/ControlledUserSelect.tsx rename to frontend/src/components/forms/controlled/ControlledUserSelect.tsx index 626d78bb..0b84791b 100644 --- a/frontend/src/components/RHControlled/ControlledUserSelect.tsx +++ b/frontend/src/components/forms/controlled/ControlledUserSelect.tsx @@ -12,7 +12,7 @@ import { } from "react-hook-form"; import { User } from "@/interfaces"; import { useGetUserList } from "@/service"; -import { UserAvatar } from "@/components/UserAvatar"; +import { UserAvatar } from "@/components/media/UserAvatar"; import { getRoleLabel, sortUsersByRoleThenName, diff --git a/frontend/src/components/RHControlled/ControlledWellSelection.tsx b/frontend/src/components/forms/controlled/ControlledWellSelection.tsx similarity index 100% rename from frontend/src/components/RHControlled/ControlledWellSelection.tsx rename to frontend/src/components/forms/controlled/ControlledWellSelection.tsx diff --git a/frontend/src/components/RHControlled/ControlledWorkOrderSelect.tsx b/frontend/src/components/forms/controlled/ControlledWorkOrderSelect.tsx similarity index 100% rename from frontend/src/components/RHControlled/ControlledWorkOrderSelect.tsx rename to frontend/src/components/forms/controlled/ControlledWorkOrderSelect.tsx diff --git a/frontend/src/components/RHControlled/NotesChipSelect.tsx b/frontend/src/components/forms/controlled/NotesChipSelect.tsx similarity index 100% rename from frontend/src/components/RHControlled/NotesChipSelect.tsx rename to frontend/src/components/forms/controlled/NotesChipSelect.tsx diff --git a/frontend/src/components/RHControlled/PartsChipSelect.tsx b/frontend/src/components/forms/controlled/PartsChipSelect.tsx similarity index 100% rename from frontend/src/components/RHControlled/PartsChipSelect.tsx rename to frontend/src/components/forms/controlled/PartsChipSelect.tsx diff --git a/frontend/src/components/RHControlled/ServicesChipSelect.tsx 
b/frontend/src/components/forms/controlled/ServicesChipSelect.tsx similarity index 100% rename from frontend/src/components/RHControlled/ServicesChipSelect.tsx rename to frontend/src/components/forms/controlled/ServicesChipSelect.tsx diff --git a/frontend/src/components/RHControlled/index.ts b/frontend/src/components/forms/controlled/index.ts similarity index 95% rename from frontend/src/components/RHControlled/index.ts rename to frontend/src/components/forms/controlled/index.ts index 718157aa..bdb87c80 100644 --- a/frontend/src/components/RHControlled/index.ts +++ b/frontend/src/components/forms/controlled/index.ts @@ -15,6 +15,5 @@ export * from './ControlledUserSelect' export * from './ControlledWellSelection' export * from './ControlledWorkOrderSelect' export * from './NotesChipSelect' -export * from './NSPChipSelect' export * from './PartsChipSelect' export * from './ServicesChipSelect' diff --git a/frontend/src/components/index.ts b/frontend/src/components/index.ts index 3ab8e714..81f7025b 100644 --- a/frontend/src/components/index.ts +++ b/frontend/src/components/index.ts @@ -1,42 +1,36 @@ -export * from "./AvatarPicker"; -export * from "./BackgroundBox"; -export * from "./ChipSelect"; -export * from "./CustomCardHeader"; -export * from "./DMSentry"; -export * from "./DirectionCard"; -export * from "./EventTypeChip"; -export * from "./GridFooterWithButton"; -export * from "./ImageDialog"; -export * from "./ImagePreviewGrid"; -export * from "./ImageUploadWithPreview"; -export * from "./IsTrueChip"; -export * from "./Layers"; -export * from "./LinkBehavior"; -export * from "./ManageBreadcrumbTitle"; -export * from "./MapFullscreenToggle"; -export * from "./MapUrlStateSync"; -export * from "./MergeWellModal"; -export * from "./MeterMapColorLegend"; -export * from "./MeterRegisterSelect"; -export * from "./MeterSelection"; -export * from "./MeterTypeSelect"; -export * from "./ModalBackgroundBox"; +export * from "./layout/BackgroundBox"; +export * from 
"./layout/CustomCardHeader"; +export * from "./layout/GridFooterWithButton"; +export * from "./layout/SectionCard"; +export * from "./layout/Topbar"; +export * from "./layout/TopbarUserButton"; +export * from "./navigation/ManageBreadcrumbTitle"; +export * from "./navigation/NavLink"; +export * from "./navigation/ReportBreadcrumbTitle"; +export * from "./maps/MapFullscreenToggle"; +export * from "./maps/MapUrlStateSync"; +export * from "./maps/MeterMapColorLegend"; +export * from "./maps/WellMapLegend"; +export * from "./maps/layers"; +export * from "./forms/ChipSelect"; +export * from "./forms/MeterRegisterSelect"; +export * from "./forms/MeterSelection"; +export * from "./forms/UserSelection"; +export * from "./forms/WellSelection"; +export * from "./forms/WorkOrderSelect"; +export * from "./forms/controlled"; +export * from "./display/DirectionCard"; +export * from "./display/EventTypeChip"; +export * from "./display/IsTrueChip"; +export * from "./display/RoleChip"; +export * from "./display/StatCell"; +export * from "./display/StyledToggleButton"; +export * from "./display/TabPanel"; +export * from "./display/TristateToggle"; +export * from "./media/ImageDialog"; +export * from "./media/ImagePreviewGrid"; +export * from "./media/ImageUploadWithPreview"; +export * from "./media/UserAvatar"; export * from "./Modals"; -export * from "./NavLink"; -export * from "./ReportsNavItem"; -export * from "./ReportBreadcrumbTitle"; -export * from "./RHControlled"; +export * from "./Modals/MergeWellModal"; export * from "./ResizableSplitPanels"; -export * from "./RoleChip"; -export * from "./StatCell"; -export * from "./SectionCard"; -export * from "./StyledToggleButton"; -export * from "./TabPanel"; -export * from "./Topbar"; -export * from "./TopbarUserButton"; -export * from "./TristateToggle"; -export * from "./UserAvatar"; -export * from "./UserSelection"; -export * from "./WellMapLegend"; -export * from "./WellSelection"; -export * from "./WorkOrderSelect"; diff --git 
a/frontend/src/components/BackgroundBox.tsx b/frontend/src/components/layout/BackgroundBox.tsx similarity index 100% rename from frontend/src/components/BackgroundBox.tsx rename to frontend/src/components/layout/BackgroundBox.tsx diff --git a/frontend/src/components/CustomCardHeader.tsx b/frontend/src/components/layout/CustomCardHeader.tsx similarity index 100% rename from frontend/src/components/CustomCardHeader.tsx rename to frontend/src/components/layout/CustomCardHeader.tsx diff --git a/frontend/src/components/GridFooterWithButton.tsx b/frontend/src/components/layout/GridFooterWithButton.tsx similarity index 100% rename from frontend/src/components/GridFooterWithButton.tsx rename to frontend/src/components/layout/GridFooterWithButton.tsx diff --git a/frontend/src/components/SectionCard.tsx b/frontend/src/components/layout/SectionCard.tsx similarity index 100% rename from frontend/src/components/SectionCard.tsx rename to frontend/src/components/layout/SectionCard.tsx diff --git a/frontend/src/components/Topbar.tsx b/frontend/src/components/layout/Topbar.tsx similarity index 100% rename from frontend/src/components/Topbar.tsx rename to frontend/src/components/layout/Topbar.tsx diff --git a/frontend/src/components/TopbarUserButton.tsx b/frontend/src/components/layout/TopbarUserButton.tsx similarity index 93% rename from frontend/src/components/TopbarUserButton.tsx rename to frontend/src/components/layout/TopbarUserButton.tsx index a7cf2971..89b2a112 100644 --- a/frontend/src/components/TopbarUserButton.tsx +++ b/frontend/src/components/layout/TopbarUserButton.tsx @@ -1,6 +1,6 @@ import { ButtonProps, IconButton } from "@mui/material"; import { getRoleColor } from "@/utils"; -import { UserAvatar } from "@/components/UserAvatar"; +import { UserAvatar } from "@/components/media/UserAvatar"; export const TopbarUserButton = ({ full_name, diff --git a/frontend/src/components/MapFullscreenToggle.tsx b/frontend/src/components/maps/MapFullscreenToggle.tsx similarity index 
100% rename from frontend/src/components/MapFullscreenToggle.tsx rename to frontend/src/components/maps/MapFullscreenToggle.tsx diff --git a/frontend/src/components/MapUrlStateSync.tsx b/frontend/src/components/maps/MapUrlStateSync.tsx similarity index 100% rename from frontend/src/components/MapUrlStateSync.tsx rename to frontend/src/components/maps/MapUrlStateSync.tsx diff --git a/frontend/src/components/MeterMapColorLegend.tsx b/frontend/src/components/maps/MeterMapColorLegend.tsx similarity index 100% rename from frontend/src/components/MeterMapColorLegend.tsx rename to frontend/src/components/maps/MeterMapColorLegend.tsx diff --git a/frontend/src/components/WellMapLegend.tsx b/frontend/src/components/maps/WellMapLegend.tsx similarity index 98% rename from frontend/src/components/WellMapLegend.tsx rename to frontend/src/components/maps/WellMapLegend.tsx index ba5bf5c6..534d9331 100644 --- a/frontend/src/components/WellMapLegend.tsx +++ b/frontend/src/components/maps/WellMapLegend.tsx @@ -3,8 +3,7 @@ import { BlackMapIcon, BlueMapIcon, RedMapIcon, -} from './MapIcons'; - +} from "./icons"; export const WellMapLegend: React.FC = () => { return ( @@ -48,4 +47,3 @@ export const WellMapLegend: React.FC = () => { ); }; - diff --git a/frontend/src/components/MapIcons/Black.tsx b/frontend/src/components/maps/icons/Black.tsx similarity index 79% rename from frontend/src/components/MapIcons/Black.tsx rename to frontend/src/components/maps/icons/Black.tsx index 2b0946ed..6c6b9371 100644 --- a/frontend/src/components/MapIcons/Black.tsx +++ b/frontend/src/components/maps/icons/Black.tsx @@ -1,5 +1,5 @@ import L from "leaflet"; -import iconBlack from "./../../assets/leaflet/marker-icon-black.png"; +import iconBlack from "../../../assets/leaflet/marker-icon-black.png"; import iconShadow from "leaflet/dist/images/marker-shadow.png"; export const BlackMapIcon = L.icon({ diff --git a/frontend/src/components/MapIcons/Blue.tsx b/frontend/src/components/maps/icons/Blue.tsx 
similarity index 100% rename from frontend/src/components/MapIcons/Blue.tsx rename to frontend/src/components/maps/icons/Blue.tsx diff --git a/frontend/src/components/MapIcons/Red.tsx b/frontend/src/components/maps/icons/Red.tsx similarity index 80% rename from frontend/src/components/MapIcons/Red.tsx rename to frontend/src/components/maps/icons/Red.tsx index 5e8c9352..08adc817 100644 --- a/frontend/src/components/MapIcons/Red.tsx +++ b/frontend/src/components/maps/icons/Red.tsx @@ -1,5 +1,5 @@ import L from "leaflet"; -import iconRed from "./../../assets/leaflet/marker-icon-red.png"; +import iconRed from "../../../assets/leaflet/marker-icon-red.png"; import iconShadow from "leaflet/dist/images/marker-shadow.png"; export const RedMapIcon = L.icon({ diff --git a/frontend/src/components/MapIcons/index.ts b/frontend/src/components/maps/icons/index.ts similarity index 100% rename from frontend/src/components/MapIcons/index.ts rename to frontend/src/components/maps/icons/index.ts diff --git a/frontend/src/components/Layers/BoundariesLayer.tsx b/frontend/src/components/maps/layers/BoundariesLayer.tsx similarity index 100% rename from frontend/src/components/Layers/BoundariesLayer.tsx rename to frontend/src/components/maps/layers/BoundariesLayer.tsx diff --git a/frontend/src/components/Layers/OpenStreetMapLayer.tsx b/frontend/src/components/maps/layers/OpenStreetMapLayer.tsx similarity index 100% rename from frontend/src/components/Layers/OpenStreetMapLayer.tsx rename to frontend/src/components/maps/layers/OpenStreetMapLayer.tsx diff --git a/frontend/src/components/Layers/SatelliteLayer.tsx b/frontend/src/components/maps/layers/SatelliteLayer.tsx similarity index 100% rename from frontend/src/components/Layers/SatelliteLayer.tsx rename to frontend/src/components/maps/layers/SatelliteLayer.tsx diff --git a/frontend/src/components/Layers/SoutheastGuideLayer.tsx b/frontend/src/components/maps/layers/SoutheastGuideLayer.tsx similarity index 100% rename from 
frontend/src/components/Layers/SoutheastGuideLayer.tsx rename to frontend/src/components/maps/layers/SoutheastGuideLayer.tsx diff --git a/frontend/src/components/Layers/TransportationLayer.tsx b/frontend/src/components/maps/layers/TransportationLayer.tsx similarity index 100% rename from frontend/src/components/Layers/TransportationLayer.tsx rename to frontend/src/components/maps/layers/TransportationLayer.tsx diff --git a/frontend/src/components/Layers/index.ts b/frontend/src/components/maps/layers/index.ts similarity index 100% rename from frontend/src/components/Layers/index.ts rename to frontend/src/components/maps/layers/index.ts diff --git a/frontend/src/components/ImageDialog.tsx b/frontend/src/components/media/ImageDialog.tsx similarity index 100% rename from frontend/src/components/ImageDialog.tsx rename to frontend/src/components/media/ImageDialog.tsx diff --git a/frontend/src/components/ImagePreviewGrid.tsx b/frontend/src/components/media/ImagePreviewGrid.tsx similarity index 100% rename from frontend/src/components/ImagePreviewGrid.tsx rename to frontend/src/components/media/ImagePreviewGrid.tsx diff --git a/frontend/src/components/ImageUploadWithPreview.tsx b/frontend/src/components/media/ImageUploadWithPreview.tsx similarity index 96% rename from frontend/src/components/ImageUploadWithPreview.tsx rename to frontend/src/components/media/ImageUploadWithPreview.tsx index efefffe6..c9003e73 100644 --- a/frontend/src/components/ImageUploadWithPreview.tsx +++ b/frontend/src/components/media/ImageUploadWithPreview.tsx @@ -1,7 +1,8 @@ import { useState } from "react"; import { Grid, Button, Typography, Box } from "@mui/material"; import CloudUploadIcon from "@mui/icons-material/CloudUpload"; -import { ImageDialog, ImagePreviewGrid } from "./"; +import { ImageDialog } from "./ImageDialog"; +import { ImagePreviewGrid } from "./ImagePreviewGrid"; import { enqueueSnackbar } from "notistack"; const MAX_FILE_SIZE = 5 * 1024 * 1024; // 5 MB @@ -124,7 +125,7 @@ 
export const ImageUploadWithPreview = ({ { + onOpen={(src: string) => { setSelectedImage(src); setDialogOpen(true); }} diff --git a/frontend/src/components/UserAvatar.tsx b/frontend/src/components/media/UserAvatar.tsx similarity index 100% rename from frontend/src/components/UserAvatar.tsx rename to frontend/src/components/media/UserAvatar.tsx diff --git a/frontend/src/components/ManageBreadcrumbTitle.tsx b/frontend/src/components/navigation/ManageBreadcrumbTitle.tsx similarity index 100% rename from frontend/src/components/ManageBreadcrumbTitle.tsx rename to frontend/src/components/navigation/ManageBreadcrumbTitle.tsx diff --git a/frontend/src/components/NavLink.tsx b/frontend/src/components/navigation/NavLink.tsx similarity index 100% rename from frontend/src/components/NavLink.tsx rename to frontend/src/components/navigation/NavLink.tsx diff --git a/frontend/src/components/ReportBreadcrumbTitle.tsx b/frontend/src/components/navigation/ReportBreadcrumbTitle.tsx similarity index 100% rename from frontend/src/components/ReportBreadcrumbTitle.tsx rename to frontend/src/components/navigation/ReportBreadcrumbTitle.tsx diff --git a/frontend/src/components/SessionStatusPoller.tsx b/frontend/src/components/session/SessionStatusPoller.tsx similarity index 100% rename from frontend/src/components/SessionStatusPoller.tsx rename to frontend/src/components/session/SessionStatusPoller.tsx diff --git a/frontend/src/views/Chlorides/ChloridesPlot.tsx b/frontend/src/views/Chlorides/ChloridesPlot.tsx index a6c73401..85389b81 100644 --- a/frontend/src/views/Chlorides/ChloridesPlot.tsx +++ b/frontend/src/views/Chlorides/ChloridesPlot.tsx @@ -2,7 +2,7 @@ import { useEffect, useMemo, useRef, useState } from "react"; import { Box, CircularProgress, Typography } from "@mui/material"; import ReactPlot from "react-plotly.js"; import type { Data } from "plotly.js"; -import { PlotContextMenu } from "../../components/PlotContextMenu"; +import { PlotContextMenu } from 
"@/components/display/PlotContextMenu"; export const Plot = ({ manual_dates, diff --git a/frontend/src/views/MonitoringWells/MonitoringWellsPlot.tsx b/frontend/src/views/MonitoringWells/MonitoringWellsPlot.tsx index 18011cd0..5a3288a1 100644 --- a/frontend/src/views/MonitoringWells/MonitoringWellsPlot.tsx +++ b/frontend/src/views/MonitoringWells/MonitoringWellsPlot.tsx @@ -2,7 +2,7 @@ import { useEffect, useMemo, useRef, useState } from "react"; import { Box, CircularProgress, Typography } from "@mui/material"; import ReactPlot from "react-plotly.js"; import type { Data } from "plotly.js"; -import { PlotContextMenu } from "../../components/PlotContextMenu"; +import { PlotContextMenu } from "@/components/display/PlotContextMenu"; export const Plot = ({ manual_dates, diff --git a/frontend/src/views/Parts/PartsHistory.tsx b/frontend/src/views/Parts/PartsHistory.tsx index 47980660..bb6f79e5 100644 --- a/frontend/src/views/Parts/PartsHistory.tsx +++ b/frontend/src/views/Parts/PartsHistory.tsx @@ -44,7 +44,6 @@ import { ControlledDatepicker, ControlledSelectNonObject, IncreaseQuantityModal, - RouterMuiLink, } from "@/components"; import { useAddParts, @@ -93,6 +92,10 @@ const defaultSchema = { event_types: [...EVENT_TYPE_ORDER] as EventType[], }; +const RouterMuiLink = (props: any) => ( + +); + function normalizeEventTypes(input: unknown): EventType[] { const values = Array.isArray(input) ? 
input : []; const set = new Set(values); diff --git a/frontend/src/views/Reports/Chlorides/index.tsx b/frontend/src/views/Reports/Chlorides/index.tsx index 6f81e691..720caa7c 100644 --- a/frontend/src/views/Reports/Chlorides/index.tsx +++ b/frontend/src/views/Reports/Chlorides/index.tsx @@ -44,7 +44,7 @@ import { TransporationLayer, BoundariesLayer, } from "@/components"; -import { RedMapIcon, BlackMapIcon } from "@/components/MapIcons"; +import { RedMapIcon, BlackMapIcon } from "@/components/maps/icons"; import { useFetchWithAuth } from "@/hooks"; import { useGetWellLocations } from "@/service"; import { Well } from "@/interfaces"; diff --git a/frontend/src/views/Settings.tsx b/frontend/src/views/Settings.tsx index dfeb541d..51cbe226 100644 --- a/frontend/src/views/Settings.tsx +++ b/frontend/src/views/Settings.tsx @@ -9,7 +9,6 @@ import { Box, Button, Chip, - Divider, Grid, IconButton, InputAdornment, diff --git a/frontend/src/views/WellManagement/WellSelectionMap.tsx b/frontend/src/views/WellManagement/WellSelectionMap.tsx index 541f6afe..3e014b8e 100644 --- a/frontend/src/views/WellManagement/WellSelectionMap.tsx +++ b/frontend/src/views/WellManagement/WellSelectionMap.tsx @@ -16,7 +16,7 @@ import { TransporationLayer, WellMapLegend, } from "@/components"; -import { BlueMapIcon, RedMapIcon, BlackMapIcon } from "@/components/MapIcons"; +import { BlueMapIcon, RedMapIcon, BlackMapIcon } from "@/components/maps/icons"; import { WellStatus } from "@/enums"; import L from "leaflet"; From cf0f278124f777f39ea6652279e381e35b4f4869 Mon Sep 17 00:00:00 2001 From: Tyler Adam Martinez Date: Fri, 20 Mar 2026 16:14:43 -0500 Subject: [PATCH 05/22] feat(Settings): Update ui --- .../src/components/layout/SectionCard.tsx | 2 +- frontend/src/routes/settings.tsx | 9 + frontend/src/views/Settings.tsx | 872 +++--------------- .../components/KnownDevicesSection.tsx | 117 +++ .../components/PreferencesSection.tsx | 141 +++ .../Settings/components/ProfileSection.tsx | 219 +++++ 
.../Settings/components/SectionSurface.tsx | 48 + .../Settings/components/SecuritySection.tsx | 159 ++++ .../components/SessionHistorySection.tsx | 201 ++++ .../Settings/components/SessionShared.tsx | 130 +++ .../src/views/Settings/components/index.ts | 6 + 11 files changed, 1139 insertions(+), 765 deletions(-) create mode 100644 frontend/src/views/Settings/components/KnownDevicesSection.tsx create mode 100644 frontend/src/views/Settings/components/PreferencesSection.tsx create mode 100644 frontend/src/views/Settings/components/ProfileSection.tsx create mode 100644 frontend/src/views/Settings/components/SectionSurface.tsx create mode 100644 frontend/src/views/Settings/components/SecuritySection.tsx create mode 100644 frontend/src/views/Settings/components/SessionHistorySection.tsx create mode 100644 frontend/src/views/Settings/components/SessionShared.tsx create mode 100644 frontend/src/views/Settings/components/index.ts diff --git a/frontend/src/components/layout/SectionCard.tsx b/frontend/src/components/layout/SectionCard.tsx index 080aa413..3ad9f498 100644 --- a/frontend/src/components/layout/SectionCard.tsx +++ b/frontend/src/components/layout/SectionCard.tsx @@ -8,7 +8,7 @@ export function SectionCard({ children, }: { title: string; - description: string; + description?: string; icon: typeof SvgIcon; children: React.ReactNode; }) { diff --git a/frontend/src/routes/settings.tsx b/frontend/src/routes/settings.tsx index 5a5fef59..f942d7d8 100644 --- a/frontend/src/routes/settings.tsx +++ b/frontend/src/routes/settings.tsx @@ -1,8 +1,17 @@ import { createFileRoute } from "@tanstack/react-router"; +import { z } from "zod"; import { Settings } from "@/views"; import { ProtectedRoute } from "@/ProtectedRoute"; +import { booleanParam, routeSearchHydrator } from "@/utils"; + +const searchSchema = z.object({ + showClosedSessions: booleanParam(false), +}); export const Route = createFileRoute("/settings")({ + validateSearch: searchSchema, + beforeLoad: ({ search, 
location }) => + routeSearchHydrator(location.pathname, search, location.searchStr), component: () => ( diff --git a/frontend/src/views/Settings.tsx b/frontend/src/views/Settings.tsx index 51cbe226..de271acf 100644 --- a/frontend/src/views/Settings.tsx +++ b/frontend/src/views/Settings.tsx @@ -2,57 +2,31 @@ import { useEffect, useMemo, useState } from "react"; import * as yup from "yup"; import { enqueueSnackbar } from "notistack"; import { yupResolver } from "@hookform/resolvers/yup"; -import { Controller, useForm } from "react-hook-form"; +import { useForm } from "react-hook-form"; +import { Grid, Stack } from "@mui/material"; import { - Alert, - Avatar, - Box, - Button, - Chip, - Grid, - IconButton, - InputAdornment, - ListItemIcon, - MenuItem, - Skeleton, - Stack, - TextField, - Typography, -} from "@mui/material"; -import { alpha } from "@mui/material/styles"; -import { - Check, - CheckCircleOutline, - DeleteOutline, DevicesRounded, - Edit, HistoryRounded, - LaptopMacRounded, - PhoneIphoneRounded, SettingsApplications, ShieldOutlined, - TabletMacRounded, - Visibility, - VisibilityOff, } from "@mui/icons-material"; -import { useAuthUser, useSignIn } from "react-auth-kit"; +import { useSignIn, useAuthUser } from "react-auth-kit"; import { useMutation, useQuery, useQueryClient } from "react-query"; -import { - BackgroundBox, - ImageUploadWithPreview, - RoleChip, - SectionCard, -} from "@/components"; +import { useNavigate } from "@tanstack/react-router"; +import { BackgroundBox, SectionCard } from "@/components"; import { navConfig } from "@/constants"; import { useFetchWithAuth } from "@/hooks"; -import { - KnownDeviceSummary, - SecurityScope, - UserSessionSummary, - UserSessionsResponse, -} from "@/interfaces"; +import { SecurityScope, UserSessionsResponse } from "@/interfaces"; +import { Route } from "@/routes/settings"; import { clearSavedQueryLocalStorage } from "@/service"; import { getTrackedSession } from "@/utils/SessionTracking"; +import { + 
KnownDevicesSection, + PreferencesSection, + ProfileSection, + SecuritySection, + SessionHistorySection, +} from "./Settings/components"; const redirectSchema = yup.object().shape({ redirect_page: yup.string().required("Please select a redirect page"), @@ -70,295 +44,9 @@ const passwordSchema = yup.object().shape({ .required("Please confirm new password"), }); -function formatDateTime(value?: string | null) { - if (!value) return "Not available"; - - return new Intl.DateTimeFormat("en-US", { - dateStyle: "medium", - timeStyle: "short", - }).format(new Date(value)); -} - -function formatRelativeTime(value?: string | null) { - if (!value) return "Unknown"; - - const timestamp = new Date(value).getTime(); - const diffMs = timestamp - Date.now(); - const absMinutes = Math.round(Math.abs(diffMs) / (1000 * 60)); - - if (absMinutes < 1) return "Just now"; - if (absMinutes < 60) { - return `${absMinutes} minute${absMinutes === 1 ? "" : "s"} ${ - diffMs >= 0 ? "from now" : "ago" - }`; - } - - const absHours = Math.round(absMinutes / 60); - if (absHours < 24) { - return `${absHours} hour${absHours === 1 ? "" : "s"} ${ - diffMs >= 0 ? "from now" : "ago" - }`; - } - - const absDays = Math.round(absHours / 24); - return `${absDays} day${absDays === 1 ? "" : "s"} ${ - diffMs >= 0 ? "from now" : "ago" - }`; -} - -function formatReasonLabel(value?: string | null) { - if (!value) return ""; - return value.split("_").join(" "); -} - -function getDeviceIcon(deviceType?: string | null) { - switch (deviceType) { - case "Mobile": - return PhoneIphoneRounded; - case "Tablet": - return TabletMacRounded; - default: - return LaptopMacRounded; - } -} - -function InfoTile({ label, value }: { label: string; value: React.ReactNode }) { - return ( - - - {label} - - - {typeof value === "string" ? 
( - - ) : ( - value - )} - - - ); -} - -function SessionRow({ - session, - onCloseSession, - isClosing, -}: { - session: UserSessionSummary; - onCloseSession: (sessionIdentifier: string) => void; - isClosing: boolean; -}) { - const DeviceIcon = getDeviceIcon(session.device_type); - const statusColor = session.is_active ? "success" : "default"; - - return ( - - - - - - - - - - {session.device_label || "Unknown device"} - - - {[session.browser, session.operating_system, session.ip_address] - .filter(Boolean) - .join(" • ") || "No device details available"} - - - - - - - - - - - - Signed in - - - {formatDateTime(session.signed_in_at)} - - - - - Last seen - - - {formatDateTime(session.last_seen_at)} ( - {formatRelativeTime(session.last_seen_at)}) - - - - - Sign-out status - - - {session.signed_out_at - ? `${formatDateTime(session.signed_out_at)}${ - session.sign_out_reason_name - ? ` • ${formatReasonLabel(session.sign_out_reason_name)}` - : "" - }` - : "Still active"} - - - - - - - - - - ); -} - -function KnownDeviceRow({ device }: { device: KnownDeviceSummary }) { - const DeviceIcon = getDeviceIcon(device.device_type); - - return ( - - - - - - - - - - {device.device_label || "Unknown device"} - - - {[device.browser, device.operating_system, device.device_type] - .filter(Boolean) - .join(" • ") || "No device details available"} - - - - {device.is_current_device ? 
( - } - label="Current device" - /> - ) : null} - - - - - - Sessions - - {device.session_count} - - - - Active now - - - {device.active_session_count} - - - - - First seen - - - {formatDateTime(device.signed_in_at_first)} - - - - - Last seen - - - {formatDateTime(device.last_seen_at)} - - - - - - ); -} - export const Settings = () => { + const navigate = useNavigate(); + const search = Route.useSearch(); const authUser = useAuthUser(); const user = authUser(); const signIn = useSignIn(); @@ -385,6 +73,14 @@ export const Settings = () => { [hasAdminScope, hasReadScope], ); + const setSearch = (updater: (prev: typeof search) => typeof search) => { + navigate({ + to: "/settings", + search: (prev) => updater(prev as typeof search), + replace: true, + }); + }; + const [isEditing, setIsEditing] = useState(false); const [avatarFiles, setAvatarFiles] = useState([]); const [avatarUploadKey, setAvatarUploadKey] = useState(0); @@ -480,7 +176,7 @@ export const Settings = () => { handleSubmit: handleRedirectSubmit, reset: redirectReset, watch: watchRedirectPage, - } = useForm({ + } = useForm<{ redirect_page: string }>({ resolver: yupResolver(redirectSchema), defaultValues: { redirect_page: getRedirectPageQuery?.data?.redirect_page ?? "/", @@ -553,6 +249,7 @@ export const Settings = () => { const onPasswordSubmit = (data: { currentPassword: string; newPassword: string; + confirmPassword: string; }) => { passwordMutation.mutate({ currentPassword: data.currentPassword, @@ -676,6 +373,16 @@ export const Settings = () => { [trackedSession?.sessionIdentifier, userSessionsQuery.data?.sessions], ); + const activeSessions = useMemo( + () => sessions.filter((session) => session.is_active), + [sessions], + ); + + const closedSessions = useMemo( + () => sessions.filter((session) => !session.is_active), + [sessions], + ); + const knownDevices = useMemo( () => userSessionsQuery.data?.known_devices ?? 
[], [userSessionsQuery.data?.known_devices], @@ -709,271 +416,40 @@ export const Settings = () => { description="Review your account information and keep your profile up to date." icon={ShieldOutlined} > - - - - - - - - - - - - } - /> - - - - - - Display name - - - This is how your name appears across the application. - - - - {!isEditing ? ( - - - - - ) : ( - - ( - - )} - /> - - - - )} - - - - - - - Avatar - - - Upload or replace your account image. - - - - - - - - - - - - - - - - -
- - - { + displayNameReset({ + display_name: user?.display_name ?? "", + }); + setIsEditing(false); }} - > - - - - Default landing page - - - Choose where the app should take you after you sign in. - - - {getRedirectPageQuery.isLoading ? ( - - ) : ( - ( - - {redirectOptions.map((route) => { - const RouteIcon = route.icon; - - return ( - - - - - {route.label} - - ); - })} - - )} - /> - )} - - - - - + onSaveDisplayName={displayNameHandleSubmit(onDisplayNameSubmit)} + isSavingDisplayName={displayNameMutation.isLoading} + avatarFiles={avatarFiles} + setAvatarFiles={setAvatarFiles} + avatarUploadKey={avatarUploadKey} + onAvatarSubmit={onAvatarSubmit} + onClearAvatar={() => clearAvatarMutation.mutate()} + isSavingAvatar={avatarMutation.isLoading} + isRemovingAvatar={clearAvatarMutation.isLoading} + /> + - - - - - Cached map data - - - Clear saved client-side caches if the app feels out of - sync. - - - - - + + { description="Update your password and review account access posture." icon={ShieldOutlined} > - - - - Change password - - ( - - - setShowCurrentPassword((current) => !current) - } - edge="end" - > - {showCurrentPassword ? ( - - ) : ( - - )} - - - ), - }} - /> - )} - /> - ( - - - setShowNewPassword((current) => !current) - } - edge="end" - > - {showNewPassword ? ( - - ) : ( - - )} - - - ), - }} - /> - )} - /> - ( - - - setShowConfirmPassword((current) => !current) - } - edge="end" - > - {showConfirmPassword ? ( - - ) : ( - - )} - - - ), - }} - /> - )} - /> - - - - - + @@ -1111,61 +481,35 @@ export const Settings = () => { description="Review sign-ins across devices and close sessions that are no longer needed." icon={HistoryRounded} > - {userSessionsQuery.isLoading ? ( - - - - - - ) : userSessionsQuery.isError ? ( - - Unable to load session history right now. - - ) : sessions.length === 0 ? ( - No recorded sessions were found. 
- ) : ( - - {sessions.map((session) => ( - - closeSessionMutation.mutate(sessionIdentifier) - } - /> - ))} - - )} + + setSearch((prev) => ({ + ...prev, + showClosedSessions: nextValue, + })) + } + closeSession={(sessionIdentifier) => + closeSessionMutation.mutate(sessionIdentifier) + } + closingSessionIdentifier={ + closeSessionMutation.isLoading + ? closeSessionMutation.variables + : undefined + } + /> - - {userSessionsQuery.isLoading ? ( - - - - - ) : userSessionsQuery.isError ? ( - - Unable to load known devices right now. - - ) : knownDevices.length === 0 ? ( - No known devices were found. - ) : ( - - {knownDevices.map((device) => ( - - ))} - - )} + + diff --git a/frontend/src/views/Settings/components/KnownDevicesSection.tsx b/frontend/src/views/Settings/components/KnownDevicesSection.tsx new file mode 100644 index 00000000..73330e95 --- /dev/null +++ b/frontend/src/views/Settings/components/KnownDevicesSection.tsx @@ -0,0 +1,117 @@ +import { Alert, Box, Skeleton, Stack } from "@mui/material"; +import { alpha } from "@mui/material/styles"; +import { KnownDeviceSummary } from "@/interfaces"; +import { SectionSurface } from "./SectionSurface"; +import { + SessionDeviceIdentity, + StatusChip, + formatDateTime, + getDeviceIcon, +} from "./SessionShared"; + +function KnownDeviceRow({ device }: { device: KnownDeviceSummary }) { + const DeviceIcon = getDeviceIcon(device.device_type); + + return ( + + + + + {device.is_current_device ? ( + + ) : null} + + + + + Sessions + {device.session_count} + + + Active now + {device.active_session_count} + + + First seen + + {formatDateTime(device.signed_in_at_first)} + + + + Last seen + + {formatDateTime(device.last_seen_at)} + + + + + + ); +} + +export function KnownDevicesSection({ + isLoading, + isError, + knownDevices, +}: { + isLoading: boolean; + isError: boolean; + knownDevices: KnownDeviceSummary[]; +}) { + return ( + <> + {isLoading ? ( + + + + + ) : isError ? ( + Unable to load known devices right now. 
+ ) : knownDevices.length === 0 ? ( + No known devices were found. + ) : ( + + {knownDevices.map((device) => ( + + ))} + + )} + + ); +} diff --git a/frontend/src/views/Settings/components/PreferencesSection.tsx b/frontend/src/views/Settings/components/PreferencesSection.tsx new file mode 100644 index 00000000..52423ac7 --- /dev/null +++ b/frontend/src/views/Settings/components/PreferencesSection.tsx @@ -0,0 +1,141 @@ +import { Controller, type Control } from "react-hook-form"; +import { + Box, + Button, + ListItemIcon, + MenuItem, + Skeleton, + Stack, + TextField, + type SvgIconProps, +} from "@mui/material"; +import type { ComponentType } from "react"; +import { SectionSurface } from "./SectionSurface"; + +type NavOption = { + path: string; + label: string; + icon: ComponentType; +}; + +export function PreferencesSection({ + redirectControl, + redirectOptions, + onRedirectSubmit, + isRedirectLoading, + isRedirectSaving, + isRedirectSelectionUnchanged, + onClearCachedData, + isClearingCachedData, +}: { + redirectControl: Control<{ redirect_page: string }>; + redirectOptions: NavOption[]; + onRedirectSubmit: React.FormEventHandler; + isRedirectLoading: boolean; + isRedirectSaving: boolean; + isRedirectSelectionUnchanged: boolean; + onClearCachedData: () => void; + isClearingCachedData: boolean; +}) { + return ( + + + + + {isRedirectLoading ? 
( + + ) : ( + ( + + {redirectOptions.map((route) => { + const RouteIcon = route.icon; + + return ( + + + + + + + {route.label} + + + + ); + })} + + )} + /> + )} + + + + + + + + + Clear cache + + } + > + + + + ); +} diff --git a/frontend/src/views/Settings/components/ProfileSection.tsx b/frontend/src/views/Settings/components/ProfileSection.tsx new file mode 100644 index 00000000..aae63df1 --- /dev/null +++ b/frontend/src/views/Settings/components/ProfileSection.tsx @@ -0,0 +1,219 @@ +import { Controller, type Control } from "react-hook-form"; +import { + Box, + Button, + Chip, + Grid, + Stack, + TextField, + Typography, +} from "@mui/material"; +import { Edit } from "@mui/icons-material"; +import { ImageUploadWithPreview, RoleChip } from "@/components"; +import { SectionSurface } from "./SectionSurface"; + +type SettingsUser = { + full_name?: string; + email?: string; + username?: string; + display_name?: string; + avatar_img?: string | null; + user_role?: { + name?: string; + } | null; +}; + +function InfoTile({ label, value }: { label: string; value: React.ReactNode }) { + return ( + + + {label} + + + {typeof value === "string" ? 
( + + ) : ( + value + )} + + + ); +} + +export function ProfileSection({ + user, + isEditing, + setIsEditing, + displayNameControl, + onCancelEdit, + onSaveDisplayName, + isSavingDisplayName, + avatarFiles, + setAvatarFiles, + avatarUploadKey, + onAvatarSubmit, + onClearAvatar, + isSavingAvatar, + isRemovingAvatar, +}: { + user: SettingsUser | null; + isEditing: boolean; + setIsEditing: (value: boolean) => void; + displayNameControl: Control<{ display_name: string }>; + onCancelEdit: () => void; + onSaveDisplayName: () => void; + isSavingDisplayName: boolean; + avatarFiles: File[]; + setAvatarFiles: (files: File[]) => void; + avatarUploadKey: number; + onAvatarSubmit: () => void; + onClearAvatar: () => void; + isSavingAvatar: boolean; + isRemovingAvatar: boolean; +}) { + return ( + + + + + + + + + + + + } + /> + + + + + + + ) : null + } + > + {!isEditing ? null : ( + + ( + + )} + /> + + + + )} + + + + + + + + + + + + + + + + + + + ); +} diff --git a/frontend/src/views/Settings/components/SectionSurface.tsx b/frontend/src/views/Settings/components/SectionSurface.tsx new file mode 100644 index 00000000..55f73979 --- /dev/null +++ b/frontend/src/views/Settings/components/SectionSurface.tsx @@ -0,0 +1,48 @@ +import { Box, Stack, Typography } from "@mui/material"; +import { alpha } from "@mui/material/styles"; + +export function SectionSurface({ + title, + description, + actions, + children, +}: { + title: string; + description?: string; + actions?: React.ReactNode; + children: React.ReactNode; +}) { + return ( + + + + + + {title} + + {description ? 
( + + {description} + + ) : null} + + {actions} + + {children} + + + ); +} diff --git a/frontend/src/views/Settings/components/SecuritySection.tsx b/frontend/src/views/Settings/components/SecuritySection.tsx new file mode 100644 index 00000000..7b23ba98 --- /dev/null +++ b/frontend/src/views/Settings/components/SecuritySection.tsx @@ -0,0 +1,159 @@ +import { Controller, type Control, type FieldErrors } from "react-hook-form"; +import { + Box, + Button, + IconButton, + InputAdornment, + Stack, + TextField, +} from "@mui/material"; +import { Visibility, VisibilityOff } from "@mui/icons-material"; +import { SectionSurface } from "./SectionSurface"; + +type PasswordFormValues = { + currentPassword: string; + newPassword: string; + confirmPassword: string; +}; + +export function SecuritySection({ + passwordControl, + passwordErrors, + handlePasswordSubmit, + onPasswordSubmit, + showCurrentPassword, + setShowCurrentPassword, + showNewPassword, + setShowNewPassword, + showConfirmPassword, + setShowConfirmPassword, + isSavingPassword, +}: { + passwordControl: Control; + passwordErrors: FieldErrors; + handlePasswordSubmit: ( + callback: (data: PasswordFormValues) => void, + ) => React.FormEventHandler; + onPasswordSubmit: (data: PasswordFormValues) => void; + showCurrentPassword: boolean; + setShowCurrentPassword: React.Dispatch>; + showNewPassword: boolean; + setShowNewPassword: React.Dispatch>; + showConfirmPassword: boolean; + setShowConfirmPassword: React.Dispatch>; + isSavingPassword: boolean; +}) { + return ( + + + + ( + + + setShowCurrentPassword((current) => !current) + } + edge="end" + > + {showCurrentPassword ? ( + + ) : ( + + )} + + + ), + }} + /> + )} + /> + ( + + + setShowNewPassword((current) => !current) + } + edge="end" + > + {showNewPassword ? : } + + + ), + }} + /> + )} + /> + ( + + + setShowConfirmPassword((current) => !current) + } + edge="end" + > + {showConfirmPassword ? 
( + + ) : ( + + )} + + + ), + }} + /> + )} + /> + + + + + + + ); +} diff --git a/frontend/src/views/Settings/components/SessionHistorySection.tsx b/frontend/src/views/Settings/components/SessionHistorySection.tsx new file mode 100644 index 00000000..c009d48f --- /dev/null +++ b/frontend/src/views/Settings/components/SessionHistorySection.tsx @@ -0,0 +1,201 @@ +import { + Alert, + Box, + Button, + FormControlLabel, + Skeleton, + Stack, + Switch, + Typography, +} from "@mui/material"; +import { alpha } from "@mui/material/styles"; +import { Check, DeleteOutline } from "@mui/icons-material"; +import { UserSessionSummary } from "@/interfaces"; +import { + SessionDeviceIdentity, + SessionMetaItem, + StatusChip, + formatDateTime, + formatReasonLabel, + formatRelativeTime, + getDeviceIcon, +} from "./SessionShared"; + +function SessionRow({ + session, + onCloseSession, + isClosing, +}: { + session: UserSessionSummary; + onCloseSession: (sessionIdentifier: string) => void; + isClosing: boolean; +}) { + const DeviceIcon = getDeviceIcon(session.device_type); + + return ( + + + + + + {session.is_current ? ( + + ) : null} + + + + + + + + + + + + + + + + ); +} + +export function SessionHistorySection({ + isLoading, + isError, + activeSessions, + closedSessions, + showClosedSessions, + onShowClosedSessionsChange, + closeSession, + closingSessionIdentifier, +}: { + isLoading: boolean; + isError: boolean; + activeSessions: UserSessionSummary[]; + closedSessions: UserSessionSummary[]; + showClosedSessions: boolean; + onShowClosedSessionsChange: (nextValue: boolean) => void; + closeSession: (sessionIdentifier: string) => void; + closingSessionIdentifier?: string; +}) { + const visibleSessions = showClosedSessions + ? [...activeSessions, ...closedSessions] + : activeSessions; + + return ( + + + + Session activity + + + onShowClosedSessionsChange(checked)} + /> + } + label={`Show Closed Session${closedSessions?.length > 1 ? "s" : null}`} + /> + + {isLoading ? ( + + + + + ) : isError ? 
( + + Unable to load session history right now. + + ) : visibleSessions.length === 0 ? ( + + {showClosedSessions + ? "No recorded sessions were found." + : "No active sessions were found."} + + ) : ( + + {visibleSessions.map((session) => ( + + ))} + + )} + + ); +} diff --git a/frontend/src/views/Settings/components/SessionShared.tsx b/frontend/src/views/Settings/components/SessionShared.tsx new file mode 100644 index 00000000..bdf70bf5 --- /dev/null +++ b/frontend/src/views/Settings/components/SessionShared.tsx @@ -0,0 +1,130 @@ +import { + Avatar, + Box, + Chip, + Typography, + type SvgIconProps, +} from "@mui/material"; +import { alpha } from "@mui/material/styles"; +import { + LaptopMacRounded, + PhoneIphoneRounded, + TabletMacRounded, +} from "@mui/icons-material"; +import type { ComponentType } from "react"; + +export function formatDateTime(value?: string | null) { + if (!value) return "Not available"; + + return new Intl.DateTimeFormat("en-US", { + dateStyle: "medium", + timeStyle: "short", + }).format(new Date(value)); +} + +export function formatRelativeTime(value?: string | null) { + if (!value) return "Unknown"; + + const timestamp = new Date(value).getTime(); + const diffMs = timestamp - Date.now(); + const absMinutes = Math.round(Math.abs(diffMs) / (1000 * 60)); + + if (absMinutes < 1) return "Just now"; + if (absMinutes < 60) { + return `${absMinutes} minute${absMinutes === 1 ? "" : "s"} ${ + diffMs >= 0 ? "from now" : "ago" + }`; + } + + const absHours = Math.round(absMinutes / 60); + if (absHours < 24) { + return `${absHours} hour${absHours === 1 ? "" : "s"} ${ + diffMs >= 0 ? "from now" : "ago" + }`; + } + + const absDays = Math.round(absHours / 24); + return `${absDays} day${absDays === 1 ? "" : "s"} ${ + diffMs >= 0 ? 
"from now" : "ago" + }`; +} + +export function formatReasonLabel(value?: string | null) { + if (!value) return ""; + return value.split("_").join(" "); +} + +export function getDeviceIcon( + deviceType?: string | null, +): ComponentType { + switch (deviceType) { + case "Mobile": + return PhoneIphoneRounded; + case "Tablet": + return TabletMacRounded; + default: + return LaptopMacRounded; + } +} + +export function SessionMetaItem({ + label, + value, +}: { + label: string; + value: React.ReactNode; +}) { + return ( + + + {label} + + {value} + + ); +} + +export function SessionDeviceIdentity({ + icon: Icon, + title, + subtitle, +}: { + icon: ComponentType; + title: string; + subtitle: string; +}) { + return ( + + + + + + + {title} + + + {subtitle} + + + + ); +} + +export function StatusChip({ + label, + color, + variant, +}: { + label: string; + color: "default" | "primary" | "success"; + variant?: "filled" | "outlined"; +}) { + return ; +} diff --git a/frontend/src/views/Settings/components/index.ts b/frontend/src/views/Settings/components/index.ts new file mode 100644 index 00000000..1b85dae6 --- /dev/null +++ b/frontend/src/views/Settings/components/index.ts @@ -0,0 +1,6 @@ +export * from "./KnownDevicesSection"; +export * from "./PreferencesSection"; +export * from "./ProfileSection"; +export * from "./SecuritySection"; +export * from "./SectionSurface"; +export * from "./SessionHistorySection"; From 6ae9668d257ae16e366567ab07a6f6e9d85f45a8 Mon Sep 17 00:00:00 2001 From: Tyler Adam Martinez Date: Fri, 20 Mar 2026 16:30:04 -0500 Subject: [PATCH 06/22] feat(KD): Update ui --- .../Settings/components/KnownDevicesSection.tsx | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/frontend/src/views/Settings/components/KnownDevicesSection.tsx b/frontend/src/views/Settings/components/KnownDevicesSection.tsx index 73330e95..aa108c9d 100644 --- a/frontend/src/views/Settings/components/KnownDevicesSection.tsx +++ 
b/frontend/src/views/Settings/components/KnownDevicesSection.tsx @@ -52,28 +52,25 @@ function KnownDeviceRow({ device }: { device: KnownDeviceSummary }) { - + Sessions {device.session_count} - + Active now {device.active_session_count} - + First seen {formatDateTime(device.signed_in_at_first)} - + Last seen {formatDateTime(device.last_seen_at)} From 6abb183d932a52026be0fc0faa6faa6988a2b0ff Mon Sep 17 00:00:00 2001 From: Tyler Adam Martinez Date: Fri, 20 Mar 2026 16:32:50 -0500 Subject: [PATCH 07/22] fix: broken import --- frontend/src/views/Settings/components/KnownDevicesSection.tsx | 1 - 1 file changed, 1 deletion(-) diff --git a/frontend/src/views/Settings/components/KnownDevicesSection.tsx b/frontend/src/views/Settings/components/KnownDevicesSection.tsx index aa108c9d..6878bd91 100644 --- a/frontend/src/views/Settings/components/KnownDevicesSection.tsx +++ b/frontend/src/views/Settings/components/KnownDevicesSection.tsx @@ -1,7 +1,6 @@ import { Alert, Box, Skeleton, Stack } from "@mui/material"; import { alpha } from "@mui/material/styles"; import { KnownDeviceSummary } from "@/interfaces"; -import { SectionSurface } from "./SectionSurface"; import { SessionDeviceIdentity, StatusChip, From e4188bb89ac2d5a2ef9b6b7181067b6ca581c003 Mon Sep 17 00:00:00 2001 From: Tyler Adam Martinez Date: Sat, 21 Mar 2026 14:14:39 -0500 Subject: [PATCH 08/22] refactor(ApiServiceNew): broken super file up --- frontend/src/hooks/index.ts | 1 + frontend/src/hooks/useApiClient.ts | 68 + frontend/src/service/ApiServiceNew.ts | 2023 ----------------- frontend/src/service/index.ts | 10 +- frontend/src/service/lookups.ts | 122 + frontend/src/service/mapCache.ts | 12 + frontend/src/service/measurements.ts | 277 +++ frontend/src/service/meters.ts | 185 ++ frontend/src/service/notifications.ts | 118 + frontend/src/service/parts.ts | 354 +++ frontend/src/service/st2.ts | 14 + frontend/src/service/users.ts | 231 ++ frontend/src/service/wells.ts | 209 ++ frontend/src/service/workOrders.ts | 124 
+ frontend/src/utils/MapCache.ts | 62 + frontend/src/utils/index.ts | 1 + .../MeterHistory/SelectedActivityDetails.tsx | 2 +- .../MeterSelection/MeterSelectionTable.tsx | 2 +- frontend/src/views/Settings.tsx | 2 +- .../src/views/WorkOrders/WorkOrdersTable.tsx | 2 +- 20 files changed, 1791 insertions(+), 2028 deletions(-) create mode 100644 frontend/src/hooks/useApiClient.ts delete mode 100644 frontend/src/service/ApiServiceNew.ts create mode 100644 frontend/src/service/lookups.ts create mode 100644 frontend/src/service/mapCache.ts create mode 100644 frontend/src/service/measurements.ts create mode 100644 frontend/src/service/meters.ts create mode 100644 frontend/src/service/notifications.ts create mode 100644 frontend/src/service/parts.ts create mode 100644 frontend/src/service/st2.ts create mode 100644 frontend/src/service/users.ts create mode 100644 frontend/src/service/wells.ts create mode 100644 frontend/src/service/workOrders.ts create mode 100644 frontend/src/utils/MapCache.ts diff --git a/frontend/src/hooks/index.ts b/frontend/src/hooks/index.ts index 763dbad2..26b894e1 100644 --- a/frontend/src/hooks/index.ts +++ b/frontend/src/hooks/index.ts @@ -1,3 +1,4 @@ +export * from "./useApiClient"; export * from "./useFetchWithAuth"; export * from "./useFetchST2"; export * from "./useIsActiveRoute"; diff --git a/frontend/src/hooks/useApiClient.ts b/frontend/src/hooks/useApiClient.ts new file mode 100644 index 00000000..dc20cb2e --- /dev/null +++ b/frontend/src/hooks/useApiClient.ts @@ -0,0 +1,68 @@ +import { useAuthHeader, useSignOut } from "react-auth-kit"; +import { useNavigate } from "@tanstack/react-router"; +import { formatQueryParams } from "@/utils"; +import { API_URL } from "@/config"; +import { handleExpiredSession } from "@/utils/AuthSession"; + +function formatRoute(route: string) { + return route.replace(/^\/+/, ""); +} + +function buildUrl(route: string, params?: Record) { + return `${API_URL}/${formatRoute(route)}${formatQueryParams(params ?? 
{})}`; +} + +export const useApiClient = () => { + const authHeader = useAuthHeader(); + const signOut = useSignOut(); + const navigate = useNavigate(); + + const request = ({ + method = "GET", + route, + params, + body, + }: { + method?: "GET" | "POST" | "PATCH" | "DELETE"; + route: string; + params?: Record; + body?: any; + }) => { + return fetch(buildUrl(route, params), { + method, + headers: { + Authorization: authHeader(), + ...(body !== undefined ? { "Content-type": "application/json" } : {}), + }, + body: body !== undefined ? JSON.stringify(body) : undefined, + }); + }; + + const get = async (route: string, params?: Record) => { + const response = await request({ method: "GET", route, params }); + + if (!response.ok) { + if (response.status === 440 && localStorage.getItem("loggedIn")) { + handleExpiredSession({ + signOut, + navigate, + message: "Your session has expired, please login again.", + }); + } + + throw new Error(response.status.toString()); + } + + return response.json(); + }; + + return { + get, + post: (route: string, body: any) => + request({ method: "POST", route, body }), + patch: (route: string, body: any) => + request({ method: "PATCH", route, body }), + delete: (route: string, params?: Record) => + request({ method: "DELETE", route, params }), + }; +}; diff --git a/frontend/src/service/ApiServiceNew.ts b/frontend/src/service/ApiServiceNew.ts deleted file mode 100644 index dcddcc69..00000000 --- a/frontend/src/service/ApiServiceNew.ts +++ /dev/null @@ -1,2023 +0,0 @@ -import { - InfiniteData, - useInfiniteQuery, - useMutation, - useQuery, - useQueryClient, - UseQueryOptions, -} from "react-query"; -import { useAuthHeader, useSignOut } from "react-auth-kit"; -import { useSnackbar } from "notistack"; -import { - ActivityTypeLU, - CreateNotificationPayload, - HomeSummary, - MeterListDTO, - MeterListQueryParams, - MeterTypeLU, - NewWellMeasurement, - NoteTypeLU, - Notification, - NotificationCreateResult, - NotificationQueryParams, - 
NotificationType, - ObservedPropertyTypeLU, - Page, - ST2Measurement, - ST2Response, - ServiceTypeLU, - User, - WaterLevelQueryParams, - WellMergeParams, - WellMeasurementDTO, - Well, - WellListQueryParams, - WellDetailsQueryParams, - MeterDetailsQueryParams, - MeterDetails, - MeterPartParams, - MeterMapDTO, - MeterHistoryDTO, - Part, - PartTypeLU, - UserRole, - SecurityScope, - UpdatedUserPassword, - WellUseLU, - SubmitWellCreate, - WellUpdate, - Meter, - MeterStatus, - PatchObservationSubmit, - PatchActivitySubmit, - PatchWellMeasurement, - WorkOrder, - PatchWorkOrder, - NewWorkOrder, - MeterRegister, - WaterSource, - WellStatus, -} from "@/interfaces"; -import { IncreaseQuantityPayload } from "@/interfaces"; -import { WorkOrderStatus } from "@/enums"; -import { API_URL } from "@/config"; -import { useNavigate } from "@tanstack/react-router"; -import { handleExpiredSession } from "@/utils/AuthSession"; -import { - PartHistoryResponse, - UpdatePartHistoryPayload, -} from "@/interfaces/PartHistoryResponse"; - -// Cashe for up to 48 hours -const MAP_CACHE_TTL_MS = 1000 * 60 * 60 * 24 * 2; -const MAP_CACHE_PREFIX = "wmdb:map-cache:"; -const MAP_QUERY_ROUTES = ["meters_locations", "well_locations"] as const; - -type StoredMapCache = { - data: T; - updatedAt: number; -}; - -function getMapCacheStorageKey(queryKey: readonly unknown[]) { - return `${MAP_CACHE_PREFIX}${JSON.stringify(queryKey)}`; -} - -function readMapCache(queryKey: readonly unknown[]) { - if (typeof window === "undefined") return undefined; - - const storageKey = getMapCacheStorageKey(queryKey); - const rawValue = window.localStorage.getItem(storageKey); - if (!rawValue) return undefined; - - try { - const parsed = JSON.parse(rawValue) as StoredMapCache; - if ( - !parsed || - typeof parsed.updatedAt !== "number" || - Date.now() - parsed.updatedAt > MAP_CACHE_TTL_MS - ) { - window.localStorage.removeItem(storageKey); - return undefined; - } - - return parsed; - } catch { - 
window.localStorage.removeItem(storageKey); - return undefined; - } -} - -function writeMapCache(queryKey: readonly unknown[], data: T) { - if (typeof window === "undefined") return; - - const storageKey = getMapCacheStorageKey(queryKey); - const value: StoredMapCache = { - data, - updatedAt: Date.now(), - }; - - window.localStorage.setItem(storageKey, JSON.stringify(value)); -} - -export function clearSavedQueryLocalStorage() { - if (typeof window === "undefined") return; - - const keysToRemove: string[] = []; - for (let i = 0; i < window.localStorage.length; i++) { - const key = window.localStorage.key(i); - if (key?.startsWith(MAP_CACHE_PREFIX)) { - keysToRemove.push(key); - } - } - - keysToRemove.forEach((key) => window.localStorage.removeItem(key)); -} - -function invalidateMapDataCaches( - queryClient: ReturnType, -) { - clearSavedQueryLocalStorage(); - MAP_QUERY_ROUTES.forEach((route) => { - queryClient.removeQueries(route); - queryClient.invalidateQueries(route); - }); -} - -// Date display util -export function toGMT6String(date: Date) { - const dateString = - date.getMonth() + - 1 + - "/" + - (date.getDate() + 1) + - "/" + - date.getFullYear() + - " "; - - date.setHours(date.getHours() - 5); - const timeString = date.toLocaleTimeString("en-US", { - timeZone: "America/Denver", - hour: "numeric", - minute: "numeric", - hour12: true, - }); - - return dateString + timeString; -} - -// Generate a query param string with empty and null fields removed -function formattedQueryParams(queryParams: any) { - if (!queryParams) return ""; - - let queryParamString = new URLSearchParams(); - let params = { ...queryParams }; - - for (let param in params) { - if (params[param] === "" || params[param] == undefined) { - continue; - } - //Handle situation where we have an array of values - if (Array.isArray(params[param])) { - for (let value of params[param]) { - queryParamString.append(param, value); - } - } else { - queryParamString.append(param, params[param]); - } - } - 
// Convert the URLSearchParams object to a string - let formattedString = "?" + queryParamString.toString(); - - return formattedString; -} - -// Fetch function that handles incoming errors from the response. Used as the queryFn in useQuery hooks -async function GETFetch( - route: string, - params: any, - authHeader: string, - signOut: () => unknown, - navigate: (options: { to: string }) => unknown, -) { - const headers = { Authorization: authHeader }; - const response = await fetch( - `${API_URL}/${route}` + formattedQueryParams(params), - { - headers: headers, - }, - ); - - if (!response.ok) { - // If backend indicates that user's token is expired, log them out and notify - if (response.status == 440 && localStorage.getItem("loggedIn")) { - handleExpiredSession({ - signOut, - navigate, - message: "Your session has expired, please login again.", - }); - } - throw new Error(response.status.toString()); - } - - return response.json(); -} - -// Fetches from the NM API's ST2 subdomain (data that relates to water levels) -// For PVACD data, measurements are every 2 hours giving 12 measurements per day and ~4000 per year -// If I want the last 5 years of data, that's 20,000 measurements and I will need to loop through the @iot.nextLink -// to get all the data -async function GETST2Fetch(route: string) { - const starting_year = new Date().getFullYear() - 5; - - const queryParams = formattedQueryParams({ - $filter: `year(phenomenonTime) gt ${starting_year}`, - $orderby: "phenomenonTime asc", - }); - - const url = `https://st2.newmexicowaterdata.org/FROST-Server/v1.1/`; - - // The ST2 API returns data in chunks of 1000, get each chunk and return them all - let valueList: ST2Measurement[] = []; - let nextLink = url + route + queryParams; - let count = 0; // Ensure that it doesn't get stuck in an infinite loop, if somehow iot.nextLink is always defined - do { - const results: ST2Response = await fetch(nextLink).then((r) => r.json()); - nextLink = results["@iot.nextLink"]; - 
valueList.push(...results.value); - count++; - } while (nextLink && count < 20); - - return valueList; -} - -async function POSTFetch(route: string, object: any, authHeader: string) { - const headers = { - Authorization: authHeader, - "Content-type": "application/json", - }; - - return fetch(`${API_URL}/${route}`, { - method: "POST", - headers: headers, - body: JSON.stringify(object), - }); -} - -async function PATCHFetch(route: string, object: any, authHeader: string) { - const headers = { - Authorization: authHeader, - "Content-type": "application/json", - }; - - return fetch(`${API_URL}/${route}`, { - method: "PATCH", - headers: headers, - body: JSON.stringify(object), - }); -} - -export function useGetUseTypes() { - const route = "use_types"; - const authHeader = useAuthHeader(); - const navigate = useNavigate(); - const signOut = useSignOut(); - - return useQuery( - [route], - () => GETFetch(route, null, authHeader(), signOut, navigate), - { keepPreviousData: true }, - ); -} - -export function useGetWaterSources() { - const route = "water_sources"; - const authHeader = useAuthHeader(); - const navigate = useNavigate(); - const signOut = useSignOut(); - - return useQuery( - [route], - () => GETFetch(route, null, authHeader(), signOut, navigate), - { keepPreviousData: true }, - ); -} - -export function useGetWellStatusTypes() { - const route = "well_status_types"; - const authHeader = useAuthHeader(); - const navigate = useNavigate(); - const signOut = useSignOut(); - - return useQuery( - [route], - () => GETFetch(route, null, authHeader(), signOut, navigate), - { keepPreviousData: true }, - ); -} - -export function useGetMeterList(params: MeterListQueryParams | undefined) { - const route = "meters"; - const authHeader = useAuthHeader(); - const navigate = useNavigate(); - const signOut = useSignOut(); - - return useQuery, Error>([route, params], () => - GETFetch(route, params, authHeader(), signOut, navigate), - ); -} - -export function 
useGetMeterLocations(searchstring: string | undefined) { - const route = "meters_locations"; - const authHeader = useAuthHeader(); - const navigate = useNavigate(); - const signOut = useSignOut(); - const queryKey = [route, searchstring] as const; - const cachedData = readMapCache(queryKey); - - return useQuery({ - queryKey, - queryFn: () => - GETFetch( - route, - { search_string: searchstring }, - authHeader(), - signOut, - navigate, - ), - initialData: cachedData?.data, - initialDataUpdatedAt: cachedData?.updatedAt, - onSuccess: (data) => writeMapCache(queryKey, data), - staleTime: MAP_CACHE_TTL_MS, - cacheTime: MAP_CACHE_TTL_MS, - refetchOnWindowFocus: false, - refetchOnMount: false, - refetchOnReconnect: false, - }); -} - -export function useGetMeterTypeList() { - const route = "meter_types"; - const authHeader = useAuthHeader(); - const navigate = useNavigate(); - const signOut = useSignOut(); - - return useQuery([route], () => - GETFetch(route, null, authHeader(), signOut, navigate), - ); -} - -export function useGetHomeSummary() { - const route = "maintenance/home_summary"; - const authHeader = useAuthHeader(); - const navigate = useNavigate(); - const signOut = useSignOut(); - - return useQuery([route], () => - GETFetch(route, null, authHeader(), signOut, navigate), - ); -} - -export function useGetNotifications( - params: NotificationQueryParams | undefined, - options?: UseQueryOptions, Error>, -) { - const route = "notifications"; - const authHeader = useAuthHeader(); - const navigate = useNavigate(); - const signOut = useSignOut(); - - return useQuery, Error>( - [route, params], - () => GETFetch(route, params, authHeader(), signOut, navigate), - { - keepPreviousData: true, - ...options, - }, - ); -} - -export function useGetNotificationTypes() { - const route = "notification_types"; - const authHeader = useAuthHeader(); - const navigate = useNavigate(); - const signOut = useSignOut(); - - return useQuery([route], () => - GETFetch(route, null, 
authHeader(), signOut, navigate), - ); -} - -export function useGetUnreadNotificationCount( - options?: UseQueryOptions<{ unread_count: number }, Error>, -) { - const route = "notifications/unread_count"; - const authHeader = useAuthHeader(); - const navigate = useNavigate(); - const signOut = useSignOut(); - - return useQuery<{ unread_count: number }, Error>( - [route], - () => GETFetch(route, null, authHeader(), signOut, navigate), - { - refetchInterval: 60_000, - ...options, - }, - ); -} - -export function useGetMeterRegisterList() { - const route = "meter_registers"; - const authHeader = useAuthHeader(); - const navigate = useNavigate(); - const signOut = useSignOut(); - - return useQuery([route], () => - GETFetch(route, null, authHeader(), signOut, navigate), - ); -} - -export function useGetMeterStatusTypeList() { - const route = "meter_status_types"; - const authHeader = useAuthHeader(); - const navigate = useNavigate(); - const signOut = useSignOut(); - - return useQuery([route], () => - GETFetch(route, null, authHeader(), signOut, navigate), - ); -} - -export function useGetNoteTypes() { - const route = "note_types"; - const authHeader = useAuthHeader(); - const navigate = useNavigate(); - const signOut = useSignOut(); - - return useQuery([route], () => - GETFetch(route, null, authHeader(), signOut, navigate), - ); -} - -export function useGetMeterHistory(params: MeterDetailsQueryParams) { - const route = "meter_history"; - const authHeader = useAuthHeader(); - const navigate = useNavigate(); - const signOut = useSignOut(); - - return useQuery( - [route, params], - () => GETFetch(route, params, authHeader(), signOut, navigate), - { enabled: params?.meter_id != undefined }, - ); -} - -export function useGetSecurityScopes() { - const route = "security_scopes"; - const authHeader = useAuthHeader(); - const navigate = useNavigate(); - const signOut = useSignOut(); - - return useQuery([route], () => - GETFetch(route, null, authHeader(), signOut, navigate), - ); 
-} - -export function useGetRoles(options?: UseQueryOptions) { - const route = "roles"; - const authHeader = useAuthHeader(); - const navigate = useNavigate(); - const signOut = useSignOut(); - - return useQuery( - [route], - () => GETFetch(route, null, authHeader(), signOut, navigate), - options, - ); -} - -export function useGetUserAdminList(options?: UseQueryOptions) { - const route = "usersadmin"; - const authHeader = useAuthHeader(); - const navigate = useNavigate(); - const signOut = useSignOut(); - - return useQuery( - [route], - () => GETFetch(route, null, authHeader(), signOut, navigate), - options, - ); -} - -export function useGetUserList() { - const route = "users"; - const authHeader = useAuthHeader(); - const navigate = useNavigate(); - const signOut = useSignOut(); - - return useQuery([route], () => - GETFetch(route, null, authHeader(), signOut, navigate), - ); -} - -export function useGetUser(id: number, options = {}) { - const route = "users"; - const authHeader = useAuthHeader(); - const navigate = useNavigate(); - const signOut = useSignOut(); - - return useQuery( - [route, id], - () => GETFetch(`${route}/${id}`, null, authHeader(), signOut, navigate), - options, - ); -} - -export function useGetActivityTypeList() { - const route = "activity_types"; - const authHeader = useAuthHeader(); - const navigate = useNavigate(); - const signOut = useSignOut(); - - return useQuery([route, null], () => - GETFetch(route, null, authHeader(), signOut, navigate), - ); -} - -export function useGetServiceTypes() { - const route = "service_types"; - const authHeader = useAuthHeader(); - const navigate = useNavigate(); - const signOut = useSignOut(); - - return useQuery([route, null], () => - GETFetch(route, null, authHeader(), signOut, navigate), - ); -} - -export function useGetWaterLevels(params: WaterLevelQueryParams) { - const route = "waterlevels"; - const authHeader = useAuthHeader(); - const navigate = useNavigate(); - const signOut = useSignOut(); - - 
return useQuery([route, params], () => - GETFetch(route, params, authHeader(), signOut, navigate), - ); -} - -export function useGetChloridesLevels(params: WaterLevelQueryParams) { - const route = "chlorides"; - const authHeader = useAuthHeader(); - const navigate = useNavigate(); - const signOut = useSignOut(); - - return useQuery([route, params], () => - GETFetch(route, params, authHeader(), signOut, navigate), - ); -} - -export function useGetPropertyTypes() { - const route = "observed_property_types"; - const authHeader = useAuthHeader(); - const navigate = useNavigate(); - const signOut = useSignOut(); - - return useQuery([route], () => - GETFetch(route, null, authHeader(), signOut, navigate), - ); -} - -export function useGetWellById(well_id?: number) { - const route = "wells"; - const authHeader = useAuthHeader(); - const navigate = useNavigate(); - const signOut = useSignOut(); - - return useQuery( - [route, "detail", well_id], - () => - GETFetch( - `${route}/${well_id}`, - undefined, - authHeader(), - signOut, - navigate, - ), - { enabled: !!well_id }, - ); -} - -export function useGetWells(params: WellListQueryParams | undefined) { - const route = "wells"; - const authHeader = useAuthHeader(); - const navigate = useNavigate(); - const signOut = useSignOut(); - - return useQuery, Error>( - [route, params], - () => GETFetch(route, params, authHeader(), signOut, navigate), - { keepPreviousData: true }, - ); -} - -export function useGetWellLocations( - searchstring: string | undefined, - has_chloride_group: boolean | null = null, -) { - const route = "well_locations"; - const authHeader = useAuthHeader(); - const navigate = useNavigate(); - const signOut = useSignOut(); - const PAGE_SIZE = 500; - const queryKey = [route, searchstring, has_chloride_group] as const; - const cachedData = readMapCache>(queryKey); - - return useInfiniteQuery({ - queryKey, - queryFn: async ({ pageParam = 0 }) => { - return GETFetch( - route, - { - search_string: searchstring, - 
offset: pageParam, - limit: PAGE_SIZE, - has_chloride_group, - }, - authHeader(), - signOut, - navigate, - ); - }, - getNextPageParam: (lastPage, allPages) => { - // If we got less than PAGE_SIZE, we’re done - if (!lastPage || lastPage.length < PAGE_SIZE) return undefined; - return allPages.length * PAGE_SIZE; // next offset - }, - initialData: cachedData?.data, - initialDataUpdatedAt: cachedData?.updatedAt, - onSuccess: (data) => writeMapCache(queryKey, data), - staleTime: MAP_CACHE_TTL_MS, - cacheTime: MAP_CACHE_TTL_MS, - refetchOnWindowFocus: false, - refetchOnMount: false, - refetchOnReconnect: false, - }); -} - -export function useGetWell(params: WellDetailsQueryParams | undefined) { - const route = "well"; - const authHeader = useAuthHeader(); - const navigate = useNavigate(); - const signOut = useSignOut(); - - return useQuery( - [route, params], - () => GETFetch(route, params, authHeader(), signOut, navigate), - { - keepPreviousData: true, - enabled: params?.well_id != undefined, - }, - ); -} - -export function useGetMeter(params: MeterDetailsQueryParams | undefined) { - const route = "meter"; - const authHeader = useAuthHeader(); - const navigate = useNavigate(); - const signOut = useSignOut(); - - return useQuery( - [route, params], - () => GETFetch(route, params, authHeader(), signOut, navigate), - { - keepPreviousData: true, - enabled: params?.meter_id != undefined, - }, - ); -} - -export function useGetPartTypeList() { - const route = "part_types"; - const authHeader = useAuthHeader(); - const navigate = useNavigate(); - const signOut = useSignOut(); - - return useQuery( - [route], - () => GETFetch(route, null, authHeader(), signOut, navigate), - { - keepPreviousData: true, - }, - ); -} - -export function useGetParts() { - const route = "parts"; - const authHeader = useAuthHeader(); - const navigate = useNavigate(); - const signOut = useSignOut(); - - return useQuery( - [route], - () => GETFetch(route, null, authHeader(), signOut, navigate), - { - 
keepPreviousData: true, - }, - ); -} - -export function useGetPart(params: { part_id: number } | undefined) { - const route = "part"; - const authHeader = useAuthHeader(); - const navigate = useNavigate(); - const signOut = useSignOut(); - - return useQuery( - [route, params], - () => GETFetch(route, params, authHeader(), signOut, navigate), - { - keepPreviousData: true, - enabled: params?.part_id != undefined, - }, - ); -} - -export function useGetMeterPartsList(params: MeterPartParams | undefined) { - const route = "meter_parts"; - const authHeader = useAuthHeader(); - const navigate = useNavigate(); - const signOut = useSignOut(); - - return useQuery( - [route, params], - () => GETFetch(route, params, authHeader(), signOut, navigate), - { - enabled: params?.meter_id != undefined, - }, - ); -} - -export function useGetST2WaterLevels(datastreamID: number | undefined) { - const route = `Datastreams(${datastreamID})/Observations`; - - return useQuery( - [route, datastreamID], - () => GETST2Fetch(route), - { enabled: !!datastreamID }, - ); -} - -export function useGetWorkOrders( - params: { - filter_by_status: WorkOrderStatus[]; - start_date?: string; // ISO date string (YYYY-MM-DD) - work_order_id?: number[]; - assigned_user_id?: number; - q?: string; - }, - options?: UseQueryOptions, -) { - const route = "work_orders"; - const authHeader = useAuthHeader(); - const navigate = useNavigate(); - const signOut = useSignOut(); - - // normalize params so queryKey is stable (order of arrays matters) - const normalized = { - ...params, - filter_by_status: [...(params.filter_by_status ?? [])].sort(), - work_order_id: params.work_order_id - ? 
[...params.work_order_id].sort((a, b) => a - b) - : undefined, - q: params.q?.trim() || undefined, - }; - - return useQuery({ - queryKey: [route, normalized], - queryFn: () => GETFetch(route, normalized, authHeader(), signOut, navigate), - ...options, - }); -} - -export function useCreateUser(onSuccess: Function) { - const { enqueueSnackbar } = useSnackbar(); - const queryClient = useQueryClient(); - const route = "users"; - const authHeader = useAuthHeader(); - - return useMutation({ - mutationFn: async (user: User) => { - const response = await POSTFetch(route, user, authHeader()); - - if (!response.ok) { - if (response.status == 422) { - enqueueSnackbar("One or More Required Fields Not Entered!", { - variant: "error", - }); - throw Error("Incomplete form, check network logs for details"); - } else { - enqueueSnackbar("Unknown Error Occurred!", { variant: "error" }); - throw Error("Unknown Error: " + response.status); - } - } else { - onSuccess(); - - const responseJson = await response.json(); - queryClient.setQueryData(["usersadmin"], (old: User[] | undefined) => { - if (old != undefined) { - return [...old, responseJson]; - } - return []; - }); - return responseJson; - } - }, - retry: 0, - }); -} - -export function useUpdateUser(onSuccess: Function) { - const { enqueueSnackbar } = useSnackbar(); - const route = "users"; - const authHeader = useAuthHeader(); - const queryClient = useQueryClient(); - - return useMutation({ - mutationFn: async (updatedUser: User) => { - const response = await PATCHFetch(route, updatedUser, authHeader()); - - if (!response.ok) { - if (response.status == 422) { - enqueueSnackbar("One or More Required Fields Not Entered!", { - variant: "error", - }); - throw Error("Incomplete form, check network logs for details"); - } else { - enqueueSnackbar("Unknown Error Occurred!", { variant: "error" }); - throw Error("Unknown Error: " + response.status); - } - } else { - onSuccess(); - const responseJson = await response.json(); - - // Update 
the user on the users list - queryClient.setQueryData(["usersadmin"], (old: User[] | undefined) => { - if (old != undefined) { - let newUsersList = [...old]; - const userIndex = old?.findIndex( - (user) => user.id === responseJson["id"], - ); - - if (userIndex != undefined && userIndex != -1) { - newUsersList[userIndex] = responseJson; - } - - return newUsersList; - } - return []; - }); - return responseJson; - } - }, - retry: 0, - }); -} - -export function useCreateWell(onSuccess: Function) { - const { enqueueSnackbar } = useSnackbar(); - const route = "wells"; - const authHeader = useAuthHeader(); - const queryClient = useQueryClient(); - - return useMutation({ - mutationFn: async (new_well: SubmitWellCreate) => { - const response = await POSTFetch(route, new_well, authHeader()); - - if (!response.ok) { - if (response.status == 422) { - enqueueSnackbar("One or More Required Fields Not Entered!", { - variant: "error", - }); - throw Error("Incomplete form, check network logs for details"); - } - if (response.status == 409) { - enqueueSnackbar("Cannot use existing RA number", { - variant: "error", - }); - throw Error("RA number already in database"); - } else { - enqueueSnackbar("Unknown Error Occurred!", { variant: "error" }); - throw Error("Unknown Error: " + response.status); - } - } else { - onSuccess(); - const responseJson = await response.json(); - invalidateMapDataCaches(queryClient); - return responseJson; - } - }, - retry: 0, - }); -} - -export function useCreateRole(onSuccess: Function) { - const { enqueueSnackbar } = useSnackbar(); - const queryClient = useQueryClient(); - const route = "roles"; - const authHeader = useAuthHeader(); - - return useMutation({ - mutationFn: async (new_role: UserRole) => { - const response = await POSTFetch(route, new_role, authHeader()); - - if (!response.ok) { - if (response.status == 422) { - enqueueSnackbar("One or More Required Fields Not Entered!", { - variant: "error", - }); - throw Error("Incomplete form, check 
network logs for details"); - } else { - enqueueSnackbar("Unknown Error Occurred!", { variant: "error" }); - throw Error("Unknown Error: " + response.status); - } - } else { - onSuccess(); - - const responseJson = await response.json(); - queryClient.setQueryData(["roles"], (old: UserRole[] | undefined) => { - if (old != undefined) { - return [...old, responseJson]; - } - return []; - }); - return responseJson; - } - }, - retry: 0, - }); -} - -export function useCreateNotifications(onSuccess: Function) { - const { enqueueSnackbar } = useSnackbar(); - const queryClient = useQueryClient(); - const route = "notifications"; - const authHeader = useAuthHeader(); - - return useMutation({ - mutationFn: async (payload: CreateNotificationPayload) => { - const response = await POSTFetch(route, payload, authHeader()); - - if (!response.ok) { - const errorMessage = - (await response.json().catch(() => null))?.detail ?? - `Error ${response.status}`; - enqueueSnackbar(errorMessage, { variant: "error" }); - throw Error(errorMessage); - } - - const responseJson: NotificationCreateResult = await response.json(); - onSuccess(responseJson); - queryClient.invalidateQueries("notifications"); - queryClient.invalidateQueries("notifications/unread_count"); - return responseJson; - }, - onSuccess: (result) => { - enqueueSnackbar( - `Created ${result.created_count} notification${result.created_count === 1 ? "" : "s"}.`, - { - variant: "success", - }, - ); - }, - retry: 0, - }); -} - -export function useUpdateNotificationReadStatus(onSuccess?: Function) { - const { enqueueSnackbar } = useSnackbar(); - const queryClient = useQueryClient(); - const route = "notifications"; - const authHeader = useAuthHeader(); - - return useMutation({ - mutationFn: async (payload: { id: number; is_read: boolean }) => { - const response = await PATCHFetch(route, payload, authHeader()); - - if (!response.ok) { - const errorMessage = - (await response.json().catch(() => null))?.detail ?? 
- `Error ${response.status}`; - enqueueSnackbar(errorMessage, { variant: "error" }); - throw Error(errorMessage); - } - - return response.json(); - }, - onSuccess: (result) => { - queryClient.invalidateQueries("notifications"); - queryClient.invalidateQueries("notifications/unread_count"); - onSuccess?.(result); - }, - retry: 0, - }); -} - -export function useUpdateWell(onSuccess: Function) { - const { enqueueSnackbar } = useSnackbar(); - const route = "wells"; - const authHeader = useAuthHeader(); - const queryClient = useQueryClient(); - - return useMutation({ - mutationFn: async (updatedWell: WellUpdate) => { - const response = await PATCHFetch(route, updatedWell, authHeader()); - - if (!response.ok) { - if (response.status == 422) { - enqueueSnackbar("One or More Required Fields Not Entered!", { - variant: "error", - }); - throw Error("Incomplete form, check network logs for details"); - } - if (response.status == 409) { - enqueueSnackbar("Cannot use existing RA number", { - variant: "error", - }); - throw Error("RA number already in database"); - } else { - enqueueSnackbar("Unknown Error Occurred!", { variant: "error" }); - throw Error("Unknown Error: " + response.status); - } - } else { - onSuccess(); - const responseJson = await response.json(); - invalidateMapDataCaches(queryClient); - - // Since query data will be based on params, iterate through all possible queries of this route - const wellsQueries = queryClient.getQueryCache().findAll("wells"); - - wellsQueries.forEach((query: any) => { - queryClient.setQueryData( - query.queryKey, - (old: Page | undefined) => { - if (old != undefined) { - let newPage = JSON.parse(JSON.stringify(old)); // Deep copy so we can edit - - // If well found on the old query data, update it - const wellIndex = old.items.findIndex( - (well: Well) => well.id == responseJson["id"], - ); - if (wellIndex != undefined && wellIndex != -1) { - newPage.items[wellIndex] = responseJson; - } - return newPage; - } - return { items: [], 
total: 0, limit: 0, offset: 0 }; // Empty page if no old data - }, - ); - }); - return responseJson; - } - }, - retry: 0, - }); -} - -export function useUpdateRole(onSuccess: Function) { - const { enqueueSnackbar } = useSnackbar(); - const route = "roles"; - const authHeader = useAuthHeader(); - const queryClient = useQueryClient(); - - return useMutation({ - mutationFn: async (updatedRole: UserRole) => { - const response = await PATCHFetch(route, updatedRole, authHeader()); - - if (!response.ok) { - if (response.status == 422) { - enqueueSnackbar("One or More Required Fields Not Entered!", { - variant: "error", - }); - throw Error("Incomplete form, check network logs for details"); - } else { - enqueueSnackbar("Unknown Error Occurred!", { variant: "error" }); - throw Error("Unknown Error: " + response.status); - } - } else { - onSuccess(); - const responseJson = await response.json(); - - // Update the part on the parts list - queryClient.setQueryData(["roles"], (old: UserRole[] | undefined) => { - if (old != undefined) { - let newRoles = [...old]; - const roleIndex = old?.findIndex( - (role) => role.id === responseJson["id"], - ); - - if (roleIndex != undefined && roleIndex != -1) { - newRoles[roleIndex] = responseJson; - } - - return newRoles; - } - return []; - }); - return responseJson; - } - }, - retry: 0, - }); -} - -export function useUpdateUserPassword(onSuccess: Function) { - const { enqueueSnackbar } = useSnackbar(); - const route = "users/update_password"; - const authHeader = useAuthHeader(); - - return useMutation({ - mutationFn: async (updatedUserPassword: UpdatedUserPassword) => { - const response = await POSTFetch( - route, - updatedUserPassword, - authHeader(), - ); - - if (!response.ok) { - if (response.status == 422) { - enqueueSnackbar("One or More Required Fields Not Entered!", { - variant: "error", - }); - throw Error("Incomplete form, check network logs for details"); - } else { - enqueueSnackbar("Unknown Error Occurred!", { variant: "error" 
}); - throw Error("Unknown Error: " + response.status); - } - } else { - onSuccess(); - const responseJson = await response.json(); - return responseJson; - } - }, - retry: 0, - }); -} - -export function useUpdateMeterType(onSuccess: Function) { - const { enqueueSnackbar } = useSnackbar(); - const route = "meter_types"; - const authHeader = useAuthHeader(); - const queryClient = useQueryClient(); - - return useMutation({ - mutationFn: async (meterType: Partial) => { - const response = await PATCHFetch(route, meterType, authHeader()); - - if (!response.ok) { - if (response.status == 422) { - enqueueSnackbar("One or More Required Fields Not Entered!", { - variant: "error", - }); - throw Error("Incomplete form, check network logs for details"); - } else { - enqueueSnackbar("Unknown Error Occurred!", { variant: "error" }); - throw Error("Unknown Error: " + response.status); - } - } else { - onSuccess(); - - const responseJson = await response.json(); - - // Update the part on the parts list - queryClient.setQueryData( - ["meter_types"], - (old: MeterTypeLU[] | undefined) => { - if (old != undefined) { - let newMeterTypesList = [...old]; - const typeIndex = old?.findIndex( - (type) => type.id === responseJson["id"], - ); - - if (typeIndex != undefined && typeIndex != -1) { - newMeterTypesList[typeIndex] = responseJson; - } - - return newMeterTypesList; - } - return []; - }, - ); - return responseJson; - } - }, - retry: 0, - }); -} - -export function useCreateMeter(onSuccess: Function) { - const { enqueueSnackbar } = useSnackbar(); - const route = "meters"; - const authHeader = useAuthHeader(); - const queryClient = useQueryClient(); - - return useMutation({ - mutationFn: async (meter: Meter) => { - const response = await POSTFetch(route, meter, authHeader()); - - if (!response.ok) { - if (response.status == 422) { - enqueueSnackbar("One or More Required Fields Not Entered!", { - variant: "error", - }); - throw Error("Incomplete form, check network logs for details"); - 
} - if (response.status == 409) { - enqueueSnackbar("Cannot use existing serial number!", { - variant: "error", - }); - throw Error("Meter serial number already in database"); - } else { - enqueueSnackbar("Unknown Error Occurred!", { variant: "error" }); - throw Error("Unknown Error: " + response.status); - } - } else { - onSuccess(); - - const responseJson = await response.json(); - invalidateMapDataCaches(queryClient); - return responseJson; - } - }, - retry: 0, - }); -} - -export function useCreateMeterType(onSuccess: Function) { - const { enqueueSnackbar } = useSnackbar(); - const queryClient = useQueryClient(); - const route = "meter_types"; - const authHeader = useAuthHeader(); - - return useMutation({ - mutationFn: async (meter_type: MeterTypeLU) => { - const response = await POSTFetch(route, meter_type, authHeader()); - - if (!response.ok) { - if (response.status == 422) { - enqueueSnackbar("One or More Required Fields Not Entered!", { - variant: "error", - }); - throw Error("Incomplete form, check network logs for details"); - } else { - enqueueSnackbar("Unknown Error Occurred!", { variant: "error" }); - throw Error("Unknown Error: " + response.status); - } - } else { - onSuccess(); - - const responseJson = await response.json(); - queryClient.setQueryData( - ["meter_types"], - (old: MeterTypeLU[] | undefined) => { - if (old != undefined) { - return [...old, responseJson]; - } - return []; - }, - ); - return responseJson; - } - }, - retry: 0, - }); -} - -export function useUpdatePart(onSuccess: Function) { - const { enqueueSnackbar } = useSnackbar(); - const route = "part"; - const authHeader = useAuthHeader(); - const queryClient = useQueryClient(); - - return useMutation({ - mutationFn: async (part: Partial) => { - console.log(part); - const response = await PATCHFetch(route, part, authHeader()); - - if (!response.ok) { - if (response.status == 422) { - enqueueSnackbar("One or More Required Fields Not Entered!", { - variant: "error", - }); - throw 
Error("Incomplete form, check network logs for details"); - } - if (response.status == 409) { - enqueueSnackbar("Cannot use existing serial number!", { - variant: "error", - }); - throw Error("Part serial number already in database"); - } else { - enqueueSnackbar("Unknown Error Occurred!", { variant: "error" }); - throw Error("Unknown Error: " + response.status); - } - } else { - onSuccess(); - - const responseJson = await response.json(); - - // Update the part on the parts list - queryClient.setQueryData(["parts"], (old: Part[] | undefined) => { - if (old != undefined) { - let newPartsList = [...old]; - const partIndex = old?.findIndex( - (part) => part.id === responseJson["id"], - ); - - if (partIndex != undefined && partIndex != -1) { - newPartsList[partIndex] = responseJson; - } - - return newPartsList; - } - return []; - }); - return responseJson; - } - }, - retry: 0, - }); -} - -export function useUpdateMeter(onSuccess: Function) { - const { enqueueSnackbar } = useSnackbar(); - const route = "meter"; - const authHeader = useAuthHeader(); - const queryClient = useQueryClient(); - - return useMutation({ - mutationFn: async (meterDetails: Meter) => { - const response = await PATCHFetch(route, meterDetails, authHeader()); - - if (!response.ok) { - if (response.status == 422) { - enqueueSnackbar("One or More Required Fields Not Entered!", { - variant: "error", - }); - throw Error("Incomplete form, check network logs for details"); - } - if (response.status == 409) { - enqueueSnackbar("Cannot use existing serial number!", { - variant: "error", - }); - throw Error("Meter serial number already in database"); - } else { - enqueueSnackbar("Unknown Error Occurred!", { variant: "error" }); - throw Error("Unknown Error: " + response.status); - } - } else { - onSuccess(); - - const responseJson = await response.json(); - invalidateMapDataCaches(queryClient); - - // Since query data will be based on params, iterate through all possible queries of this route - const 
meterQueries = queryClient.getQueryCache().findAll("meters"); - - meterQueries.forEach((query: any) => { - queryClient.setQueryData( - query.queryKey, - (old: Page | undefined) => { - if (old != undefined) { - let newPage = JSON.parse(JSON.stringify(old)); // Deep copy so we can edit - - // If well found on the old query data, update it - const meterIndex = old.items.findIndex( - (meter: Meter) => meter.id == responseJson["id"], - ); - if (meterIndex != undefined && meterIndex != -1) { - newPage.items[meterIndex] = responseJson; - } - return newPage; - } - return { items: [], total: 0, limit: 0, offset: 0 }; // Empty page if no old data - }, - ); - }); - return responseJson; - } - }, - retry: 0, - }); -} - -export function useUpdateObservation(onSuccess: Function) { - const { enqueueSnackbar } = useSnackbar(); - const route = "observations"; - const authHeader = useAuthHeader(); - - return useMutation({ - mutationFn: async (observation: PatchObservationSubmit) => { - const response = await PATCHFetch(route, observation, authHeader()); - - if (!response.ok) { - if (response.status == 422) { - enqueueSnackbar("One or More Required Fields Not Entered!", { - variant: "error", - }); - throw Error("Incomplete form, check network logs for details"); - } - if (response.status == 409) { - enqueueSnackbar("Cannot use existing serial number!", { - variant: "error", - }); - throw Error("Observation serial number already in database"); - } else { - enqueueSnackbar("Unknown Error Occurred!", { variant: "error" }); - throw Error("Unknown Error: " + response.status); - } - } else { - onSuccess(); - const responseJson = await response.json(); - return responseJson; - } - }, - retry: 0, - }); -} - -export function useDeleteObservation(onSuccess: Function) { - const { enqueueSnackbar } = useSnackbar(); - const authHeader = useAuthHeader(); - - return useMutation({ - mutationFn: async (observation_id: number) => { - const response = await fetch( - 
`${API_URL}/observations?observation_id=${observation_id}`, - { - method: "DELETE", - headers: { - Authorization: authHeader(), - }, - }, - ); - - if (!response.ok) { - enqueueSnackbar("Unknown Error Occurred!", { variant: "error" }); - throw Error("Unknown Error: " + response.status); - } else { - onSuccess(); - return true; - } - }, - retry: 0, - }); -} - -export function useUpdateActivity(onSuccess: Function) { - const { enqueueSnackbar } = useSnackbar(); - const route = "activities"; - const authHeader = useAuthHeader(); - - return useMutation({ - mutationFn: async (activityForm: PatchActivitySubmit) => { - const response = await PATCHFetch(route, activityForm, authHeader()); - - // This responsibility will eventually move to callsite when special error codes arent relied on - if (!response.ok) { - if (response.status == 422) { - enqueueSnackbar("One or More Required Fields Not Entered!", { - variant: "error", - }); - throw Error("Incomplete form, check network logs for details"); - } - if (response.status == 409) { - //There could be a couple reasons for this... 
out of order activity or duplicate activity - let errorText = await response.text(); - enqueueSnackbar(JSON.parse(errorText).detail, { variant: "error" }); - throw Error(errorText); - } else { - enqueueSnackbar("Unknown Error Occurred!", { variant: "error" }); - throw Error("Unknown Error: " + response.status); - } - } else { - onSuccess(); - - const responseJson = await response.json(); - return responseJson; - } - }, - retry: 0, - }); -} - -export function useDeleteActivity(onSuccess: Function) { - const { enqueueSnackbar } = useSnackbar(); - const authHeader = useAuthHeader(); - - return useMutation({ - mutationFn: async (activity_id: number) => { - const response = await fetch( - `${API_URL}/activities?activity_id=${activity_id}`, - { - method: "DELETE", - headers: { - Authorization: authHeader(), - }, - }, - ); - - if (!response.ok) { - enqueueSnackbar("Unknown Error Occurred!", { variant: "error" }); - throw Error("Unknown Error: " + response.status); - } else { - onSuccess(); - return true; - } - }, - retry: 0, - }); -} - -export function useCreatePart(onSuccess: Function) { - const { enqueueSnackbar } = useSnackbar(); - const queryClient = useQueryClient(); - const route = "parts"; - const authHeader = useAuthHeader(); - - return useMutation({ - mutationFn: async (part: Part) => { - try { - if (!part.part_type?.id) { - throw new Error("part_type_id is required but missing"); - } - - // Due to the way the form gets generated for a new part, - // I need to populate part_type_id manually here - part.part_type_id = part.part_type?.id; - - const response = await POSTFetch(route, part, authHeader()); - - if (!response.ok) { - if (response.status == 422) { - enqueueSnackbar("One or More Required Fields Not Entered!", { - variant: "error", - }); - throw Error("Incomplete form, check network logs for details"); - } - if (response.status == 409) { - enqueueSnackbar("Cannot use existing serial number!", { - variant: "error", - }); - throw Error("Part serial number 
already in database"); - } else { - enqueueSnackbar("Unknown Error Occurred!", { variant: "error" }); - throw Error("Unknown Error: " + response.status); - } - } else { - onSuccess(); - - const responseJson = await response.json(); - queryClient.setQueryData(["parts"], (old: Part[] | undefined) => { - if (old != undefined) { - return [...old, responseJson]; - } - return []; - }); - return responseJson; - } - } catch { - enqueueSnackbar( - "An Error Occurred, Please Ensure the Part Number is Unique", - { variant: "error" }, - ); - throw Error( - "Server side error while creating a part, likely due to a non-unique part number.", - ); - } - }, - retry: 0, - }); -} - -export function useCreateChlorideMeasurement() { - const { enqueueSnackbar } = useSnackbar(); - const queryClient = useQueryClient(); - const route = "chlorides"; - const authHeader = useAuthHeader(); - - return useMutation({ - mutationFn: async (newChlorideMeasurement: NewWellMeasurement) => { - const response = await POSTFetch( - route, - newChlorideMeasurement, - authHeader(), - ); - - if (!response.ok) { - if (response.status == 422) { - enqueueSnackbar("One or More Required Fields Not Entered!", { - variant: "error", - }); - throw Error("Incomplete form, check network logs for details"); - } else { - enqueueSnackbar("Unknown Error Occurred!", { variant: "error" }); - throw Error("Unknown Error: " + response.status); - } - } else { - enqueueSnackbar("Successfully Created New Measurement!", { - variant: "success", - }); - - const responseJson = await response.json(); - - queryClient.setQueryData( - [route, { well_id: responseJson["well_id"] }], - (old: WellMeasurementDTO[] | undefined) => { - return [...(old ?? 
[]), responseJson]; - }, - ); - return responseJson; - } - }, - retry: 0, - }); -} - -export function useCreateWaterLevel() { - const { enqueueSnackbar } = useSnackbar(); - const queryClient = useQueryClient(); - const route = "waterlevels"; - const authHeader = useAuthHeader(); - - return useMutation({ - mutationFn: async (newWaterLevel: Partial) => { - const response = await POSTFetch(route, newWaterLevel, authHeader()); - - if (!response.ok) { - if (response.status == 422) { - enqueueSnackbar("One or More Required Fields Not Entered!", { - variant: "error", - }); - throw Error("Incomplete form, check network logs for details"); - } else { - enqueueSnackbar("Unknown Error Occurred!", { variant: "error" }); - throw Error("Unknown Error: " + response.status); - } - } else { - enqueueSnackbar("Successfully Created New Measurement!", { - variant: "success", - }); - - const responseJson = await response.json(); - - queryClient.setQueryData( - [route, { well_id: responseJson["well_id"] }], - (old: WellMeasurementDTO[] | undefined) => { - return [...(old ?? 
[]), responseJson]; - }, - ); - return responseJson; - } - }, - retry: 0, - }); -} - -export function useUpdateWaterLevel(onSuccess: Function) { - const { enqueueSnackbar } = useSnackbar(); - const route = "waterlevels"; - const authHeader = useAuthHeader(); - - return useMutation({ - mutationFn: async (updatedWaterLevel: Partial) => { - const response = await PATCHFetch(route, updatedWaterLevel, authHeader()); - - if (!response.ok) { - if (response.status == 422) { - enqueueSnackbar("One or More Required Fields Not Entered!", { - variant: "error", - }); - throw Error("Incomplete form, check network logs for details"); - } else { - enqueueSnackbar("Unknown Error Occurred!", { variant: "error" }); - throw Error("Unknown Error: " + response.status); - } - } else { - enqueueSnackbar("Successfully Updated Measurement!", { - variant: "success", - }); - onSuccess(); //Success function should be used to update measurement table - - const responseJson = await response.json(); - - //Update the water levels previously queried using queryClient **Under development!! 
- // queryClient.setQueryData([route, {well_id: responseJson["well_id"]}], (old: WellMeasurementDTO[] | undefined) => { - // if (old != undefined) { - // let newWaterLevels = [...old] - // const waterLevelIndex = old.findIndex(waterLevel => waterLevel.id === responseJson["id"]) - - // if (waterLevelIndex != undefined && waterLevelIndex != -1) { - // newWaterLevels[waterLevelIndex] = responseJson - // } - - // return newWaterLevels - // } - // return [] - // }) - - return responseJson; - } - }, - retry: 0, - }); -} - -export function useDeleteWaterLevel() { - const { enqueueSnackbar } = useSnackbar(); - const authHeader = useAuthHeader(); - - return useMutation({ - mutationFn: async (waterLevelID: number) => { - const response = await fetch( - `${API_URL}/waterlevels?waterlevel_id=${waterLevelID}`, - { - method: "DELETE", - headers: { - Authorization: authHeader(), - "Content-type": "application/json", - }, - }, - ); - - if (!response.ok) { - enqueueSnackbar("Unknown Error Occurred!", { variant: "error" }); - throw Error("Unknown Error: " + response.status); - } else { - enqueueSnackbar("Successfully Deleted Measurement!", { - variant: "success", - }); - - return true; - } - }, - retry: 0, - }); -} - -export function useMergeWells(onSuccess: Function) { - const { enqueueSnackbar } = useSnackbar(); - const route = "merge_wells"; - const authHeader = useAuthHeader(); - - return useMutation({ - mutationFn: async (mergeWells: WellMergeParams) => { - console.log(mergeWells); - const response = await POSTFetch(route, mergeWells, authHeader()); - - if (!response.ok) { - if (response.status == 422) { - enqueueSnackbar("Testing remove??!", { variant: "error" }); - throw Error("Incomplete form, check network logs for details"); - } else { - enqueueSnackbar("Unknown Error Occurred!", { variant: "error" }); - throw Error("Unknown Error: " + response.status); - } - } else { - onSuccess(); - const responseJson = await response.json(); - return responseJson; - } - }, - retry: 0, - 
}); -} - -export function useUpdateWorkOrder() { - const { enqueueSnackbar } = useSnackbar(); - const route = "work_orders"; - const authHeader = useAuthHeader(); - - return useMutation({ - mutationFn: async (workOrder: PatchWorkOrder) => { - const response = await PATCHFetch(route, workOrder, authHeader()); - - if (!response.ok) { - if (response.status == 409) { - enqueueSnackbar("Title must be unique for date and meter", { - variant: "error", - }); - throw Error( - "Failure of date, meter, and title uniqueness constraint", - ); - } - if (response.status == 422) { - enqueueSnackbar("Title cannot be blank", { variant: "error" }); - throw Error("Title is empty string"); - } else { - enqueueSnackbar("Unknown Error Occurred!", { variant: "error" }); - throw Error("Unknown Error: " + response.status); - } - } else { - const responseJson = await response.json(); - return responseJson; - } - }, - retry: 0, - }); -} - -export function useDeleteWorkOrder(onSuccess: Function) { - const { enqueueSnackbar } = useSnackbar(); - const authHeader = useAuthHeader(); - - return useMutation({ - mutationFn: async (workOrderID: number) => { - const response = await fetch( - `${API_URL}/work_orders?work_order_id=${workOrderID}`, - { - method: "DELETE", - headers: { - Authorization: authHeader(), - }, - }, - ); - - if (!response.ok) { - enqueueSnackbar("Unknown Error Occurred!", { variant: "error" }); - throw Error("Unknown Error: " + response.status); - } else { - onSuccess(); - return true; - } - }, - retry: 0, - }); -} - -export function useCreateWorkOrder() { - const { enqueueSnackbar } = useSnackbar(); - const route = "work_orders"; - const authHeader = useAuthHeader(); - - return useMutation({ - mutationFn: async (workOrder: NewWorkOrder) => { - const response = await POSTFetch(route, workOrder, authHeader()); - - if (!response.ok) { - if (response.status == 409) { - enqueueSnackbar("Title must be unique for date and meter", { - variant: "error", - }); - throw Error( - "Failure of 
date, meter, and title uniqueness constraint", - ); - } - if (response.status == 422) { - enqueueSnackbar("Title cannot be blank", { variant: "error" }); - throw Error("Title is empty string"); - } else { - enqueueSnackbar("Unknown Error Occurred!", { variant: "error" }); - throw Error("Unknown Error: " + response.status); - } - } else { - const responseJson = await response.json(); - return responseJson; - } - }, - retry: 0, - }); -} - -export function useAddParts(onSuccess?: () => void) { - const { enqueueSnackbar } = useSnackbar(); - const queryClient = useQueryClient(); - const authHeader = useAuthHeader(); - - const route = "parts/add"; - - return useMutation({ - mutationFn: async (payload: IncreaseQuantityPayload) => { - const response = await POSTFetch(route, payload, authHeader()); - - if (!response.ok) { - if (response.status === 404) { - enqueueSnackbar("Part not found.", { variant: "error" }); - throw new Error("Part not found (404)"); - } - - if (response.status === 422) { - enqueueSnackbar("Missing or invalid fields.", { variant: "error" }); - throw new Error("Validation error (422)"); - } - - // Optional: read backend detail if present - let detail = ""; - try { - const j = await response.json(); - detail = j?.detail ? ` (${j.detail})` : ""; - } catch {} - - enqueueSnackbar( - `Unknown error occurred! (${response.status})${detail}`, - { - variant: "error", - }, - ); - throw new Error(`Unknown Error: ${response.status}${detail}`); - } - - const updatedPart: Part = await response.json(); - - // update any cached parts lists you have - queryClient.setQueryData(["parts"], (old) => { - const safeOld = old ?? []; - return safeOld.map((p) => (p.id === updatedPart.id ? 
updatedPart : p)); - }); - - onSuccess?.(); - return updatedPart; - }, - retry: 0, - }); -} - -export function useGetPartHistory(partId?: string) { - const authHeader = useAuthHeader(); - const navigate = useNavigate(); - const signOut = useSignOut(); - - return useQuery( - ["parts-history", partId], - () => - GETFetch( - `parts/${partId}/history`, - null, - authHeader(), - signOut, - navigate, - ), - { enabled: !!partId, keepPreviousData: true }, - ); -} - -export function useUpdatePartHistory( - partId?: string, - onSuccess?: (response: PartHistoryResponse) => void, -) { - const { enqueueSnackbar } = useSnackbar(); - const authHeader = useAuthHeader(); - const queryClient = useQueryClient(); - - return useMutation({ - mutationFn: async (payload: UpdatePartHistoryPayload) => { - if (!partId) { - throw new Error("Missing part id"); - } - - const response = await PATCHFetch( - `parts/${partId}/history`, - payload, - authHeader(), - ); - - if (!response.ok) { - let detail = ""; - try { - const json = await response.json(); - detail = json?.detail ? ` (${json.detail})` : ""; - } catch {} - - if (response.status === 404) { - enqueueSnackbar(`Part history row not found${detail}`, { - variant: "error", - }); - throw new Error(`Part history row not found${detail}`); - } - - if (response.status === 422) { - enqueueSnackbar(`Invalid history update${detail}`, { - variant: "error", - }); - throw new Error(`Invalid history update${detail}`); - } - - enqueueSnackbar( - `Unknown error occurred! 
(${response.status})${detail}`, - { - variant: "error", - }, - ); - throw new Error(`Unknown Error: ${response.status}${detail}`); - } - - const responseJson: PartHistoryResponse = await response.json(); - - queryClient.setQueryData(["parts-history", partId], responseJson); - queryClient.invalidateQueries({ queryKey: ["parts"] }); - queryClient.invalidateQueries({ queryKey: ["part"] }); - - onSuccess?.(responseJson); - return responseJson; - }, - retry: 0, - }); -} diff --git a/frontend/src/service/index.ts b/frontend/src/service/index.ts index 67ffb42e..7c9d90fc 100644 --- a/frontend/src/service/index.ts +++ b/frontend/src/service/index.ts @@ -1 +1,9 @@ -export * from "./ApiServiceNew"; +export * from "./lookups"; +export * from "./meters"; +export * from "./measurements"; +export * from "./notifications"; +export * from "./parts"; +export * from "./st2"; +export * from "./users"; +export * from "./wells"; +export * from "./workOrders"; diff --git a/frontend/src/service/lookups.ts b/frontend/src/service/lookups.ts new file mode 100644 index 00000000..422b9c59 --- /dev/null +++ b/frontend/src/service/lookups.ts @@ -0,0 +1,122 @@ +import { useQuery } from "react-query"; +import { useApiClient } from "@/hooks"; +import { + ActivityTypeLU, + HomeSummary, + MeterRegister, + MeterStatus, + MeterTypeLU, + NoteTypeLU, + ObservedPropertyTypeLU, + PartTypeLU, + SecurityScope, + ServiceTypeLU, + WaterSource, + WellStatus, + WellUseLU, +} from "@/interfaces"; + +export function useGetUseTypes() { + const apiClient = useApiClient(); + const route = "use_types"; + + return useQuery([route], () => apiClient.get(route), { + keepPreviousData: true, + }); +} + +export function useGetWaterSources() { + const apiClient = useApiClient(); + const route = "water_sources"; + + return useQuery([route], () => apiClient.get(route), { + keepPreviousData: true, + }); +} + +export function useGetWellStatusTypes() { + const apiClient = useApiClient(); + const route = "well_status_types"; + + 
return useQuery([route], () => apiClient.get(route), { + keepPreviousData: true, + }); +} + +export function useGetMeterTypeList() { + const apiClient = useApiClient(); + const route = "meter_types"; + + return useQuery([route], () => apiClient.get(route)); +} + +export function useGetHomeSummary() { + const apiClient = useApiClient(); + const route = "maintenance/home_summary"; + + return useQuery([route], () => apiClient.get(route)); +} + +export function useGetMeterRegisterList() { + const apiClient = useApiClient(); + const route = "meter_registers"; + + return useQuery([route], () => apiClient.get(route)); +} + +export function useGetMeterStatusTypeList() { + const apiClient = useApiClient(); + const route = "meter_status_types"; + + return useQuery([route], () => apiClient.get(route)); +} + +export function useGetNoteTypes() { + const apiClient = useApiClient(); + const route = "note_types"; + + return useQuery([route], () => apiClient.get(route)); +} + +export function useGetSecurityScopes() { + const apiClient = useApiClient(); + const route = "security_scopes"; + + return useQuery([route], () => apiClient.get(route)); +} + +export function useGetActivityTypeList() { + const apiClient = useApiClient(); + const route = "activity_types"; + + return useQuery([route, null], () => + apiClient.get(route), + ); +} + +export function useGetServiceTypes() { + const apiClient = useApiClient(); + const route = "service_types"; + + return useQuery([route, null], () => + apiClient.get(route), + ); +} + +export function useGetPropertyTypes() { + const apiClient = useApiClient(); + const route = "observed_property_types"; + + return useQuery([route], () => + apiClient.get(route), + ); +} + +export function useGetPartTypeList() { + const apiClient = useApiClient(); + const route = "part_types"; + + return useQuery([route], () => apiClient.get(route), { + keepPreviousData: true, + }); +} diff --git a/frontend/src/service/mapCache.ts b/frontend/src/service/mapCache.ts new 
file mode 100644 index 00000000..97eb4e0c --- /dev/null +++ b/frontend/src/service/mapCache.ts @@ -0,0 +1,12 @@ +import { QueryClient } from "react-query"; +import { clearSavedQueryLocalStorage } from "@/utils"; + +const MAP_QUERY_ROUTES = ["meters_locations", "well_locations"] as const; + +export function invalidateMapDataCaches(queryClient: QueryClient) { + clearSavedQueryLocalStorage(); + MAP_QUERY_ROUTES.forEach((route) => { + queryClient.removeQueries(route); + queryClient.invalidateQueries(route); + }); +} diff --git a/frontend/src/service/measurements.ts b/frontend/src/service/measurements.ts new file mode 100644 index 00000000..34ea1050 --- /dev/null +++ b/frontend/src/service/measurements.ts @@ -0,0 +1,277 @@ +import { useSnackbar } from "notistack"; +import { useMutation, useQuery, useQueryClient } from "react-query"; +import { useApiClient } from "@/hooks"; +import { + NewWellMeasurement, + PatchActivitySubmit, + PatchObservationSubmit, + PatchWellMeasurement, + WaterLevelQueryParams, + WellMeasurementDTO, +} from "@/interfaces"; + +export function useGetWaterLevels(params: WaterLevelQueryParams) { + const apiClient = useApiClient(); + const route = "waterlevels"; + + return useQuery([route, params], () => + apiClient.get(route, params), + ); +} + +export function useGetChloridesLevels(params: WaterLevelQueryParams) { + const apiClient = useApiClient(); + const route = "chlorides"; + + return useQuery([route, params], () => + apiClient.get(route, params), + ); +} + +export function useUpdateObservation(onSuccess: Function) { + const { enqueueSnackbar } = useSnackbar(); + const apiClient = useApiClient(); + const route = "observations"; + + return useMutation({ + mutationFn: async (observation: PatchObservationSubmit) => { + const response = await apiClient.patch(route, observation); + + if (!response.ok) { + if (response.status == 422) { + enqueueSnackbar("One or More Required Fields Not Entered!", { + variant: "error", + }); + throw Error("Incomplete 
form, check network logs for details"); + } + if (response.status == 409) { + enqueueSnackbar("Cannot use existing serial number!", { + variant: "error", + }); + throw Error("Observation serial number already in database"); + } else { + enqueueSnackbar("Unknown Error Occurred!", { variant: "error" }); + throw Error("Unknown Error: " + response.status); + } + } else { + onSuccess(); + const responseJson = await response.json(); + return responseJson; + } + }, + retry: 0, + }); +} + +export function useDeleteObservation(onSuccess: Function) { + const { enqueueSnackbar } = useSnackbar(); + const apiClient = useApiClient(); + + return useMutation({ + mutationFn: async (observation_id: number) => { + const response = await apiClient.delete("observations", { observation_id }); + + if (!response.ok) { + enqueueSnackbar("Unknown Error Occurred!", { variant: "error" }); + throw Error("Unknown Error: " + response.status); + } else { + onSuccess(); + return true; + } + }, + retry: 0, + }); +} + +export function useUpdateActivity(onSuccess: Function) { + const { enqueueSnackbar } = useSnackbar(); + const apiClient = useApiClient(); + const route = "activities"; + + return useMutation({ + mutationFn: async (activityForm: PatchActivitySubmit) => { + const response = await apiClient.patch(route, activityForm); + + if (!response.ok) { + if (response.status == 422) { + enqueueSnackbar("One or More Required Fields Not Entered!", { + variant: "error", + }); + throw Error("Incomplete form, check network logs for details"); + } + if (response.status == 409) { + let errorText = await response.text(); + enqueueSnackbar(JSON.parse(errorText).detail, { variant: "error" }); + throw Error(errorText); + } else { + enqueueSnackbar("Unknown Error Occurred!", { variant: "error" }); + throw Error("Unknown Error: " + response.status); + } + } else { + onSuccess(); + + const responseJson = await response.json(); + return responseJson; + } + }, + retry: 0, + }); +} + +export function 
useDeleteActivity(onSuccess: Function) { + const { enqueueSnackbar } = useSnackbar(); + const apiClient = useApiClient(); + + return useMutation({ + mutationFn: async (activity_id: number) => { + const response = await apiClient.delete("activities", { activity_id }); + + if (!response.ok) { + enqueueSnackbar("Unknown Error Occurred!", { variant: "error" }); + throw Error("Unknown Error: " + response.status); + } else { + onSuccess(); + return true; + } + }, + retry: 0, + }); +} + +export function useCreateChlorideMeasurement() { + const { enqueueSnackbar } = useSnackbar(); + const queryClient = useQueryClient(); + const apiClient = useApiClient(); + const route = "chlorides"; + + return useMutation({ + mutationFn: async (newChlorideMeasurement: NewWellMeasurement) => { + const response = await apiClient.post(route, newChlorideMeasurement); + + if (!response.ok) { + if (response.status == 422) { + enqueueSnackbar("One or More Required Fields Not Entered!", { + variant: "error", + }); + throw Error("Incomplete form, check network logs for details"); + } else { + enqueueSnackbar("Unknown Error Occurred!", { variant: "error" }); + throw Error("Unknown Error: " + response.status); + } + } else { + enqueueSnackbar("Successfully Created New Measurement!", { + variant: "success", + }); + + const responseJson = await response.json(); + + queryClient.setQueryData( + [route, { well_id: responseJson["well_id"] }], + (old: WellMeasurementDTO[] | undefined) => { + return [...(old ?? 
[]), responseJson]; + }, + ); + return responseJson; + } + }, + retry: 0, + }); +} + +export function useCreateWaterLevel() { + const { enqueueSnackbar } = useSnackbar(); + const queryClient = useQueryClient(); + const apiClient = useApiClient(); + const route = "waterlevels"; + + return useMutation({ + mutationFn: async (newWaterLevel: Partial) => { + const response = await apiClient.post(route, newWaterLevel); + + if (!response.ok) { + if (response.status == 422) { + enqueueSnackbar("One or More Required Fields Not Entered!", { + variant: "error", + }); + throw Error("Incomplete form, check network logs for details"); + } else { + enqueueSnackbar("Unknown Error Occurred!", { variant: "error" }); + throw Error("Unknown Error: " + response.status); + } + } else { + enqueueSnackbar("Successfully Created New Measurement!", { + variant: "success", + }); + + const responseJson = await response.json(); + + queryClient.setQueryData( + [route, { well_id: responseJson["well_id"] }], + (old: WellMeasurementDTO[] | undefined) => { + return [...(old ?? 
[]), responseJson]; + }, + ); + return responseJson; + } + }, + retry: 0, + }); +} + +export function useUpdateWaterLevel(onSuccess: Function) { + const { enqueueSnackbar } = useSnackbar(); + const apiClient = useApiClient(); + const route = "waterlevels"; + + return useMutation({ + mutationFn: async (updatedWaterLevel: Partial) => { + const response = await apiClient.patch(route, updatedWaterLevel); + + if (!response.ok) { + if (response.status == 422) { + enqueueSnackbar("One or More Required Fields Not Entered!", { + variant: "error", + }); + throw Error("Incomplete form, check network logs for details"); + } else { + enqueueSnackbar("Unknown Error Occurred!", { variant: "error" }); + throw Error("Unknown Error: " + response.status); + } + } else { + enqueueSnackbar("Successfully Updated Measurement!", { + variant: "success", + }); + onSuccess(); + + const responseJson = await response.json(); + return responseJson; + } + }, + retry: 0, + }); +} + +export function useDeleteWaterLevel() { + const { enqueueSnackbar } = useSnackbar(); + const apiClient = useApiClient(); + + return useMutation({ + mutationFn: async (waterLevelID: number) => { + const response = await apiClient.delete("waterlevels", { + waterlevel_id: waterLevelID, + }); + + if (!response.ok) { + enqueueSnackbar("Unknown Error Occurred!", { variant: "error" }); + throw Error("Unknown Error: " + response.status); + } else { + enqueueSnackbar("Successfully Deleted Measurement!", { + variant: "success", + }); + + return true; + } + }, + retry: 0, + }); +} diff --git a/frontend/src/service/meters.ts b/frontend/src/service/meters.ts new file mode 100644 index 00000000..2889d898 --- /dev/null +++ b/frontend/src/service/meters.ts @@ -0,0 +1,185 @@ +import { useSnackbar } from "notistack"; +import { useMutation, useQuery, useQueryClient } from "react-query"; +import { useApiClient } from "@/hooks"; +import { + Meter, + MeterDetails, + MeterDetailsQueryParams, + MeterHistoryDTO, + MeterListDTO, + 
MeterListQueryParams, + MeterMapDTO, + MeterPartParams, + Page, + Part, +} from "@/interfaces"; +import { MAP_CACHE_TTL_MS, readMapCache, writeMapCache } from "@/utils"; +import { invalidateMapDataCaches } from "./mapCache"; + +export function useGetMeterList(params: MeterListQueryParams | undefined) { + const apiClient = useApiClient(); + const route = "meters"; + + return useQuery, Error>([route, params], () => + apiClient.get(route, params), + ); +} + +export function useGetMeterLocations(searchstring: string | undefined) { + const apiClient = useApiClient(); + const route = "meters_locations"; + const queryKey = [route, searchstring] as const; + const cachedData = readMapCache(queryKey); + + return useQuery({ + queryKey, + queryFn: () => + apiClient.get(route, { + search_string: searchstring, + }), + initialData: cachedData?.data, + initialDataUpdatedAt: cachedData?.updatedAt, + onSuccess: (data) => writeMapCache(queryKey, data), + staleTime: MAP_CACHE_TTL_MS, + cacheTime: MAP_CACHE_TTL_MS, + refetchOnWindowFocus: false, + refetchOnMount: false, + refetchOnReconnect: false, + }); +} + +export function useGetMeterHistory(params: MeterDetailsQueryParams) { + const apiClient = useApiClient(); + const route = "meter_history"; + + return useQuery( + [route, params], + () => apiClient.get(route, params), + { enabled: params?.meter_id != undefined }, + ); +} + +export function useGetMeter(params: MeterDetailsQueryParams | undefined) { + const apiClient = useApiClient(); + const route = "meter"; + + return useQuery( + [route, params], + () => apiClient.get(route, params), + { + keepPreviousData: true, + enabled: params?.meter_id != undefined, + }, + ); +} + +export function useGetMeterPartsList(params: MeterPartParams | undefined) { + const apiClient = useApiClient(); + const route = "meter_parts"; + + return useQuery( + [route, params], + () => apiClient.get(route, params), + { + enabled: params?.meter_id != undefined, + }, + ); +} + +export function 
useCreateMeter(onSuccess: Function) { + const { enqueueSnackbar } = useSnackbar(); + const apiClient = useApiClient(); + const queryClient = useQueryClient(); + const route = "meters"; + + return useMutation({ + mutationFn: async (meter: Meter) => { + const response = await apiClient.post(route, meter); + + if (!response.ok) { + if (response.status == 422) { + enqueueSnackbar("One or More Required Fields Not Entered!", { + variant: "error", + }); + throw Error("Incomplete form, check network logs for details"); + } + if (response.status == 409) { + enqueueSnackbar("Cannot use existing serial number!", { + variant: "error", + }); + throw Error("Meter serial number already in database"); + } else { + enqueueSnackbar("Unknown Error Occurred!", { variant: "error" }); + throw Error("Unknown Error: " + response.status); + } + } else { + onSuccess(); + + const responseJson = await response.json(); + invalidateMapDataCaches(queryClient); + return responseJson; + } + }, + retry: 0, + }); +} + +export function useUpdateMeter(onSuccess: Function) { + const { enqueueSnackbar } = useSnackbar(); + const apiClient = useApiClient(); + const queryClient = useQueryClient(); + const route = "meter"; + + return useMutation({ + mutationFn: async (meterDetails: Meter) => { + const response = await apiClient.patch(route, meterDetails); + + if (!response.ok) { + if (response.status == 422) { + enqueueSnackbar("One or More Required Fields Not Entered!", { + variant: "error", + }); + throw Error("Incomplete form, check network logs for details"); + } + if (response.status == 409) { + enqueueSnackbar("Cannot use existing serial number!", { + variant: "error", + }); + throw Error("Meter serial number already in database"); + } else { + enqueueSnackbar("Unknown Error Occurred!", { variant: "error" }); + throw Error("Unknown Error: " + response.status); + } + } else { + onSuccess(); + + const responseJson = await response.json(); + invalidateMapDataCaches(queryClient); + + const meterQueries = 
queryClient.getQueryCache().findAll("meters"); + + meterQueries.forEach((query: any) => { + queryClient.setQueryData( + query.queryKey, + (old: Page | undefined) => { + if (old != undefined) { + let newPage = JSON.parse(JSON.stringify(old)); + + const meterIndex = old.items.findIndex( + (meter: Meter) => meter.id == responseJson["id"], + ); + if (meterIndex != undefined && meterIndex != -1) { + newPage.items[meterIndex] = responseJson; + } + return newPage; + } + return { items: [], total: 0, limit: 0, offset: 0 }; + }, + ); + }); + return responseJson; + } + }, + retry: 0, + }); +} diff --git a/frontend/src/service/notifications.ts b/frontend/src/service/notifications.ts new file mode 100644 index 00000000..e243c3ea --- /dev/null +++ b/frontend/src/service/notifications.ts @@ -0,0 +1,118 @@ +import { useSnackbar } from "notistack"; +import { useMutation, useQuery, useQueryClient, UseQueryOptions } from "react-query"; +import { useApiClient } from "@/hooks"; +import { + CreateNotificationPayload, + Notification, + NotificationCreateResult, + NotificationQueryParams, + NotificationType, + Page, +} from "@/interfaces"; + +export function useGetNotifications( + params: NotificationQueryParams | undefined, + options?: UseQueryOptions, Error>, +) { + const apiClient = useApiClient(); + const route = "notifications"; + + return useQuery, Error>( + [route, params], + () => apiClient.get(route, params), + { + keepPreviousData: true, + ...options, + }, + ); +} + +export function useGetNotificationTypes() { + const apiClient = useApiClient(); + const route = "notification_types"; + + return useQuery([route], () => + apiClient.get(route), + ); +} + +export function useGetUnreadNotificationCount( + options?: UseQueryOptions<{ unread_count: number }, Error>, +) { + const apiClient = useApiClient(); + const route = "notifications/unread_count"; + + return useQuery<{ unread_count: number }, Error>( + [route], + () => apiClient.get(route), + { + refetchInterval: 60_000, + 
...options, + }, + ); +} + +export function useCreateNotifications(onSuccess: Function) { + const { enqueueSnackbar } = useSnackbar(); + const queryClient = useQueryClient(); + const apiClient = useApiClient(); + const route = "notifications"; + + return useMutation({ + mutationFn: async (payload: CreateNotificationPayload) => { + const response = await apiClient.post(route, payload); + + if (!response.ok) { + const errorMessage = + (await response.json().catch(() => null))?.detail ?? + `Error ${response.status}`; + enqueueSnackbar(errorMessage, { variant: "error" }); + throw Error(errorMessage); + } + + const responseJson: NotificationCreateResult = await response.json(); + onSuccess(responseJson); + queryClient.invalidateQueries("notifications"); + queryClient.invalidateQueries("notifications/unread_count"); + return responseJson; + }, + onSuccess: (result) => { + enqueueSnackbar( + `Created ${result.created_count} notification${result.created_count === 1 ? "" : "s"}.`, + { + variant: "success", + }, + ); + }, + retry: 0, + }); +} + +export function useUpdateNotificationReadStatus(onSuccess?: Function) { + const { enqueueSnackbar } = useSnackbar(); + const queryClient = useQueryClient(); + const apiClient = useApiClient(); + const route = "notifications"; + + return useMutation({ + mutationFn: async (payload: { id: number; is_read: boolean }) => { + const response = await apiClient.patch(route, payload); + + if (!response.ok) { + const errorMessage = + (await response.json().catch(() => null))?.detail ?? 
+ `Error ${response.status}`; + enqueueSnackbar(errorMessage, { variant: "error" }); + throw Error(errorMessage); + } + + return response.json(); + }, + onSuccess: (result) => { + queryClient.invalidateQueries("notifications"); + queryClient.invalidateQueries("notifications/unread_count"); + onSuccess?.(result); + }, + retry: 0, + }); +} diff --git a/frontend/src/service/parts.ts b/frontend/src/service/parts.ts new file mode 100644 index 00000000..f7728a71 --- /dev/null +++ b/frontend/src/service/parts.ts @@ -0,0 +1,354 @@ +import { useSnackbar } from "notistack"; +import { useMutation, useQuery, useQueryClient } from "react-query"; +import { useApiClient } from "@/hooks"; +import { IncreaseQuantityPayload } from "@/interfaces"; +import { MeterTypeLU, Part } from "@/interfaces"; +import { + PartHistoryResponse, + UpdatePartHistoryPayload, +} from "@/interfaces/PartHistoryResponse"; + +export function useGetParts() { + const apiClient = useApiClient(); + const route = "parts"; + + return useQuery([route], () => apiClient.get(route), { + keepPreviousData: true, + }); +} + +export function useGetPart(params: { part_id: number } | undefined) { + const apiClient = useApiClient(); + const route = "part"; + + return useQuery( + [route, params], + () => apiClient.get(route, params), + { + keepPreviousData: true, + enabled: params?.part_id != undefined, + }, + ); +} + +export function useUpdateMeterType(onSuccess: Function) { + const { enqueueSnackbar } = useSnackbar(); + const apiClient = useApiClient(); + const queryClient = useQueryClient(); + const route = "meter_types"; + + return useMutation({ + mutationFn: async (meterType: Partial) => { + const response = await apiClient.patch(route, meterType); + + if (!response.ok) { + if (response.status == 422) { + enqueueSnackbar("One or More Required Fields Not Entered!", { + variant: "error", + }); + throw Error("Incomplete form, check network logs for details"); + } else { + enqueueSnackbar("Unknown Error Occurred!", { 
variant: "error" }); + throw Error("Unknown Error: " + response.status); + } + } else { + onSuccess(); + + const responseJson = await response.json(); + + queryClient.setQueryData( + ["meter_types"], + (old: MeterTypeLU[] | undefined) => { + if (old != undefined) { + let newMeterTypesList = [...old]; + const typeIndex = old?.findIndex( + (type) => type.id === responseJson["id"], + ); + + if (typeIndex != undefined && typeIndex != -1) { + newMeterTypesList[typeIndex] = responseJson; + } + + return newMeterTypesList; + } + return []; + }, + ); + return responseJson; + } + }, + retry: 0, + }); +} + +export function useCreateMeterType(onSuccess: Function) { + const { enqueueSnackbar } = useSnackbar(); + const queryClient = useQueryClient(); + const apiClient = useApiClient(); + const route = "meter_types"; + + return useMutation({ + mutationFn: async (meter_type: MeterTypeLU) => { + const response = await apiClient.post(route, meter_type); + + if (!response.ok) { + if (response.status == 422) { + enqueueSnackbar("One or More Required Fields Not Entered!", { + variant: "error", + }); + throw Error("Incomplete form, check network logs for details"); + } else { + enqueueSnackbar("Unknown Error Occurred!", { variant: "error" }); + throw Error("Unknown Error: " + response.status); + } + } else { + onSuccess(); + + const responseJson = await response.json(); + queryClient.setQueryData( + ["meter_types"], + (old: MeterTypeLU[] | undefined) => { + if (old != undefined) { + return [...old, responseJson]; + } + return []; + }, + ); + return responseJson; + } + }, + retry: 0, + }); +} + +export function useUpdatePart(onSuccess: Function) { + const { enqueueSnackbar } = useSnackbar(); + const apiClient = useApiClient(); + const queryClient = useQueryClient(); + const route = "part"; + + return useMutation({ + mutationFn: async (part: Partial) => { + console.log(part); + const response = await apiClient.patch(route, part); + + if (!response.ok) { + if (response.status == 422) { + 
enqueueSnackbar("One or More Required Fields Not Entered!", { + variant: "error", + }); + throw Error("Incomplete form, check network logs for details"); + } + if (response.status == 409) { + enqueueSnackbar("Cannot use existing serial number!", { + variant: "error", + }); + throw Error("Part serial number already in database"); + } else { + enqueueSnackbar("Unknown Error Occurred!", { variant: "error" }); + throw Error("Unknown Error: " + response.status); + } + } else { + onSuccess(); + + const responseJson = await response.json(); + + queryClient.setQueryData(["parts"], (old: Part[] | undefined) => { + if (old != undefined) { + let newPartsList = [...old]; + const partIndex = old?.findIndex( + (part) => part.id === responseJson["id"], + ); + + if (partIndex != undefined && partIndex != -1) { + newPartsList[partIndex] = responseJson; + } + + return newPartsList; + } + return []; + }); + return responseJson; + } + }, + retry: 0, + }); +} + +export function useCreatePart(onSuccess: Function) { + const { enqueueSnackbar } = useSnackbar(); + const queryClient = useQueryClient(); + const apiClient = useApiClient(); + const route = "parts"; + + return useMutation({ + mutationFn: async (part: Part) => { + try { + if (!part.part_type?.id) { + throw new Error("part_type_id is required but missing"); + } + + part.part_type_id = part.part_type?.id; + + const response = await apiClient.post(route, part); + + if (!response.ok) { + if (response.status == 422) { + enqueueSnackbar("One or More Required Fields Not Entered!", { + variant: "error", + }); + throw Error("Incomplete form, check network logs for details"); + } + if (response.status == 409) { + enqueueSnackbar("Cannot use existing serial number!", { + variant: "error", + }); + throw Error("Part serial number already in database"); + } else { + enqueueSnackbar("Unknown Error Occurred!", { variant: "error" }); + throw Error("Unknown Error: " + response.status); + } + } else { + onSuccess(); + + const responseJson = await 
response.json(); + queryClient.setQueryData(["parts"], (old: Part[] | undefined) => { + if (old != undefined) { + return [...old, responseJson]; + } + return []; + }); + return responseJson; + } + } catch { + enqueueSnackbar( + "An Error Occurred, Please Ensure the Part Number is Unique", + { variant: "error" }, + ); + throw Error( + "Server side error while creating a part, likely due to a non-unique part number.", + ); + } + }, + retry: 0, + }); +} + +export function useAddParts(onSuccess?: () => void) { + const { enqueueSnackbar } = useSnackbar(); + const queryClient = useQueryClient(); + const apiClient = useApiClient(); + const route = "parts/add"; + + return useMutation({ + mutationFn: async (payload: IncreaseQuantityPayload) => { + const response = await apiClient.post(route, payload); + + if (!response.ok) { + if (response.status === 404) { + enqueueSnackbar("Part not found.", { variant: "error" }); + throw new Error("Part not found (404)"); + } + + if (response.status === 422) { + enqueueSnackbar("Missing or invalid fields.", { variant: "error" }); + throw new Error("Validation error (422)"); + } + + let detail = ""; + try { + const j = await response.json(); + detail = j?.detail ? ` (${j.detail})` : ""; + } catch {} + + enqueueSnackbar( + `Unknown error occurred! (${response.status})${detail}`, + { + variant: "error", + }, + ); + throw new Error(`Unknown Error: ${response.status}${detail}`); + } + + const updatedPart: Part = await response.json(); + + queryClient.setQueryData(["parts"], (old) => { + const safeOld = old ?? []; + return safeOld.map((p) => (p.id === updatedPart.id ? 
updatedPart : p)); + }); + + onSuccess?.(); + return updatedPart; + }, + retry: 0, + }); +} + +export function useGetPartHistory(partId?: string) { + const apiClient = useApiClient(); + + return useQuery( + ["parts-history", partId], + () => apiClient.get(`parts/${partId}/history`), + { enabled: !!partId, keepPreviousData: true }, + ); +} + +export function useUpdatePartHistory( + partId?: string, + onSuccess?: (response: PartHistoryResponse) => void, +) { + const { enqueueSnackbar } = useSnackbar(); + const apiClient = useApiClient(); + const queryClient = useQueryClient(); + + return useMutation({ + mutationFn: async (payload: UpdatePartHistoryPayload) => { + if (!partId) { + throw new Error("Missing part id"); + } + + const response = await apiClient.patch(`parts/${partId}/history`, payload); + + if (!response.ok) { + let detail = ""; + try { + const json = await response.json(); + detail = json?.detail ? ` (${json.detail})` : ""; + } catch {} + + if (response.status === 404) { + enqueueSnackbar(`Part history row not found${detail}`, { + variant: "error", + }); + throw new Error(`Part history row not found${detail}`); + } + + if (response.status === 422) { + enqueueSnackbar(`Invalid history update${detail}`, { + variant: "error", + }); + throw new Error(`Invalid history update${detail}`); + } + + enqueueSnackbar( + `Unknown error occurred! 
(${response.status})${detail}`, + { + variant: "error", + }, + ); + throw new Error(`Unknown Error: ${response.status}${detail}`); + } + + const responseJson: PartHistoryResponse = await response.json(); + + queryClient.setQueryData(["parts-history", partId], responseJson); + queryClient.invalidateQueries({ queryKey: ["parts"] }); + queryClient.invalidateQueries({ queryKey: ["part"] }); + + onSuccess?.(responseJson); + return responseJson; + }, + retry: 0, + }); +} diff --git a/frontend/src/service/st2.ts b/frontend/src/service/st2.ts new file mode 100644 index 00000000..5b0110b7 --- /dev/null +++ b/frontend/src/service/st2.ts @@ -0,0 +1,14 @@ +import { useQuery } from "react-query"; +import { useFetchST2 } from "@/hooks"; +import { ST2Measurement } from "@/interfaces"; + +export function useGetST2WaterLevels(datastreamID: number | undefined) { + const route = `Datastreams(${datastreamID})/Observations`; + const fetchST2 = useFetchST2(); + + return useQuery( + [route, datastreamID], + () => fetchST2("GET", `/${route}`), + { enabled: !!datastreamID }, + ); +} diff --git a/frontend/src/service/users.ts b/frontend/src/service/users.ts new file mode 100644 index 00000000..40f141b3 --- /dev/null +++ b/frontend/src/service/users.ts @@ -0,0 +1,231 @@ +import { useSnackbar } from "notistack"; +import { useMutation, useQuery, useQueryClient, UseQueryOptions } from "react-query"; +import { useApiClient } from "@/hooks"; +import { UpdatedUserPassword, User, UserRole } from "@/interfaces"; + +export function useGetRoles(options?: UseQueryOptions) { + const apiClient = useApiClient(); + const route = "roles"; + + return useQuery([route], () => apiClient.get(route), options); +} + +export function useGetUserAdminList(options?: UseQueryOptions) { + const apiClient = useApiClient(); + const route = "usersadmin"; + + return useQuery([route], () => apiClient.get(route), options); +} + +export function useGetUserList() { + const apiClient = useApiClient(); + const route = "users"; + 
+ return useQuery([route], () => apiClient.get(route)); +} + +export function useGetUser(id: number, options = {}) { + const apiClient = useApiClient(); + const route = "users"; + + return useQuery( + [route, id], + () => apiClient.get(`${route}/${id}`), + options, + ); +} + +export function useCreateUser(onSuccess: Function) { + const { enqueueSnackbar } = useSnackbar(); + const queryClient = useQueryClient(); + const apiClient = useApiClient(); + const route = "users"; + + return useMutation({ + mutationFn: async (user: User) => { + const response = await apiClient.post(route, user); + + if (!response.ok) { + if (response.status == 422) { + enqueueSnackbar("One or More Required Fields Not Entered!", { + variant: "error", + }); + throw Error("Incomplete form, check network logs for details"); + } else { + enqueueSnackbar("Unknown Error Occurred!", { variant: "error" }); + throw Error("Unknown Error: " + response.status); + } + } else { + onSuccess(); + + const responseJson = await response.json(); + queryClient.setQueryData(["usersadmin"], (old: User[] | undefined) => { + if (old != undefined) { + return [...old, responseJson]; + } + return []; + }); + return responseJson; + } + }, + retry: 0, + }); +} + +export function useUpdateUser(onSuccess: Function) { + const { enqueueSnackbar } = useSnackbar(); + const apiClient = useApiClient(); + const queryClient = useQueryClient(); + const route = "users"; + + return useMutation({ + mutationFn: async (updatedUser: User) => { + const response = await apiClient.patch(route, updatedUser); + + if (!response.ok) { + if (response.status == 422) { + enqueueSnackbar("One or More Required Fields Not Entered!", { + variant: "error", + }); + throw Error("Incomplete form, check network logs for details"); + } else { + enqueueSnackbar("Unknown Error Occurred!", { variant: "error" }); + throw Error("Unknown Error: " + response.status); + } + } else { + onSuccess(); + const responseJson = await response.json(); + + 
queryClient.setQueryData(["usersadmin"], (old: User[] | undefined) => { + if (old != undefined) { + let newUsersList = [...old]; + const userIndex = old?.findIndex( + (user) => user.id === responseJson["id"], + ); + + if (userIndex != undefined && userIndex != -1) { + newUsersList[userIndex] = responseJson; + } + + return newUsersList; + } + return []; + }); + return responseJson; + } + }, + retry: 0, + }); +} + +export function useCreateRole(onSuccess: Function) { + const { enqueueSnackbar } = useSnackbar(); + const queryClient = useQueryClient(); + const apiClient = useApiClient(); + const route = "roles"; + + return useMutation({ + mutationFn: async (new_role: UserRole) => { + const response = await apiClient.post(route, new_role); + + if (!response.ok) { + if (response.status == 422) { + enqueueSnackbar("One or More Required Fields Not Entered!", { + variant: "error", + }); + throw Error("Incomplete form, check network logs for details"); + } else { + enqueueSnackbar("Unknown Error Occurred!", { variant: "error" }); + throw Error("Unknown Error: " + response.status); + } + } else { + onSuccess(); + + const responseJson = await response.json(); + queryClient.setQueryData(["roles"], (old: UserRole[] | undefined) => { + if (old != undefined) { + return [...old, responseJson]; + } + return []; + }); + return responseJson; + } + }, + retry: 0, + }); +} + +export function useUpdateRole(onSuccess: Function) { + const { enqueueSnackbar } = useSnackbar(); + const apiClient = useApiClient(); + const queryClient = useQueryClient(); + const route = "roles"; + + return useMutation({ + mutationFn: async (updatedRole: UserRole) => { + const response = await apiClient.patch(route, updatedRole); + + if (!response.ok) { + if (response.status == 422) { + enqueueSnackbar("One or More Required Fields Not Entered!", { + variant: "error", + }); + throw Error("Incomplete form, check network logs for details"); + } else { + enqueueSnackbar("Unknown Error Occurred!", { variant: "error" 
}); + throw Error("Unknown Error: " + response.status); + } + } else { + onSuccess(); + const responseJson = await response.json(); + + queryClient.setQueryData(["roles"], (old: UserRole[] | undefined) => { + if (old != undefined) { + let newRoles = [...old]; + const roleIndex = old?.findIndex( + (role) => role.id === responseJson["id"], + ); + + if (roleIndex != undefined && roleIndex != -1) { + newRoles[roleIndex] = responseJson; + } + + return newRoles; + } + return []; + }); + return responseJson; + } + }, + retry: 0, + }); +} + +export function useUpdateUserPassword(onSuccess: Function) { + const { enqueueSnackbar } = useSnackbar(); + const apiClient = useApiClient(); + const route = "users/update_password"; + + return useMutation({ + mutationFn: async (updatedUserPassword: UpdatedUserPassword) => { + const response = await apiClient.post(route, updatedUserPassword); + + if (!response.ok) { + if (response.status == 422) { + enqueueSnackbar("One or More Required Fields Not Entered!", { + variant: "error", + }); + throw Error("Incomplete form, check network logs for details"); + } else { + enqueueSnackbar("Unknown Error Occurred!", { variant: "error" }); + throw Error("Unknown Error: " + response.status); + } + } else { + onSuccess(); + const responseJson = await response.json(); + return responseJson; + } + }, + retry: 0, + }); +} diff --git a/frontend/src/service/wells.ts b/frontend/src/service/wells.ts new file mode 100644 index 00000000..be6b7dee --- /dev/null +++ b/frontend/src/service/wells.ts @@ -0,0 +1,209 @@ +import { InfiniteData, useInfiniteQuery, useMutation, useQuery, useQueryClient } from "react-query"; +import { useSnackbar } from "notistack"; +import { useApiClient } from "@/hooks"; +import { + Page, + SubmitWellCreate, + Well, + WellDetailsQueryParams, + WellListQueryParams, + WellMergeParams, + WellUpdate, +} from "@/interfaces"; +import { MAP_CACHE_TTL_MS, readMapCache, writeMapCache } from "@/utils"; +import { invalidateMapDataCaches } from 
"./mapCache"; + +export function useGetWellById(well_id?: number) { + const apiClient = useApiClient(); + const route = "wells"; + + return useQuery( + [route, "detail", well_id], + () => apiClient.get(`${route}/${well_id}`), + { enabled: !!well_id }, + ); +} + +export function useGetWells(params: WellListQueryParams | undefined) { + const apiClient = useApiClient(); + const route = "wells"; + + return useQuery, Error>( + [route, params], + () => apiClient.get(route, params), + { keepPreviousData: true }, + ); +} + +export function useGetWellLocations( + searchstring: string | undefined, + has_chloride_group: boolean | null = null, +) { + const apiClient = useApiClient(); + const route = "well_locations"; + const PAGE_SIZE = 500; + const queryKey = [route, searchstring, has_chloride_group] as const; + const cachedData = readMapCache>(queryKey); + + return useInfiniteQuery({ + queryKey, + queryFn: async ({ pageParam = 0 }) => { + return apiClient.get(route, { + search_string: searchstring, + offset: pageParam, + limit: PAGE_SIZE, + has_chloride_group, + }); + }, + getNextPageParam: (lastPage, allPages) => { + if (!lastPage || lastPage.length < PAGE_SIZE) return undefined; + return allPages.length * PAGE_SIZE; + }, + initialData: cachedData?.data, + initialDataUpdatedAt: cachedData?.updatedAt, + onSuccess: (data) => writeMapCache(queryKey, data), + staleTime: MAP_CACHE_TTL_MS, + cacheTime: MAP_CACHE_TTL_MS, + refetchOnWindowFocus: false, + refetchOnMount: false, + refetchOnReconnect: false, + }); +} + +export function useGetWell(params: WellDetailsQueryParams | undefined) { + const apiClient = useApiClient(); + const route = "well"; + + return useQuery( + [route, params], + () => apiClient.get(route, params), + { + keepPreviousData: true, + enabled: params?.well_id != undefined, + }, + ); +} + +export function useCreateWell(onSuccess: Function) { + const { enqueueSnackbar } = useSnackbar(); + const apiClient = useApiClient(); + const queryClient = useQueryClient(); + 
const route = "wells"; + + return useMutation({ + mutationFn: async (new_well: SubmitWellCreate) => { + const response = await apiClient.post(route, new_well); + + if (!response.ok) { + if (response.status == 422) { + enqueueSnackbar("One or More Required Fields Not Entered!", { + variant: "error", + }); + throw Error("Incomplete form, check network logs for details"); + } + if (response.status == 409) { + enqueueSnackbar("Cannot use existing RA number", { + variant: "error", + }); + throw Error("RA number already in database"); + } else { + enqueueSnackbar("Unknown Error Occurred!", { variant: "error" }); + throw Error("Unknown Error: " + response.status); + } + } else { + onSuccess(); + const responseJson = await response.json(); + invalidateMapDataCaches(queryClient); + return responseJson; + } + }, + retry: 0, + }); +} + +export function useUpdateWell(onSuccess: Function) { + const { enqueueSnackbar } = useSnackbar(); + const apiClient = useApiClient(); + const queryClient = useQueryClient(); + const route = "wells"; + + return useMutation({ + mutationFn: async (updatedWell: WellUpdate) => { + const response = await apiClient.patch(route, updatedWell); + + if (!response.ok) { + if (response.status == 422) { + enqueueSnackbar("One or More Required Fields Not Entered!", { + variant: "error", + }); + throw Error("Incomplete form, check network logs for details"); + } + if (response.status == 409) { + enqueueSnackbar("Cannot use existing RA number", { + variant: "error", + }); + throw Error("RA number already in database"); + } else { + enqueueSnackbar("Unknown Error Occurred!", { variant: "error" }); + throw Error("Unknown Error: " + response.status); + } + } else { + onSuccess(); + const responseJson = await response.json(); + invalidateMapDataCaches(queryClient); + + const wellsQueries = queryClient.getQueryCache().findAll("wells"); + + wellsQueries.forEach((query: any) => { + queryClient.setQueryData( + query.queryKey, + (old: Page | undefined) => { + if (old 
!= undefined) { + let newPage = JSON.parse(JSON.stringify(old)); + + const wellIndex = old.items.findIndex( + (well: Well) => well.id == responseJson["id"], + ); + if (wellIndex != undefined && wellIndex != -1) { + newPage.items[wellIndex] = responseJson; + } + return newPage; + } + return { items: [], total: 0, limit: 0, offset: 0 }; + }, + ); + }); + return responseJson; + } + }, + retry: 0, + }); +} + +export function useMergeWells(onSuccess: Function) { + const { enqueueSnackbar } = useSnackbar(); + const apiClient = useApiClient(); + const route = "merge_wells"; + + return useMutation({ + mutationFn: async (mergeWells: WellMergeParams) => { + console.log(mergeWells); + const response = await apiClient.post(route, mergeWells); + + if (!response.ok) { + if (response.status == 422) { + enqueueSnackbar("Testing remove??!", { variant: "error" }); + throw Error("Incomplete form, check network logs for details"); + } else { + enqueueSnackbar("Unknown Error Occurred!", { variant: "error" }); + throw Error("Unknown Error: " + response.status); + } + } else { + onSuccess(); + const responseJson = await response.json(); + return responseJson; + } + }, + retry: 0, + }); +} diff --git a/frontend/src/service/workOrders.ts b/frontend/src/service/workOrders.ts new file mode 100644 index 00000000..4f5ce226 --- /dev/null +++ b/frontend/src/service/workOrders.ts @@ -0,0 +1,124 @@ +import { useSnackbar } from "notistack"; +import { useMutation, useQuery, UseQueryOptions } from "react-query"; +import { useApiClient } from "@/hooks"; +import { NewWorkOrder, PatchWorkOrder, WorkOrder } from "@/interfaces"; +import { WorkOrderStatus } from "@/enums"; + +export function useGetWorkOrders( + params: { + filter_by_status: WorkOrderStatus[]; + start_date?: string; + work_order_id?: number[]; + assigned_user_id?: number; + q?: string; + }, + options?: UseQueryOptions, +) { + const apiClient = useApiClient(); + const route = "work_orders"; + + const normalized = { + ...params, + 
filter_by_status: [...(params.filter_by_status ?? [])].sort(), + work_order_id: params.work_order_id + ? [...params.work_order_id].sort((a, b) => a - b) + : undefined, + q: params.q?.trim() || undefined, + }; + + return useQuery({ + queryKey: [route, normalized], + queryFn: () => apiClient.get(route, normalized), + ...options, + }); +} + +export function useUpdateWorkOrder() { + const { enqueueSnackbar } = useSnackbar(); + const apiClient = useApiClient(); + const route = "work_orders"; + + return useMutation({ + mutationFn: async (workOrder: PatchWorkOrder) => { + const response = await apiClient.patch(route, workOrder); + + if (!response.ok) { + if (response.status == 409) { + enqueueSnackbar("Title must be unique for date and meter", { + variant: "error", + }); + throw Error( + "Failure of date, meter, and title uniqueness constraint", + ); + } + if (response.status == 422) { + enqueueSnackbar("Title cannot be blank", { variant: "error" }); + throw Error("Title is empty string"); + } else { + enqueueSnackbar("Unknown Error Occurred!", { variant: "error" }); + throw Error("Unknown Error: " + response.status); + } + } else { + const responseJson = await response.json(); + return responseJson; + } + }, + retry: 0, + }); +} + +export function useDeleteWorkOrder(onSuccess: Function) { + const { enqueueSnackbar } = useSnackbar(); + const apiClient = useApiClient(); + + return useMutation({ + mutationFn: async (workOrderID: number) => { + const response = await apiClient.delete("work_orders", { + work_order_id: workOrderID, + }); + + if (!response.ok) { + enqueueSnackbar("Unknown Error Occurred!", { variant: "error" }); + throw Error("Unknown Error: " + response.status); + } else { + onSuccess(); + return true; + } + }, + retry: 0, + }); +} + +export function useCreateWorkOrder() { + const { enqueueSnackbar } = useSnackbar(); + const apiClient = useApiClient(); + const route = "work_orders"; + + return useMutation({ + mutationFn: async (workOrder: NewWorkOrder) => { + 
const response = await apiClient.post(route, workOrder); + + if (!response.ok) { + if (response.status == 409) { + enqueueSnackbar("Title must be unique for date and meter", { + variant: "error", + }); + throw Error( + "Failure of date, meter, and title uniqueness constraint", + ); + } + if (response.status == 422) { + enqueueSnackbar("Title cannot be blank", { variant: "error" }); + throw Error("Title is empty string"); + } else { + enqueueSnackbar("Unknown Error Occurred!", { variant: "error" }); + throw Error("Unknown Error: " + response.status); + } + } else { + const responseJson = await response.json(); + return responseJson; + } + }, + retry: 0, + }); +} diff --git a/frontend/src/utils/MapCache.ts b/frontend/src/utils/MapCache.ts new file mode 100644 index 00000000..17f55cb3 --- /dev/null +++ b/frontend/src/utils/MapCache.ts @@ -0,0 +1,62 @@ +const MAP_CACHE_PREFIX = "wmdb:map-cache:"; +export const MAP_CACHE_TTL_MS = 1000 * 60 * 60 * 24 * 2; + +type StoredMapCache = { + data: T; + updatedAt: number; +}; + +function getMapCacheStorageKey(queryKey: readonly unknown[]) { + return `${MAP_CACHE_PREFIX}${JSON.stringify(queryKey)}`; +} + +export function readMapCache(queryKey: readonly unknown[]) { + if (typeof window === "undefined") return undefined; + + const storageKey = getMapCacheStorageKey(queryKey); + const rawValue = window.localStorage.getItem(storageKey); + if (!rawValue) return undefined; + + try { + const parsed = JSON.parse(rawValue) as StoredMapCache; + if ( + !parsed || + typeof parsed.updatedAt !== "number" || + Date.now() - parsed.updatedAt > MAP_CACHE_TTL_MS + ) { + window.localStorage.removeItem(storageKey); + return undefined; + } + + return parsed; + } catch { + window.localStorage.removeItem(storageKey); + return undefined; + } +} + +export function writeMapCache(queryKey: readonly unknown[], data: T) { + if (typeof window === "undefined") return; + + const storageKey = getMapCacheStorageKey(queryKey); + const value: StoredMapCache = { + 
data, + updatedAt: Date.now(), + }; + + window.localStorage.setItem(storageKey, JSON.stringify(value)); +} + +export function clearSavedQueryLocalStorage() { + if (typeof window === "undefined") return; + + const keysToRemove: string[] = []; + for (let i = 0; i < window.localStorage.length; i++) { + const key = window.localStorage.key(i); + if (key?.startsWith(MAP_CACHE_PREFIX)) { + keysToRemove.push(key); + } + } + + keysToRemove.forEach((key) => window.localStorage.removeItem(key)); +} diff --git a/frontend/src/utils/index.ts b/frontend/src/utils/index.ts index bd947722..d8fc91a4 100644 --- a/frontend/src/utils/index.ts +++ b/frontend/src/utils/index.ts @@ -3,6 +3,7 @@ export * from "./DateUtils"; export * from "./DataStreamUtils"; export * from "./EmptyToNull"; export * from "./HttpUtils"; +export * from "./MapCache"; export * from "./MapUrlState"; export * from "./GetMeterMarkerColor"; export * from "./GetRoleColor"; diff --git a/frontend/src/views/Meters/MeterHistory/SelectedActivityDetails.tsx b/frontend/src/views/Meters/MeterHistory/SelectedActivityDetails.tsx index a3fe7c00..200b7bf7 100644 --- a/frontend/src/views/Meters/MeterHistory/SelectedActivityDetails.tsx +++ b/frontend/src/views/Meters/MeterHistory/SelectedActivityDetails.tsx @@ -8,7 +8,7 @@ import { PatchActivitySubmit, SecurityScope, } from "@/interfaces"; -import { useUpdateActivity, useDeleteActivity } from "@/service/ApiServiceNew"; +import { useDeleteActivity, useUpdateActivity } from "@/service"; import dayjs from "dayjs"; import { enqueueSnackbar } from "notistack"; import { diff --git a/frontend/src/views/Meters/MeterSelection/MeterSelectionTable.tsx b/frontend/src/views/Meters/MeterSelection/MeterSelectionTable.tsx index a54008a5..0669c67b 100644 --- a/frontend/src/views/Meters/MeterSelection/MeterSelectionTable.tsx +++ b/frontend/src/views/Meters/MeterSelection/MeterSelectionTable.tsx @@ -8,7 +8,7 @@ import { useNavigate } from "@tanstack/react-router"; import { Route } from 
"@/routes/manage/meters"; import { MeterListQueryParams, SecurityScope } from "@/interfaces"; import { SortDirection, MeterSortByField, MeterStatusNames } from "@/enums"; -import { useGetMeterList } from "@/service/ApiServiceNew"; +import { useGetMeterList } from "@/service"; import { GridFooterWithButton } from "@/components"; interface MeterSelectionTableProps { diff --git a/frontend/src/views/Settings.tsx b/frontend/src/views/Settings.tsx index de271acf..f5b2f334 100644 --- a/frontend/src/views/Settings.tsx +++ b/frontend/src/views/Settings.tsx @@ -18,8 +18,8 @@ import { navConfig } from "@/constants"; import { useFetchWithAuth } from "@/hooks"; import { SecurityScope, UserSessionsResponse } from "@/interfaces"; import { Route } from "@/routes/settings"; -import { clearSavedQueryLocalStorage } from "@/service"; import { getTrackedSession } from "@/utils/SessionTracking"; +import { clearSavedQueryLocalStorage } from "@/utils"; import { KnownDevicesSection, PreferencesSection, diff --git a/frontend/src/views/WorkOrders/WorkOrdersTable.tsx b/frontend/src/views/WorkOrders/WorkOrdersTable.tsx index 468eeb5b..3c31e83b 100644 --- a/frontend/src/views/WorkOrders/WorkOrdersTable.tsx +++ b/frontend/src/views/WorkOrders/WorkOrdersTable.tsx @@ -15,7 +15,7 @@ import { useGetUserList, useDeleteWorkOrder, useCreateWorkOrder, -} from "@/service/ApiServiceNew"; +} from "@/service"; import { WorkOrderStatus } from "@/enums"; import { Autocomplete, From 49232ed6526e8829c4126364c7c9d589b0855f0c Mon Sep 17 00:00:00 2001 From: Tyler Adam Martinez Date: Sat, 21 Mar 2026 22:02:15 -0500 Subject: [PATCH 09/22] refactor(api): Refactor auth, models, routes, schemas, services --- api/.envexample | 5 +- api/auth/__init__.py | 15 + api/auth/dependencies.py | 13 + api/{ => auth}/session_tracking.py | 20 +- api/config.py | 3 + api/enums.py | 10 - api/main.py | 12 +- api/models/__init__.py | 74 ++ api/models/base.py | 11 + api/models/location.py | 66 ++ api/models/main_models.py | 845 
++--------------- api/models/meter.py | 231 +++++ api/models/part.py | 74 ++ api/models/user.py | 156 +++ api/models/well.py | 77 ++ api/models/work_order.py | 34 + api/routes/OSE.py | 561 +---------- api/routes/activities.py | 895 +----------------- api/routes/admin.py | 44 +- api/routes/chlorides.py | 48 +- api/routes/maintenance.py | 49 +- api/routes/meters.py | 191 +--- api/routes/notifications.py | 6 +- api/routes/parts.py | 806 ++++------------ api/routes/settings.py | 25 +- api/routes/user_sessions.py | 77 +- api/{route_util.py => routes/utils.py} | 0 api/routes/well_measurements.py | 538 +---------- api/routes/wells.py | 32 +- api/routes/work_orders.py | 77 ++ api/schemas/{admin_schemas.py => admin.py} | 0 api/schemas/chlorides.py | 18 + api/schemas/maintenance.py | 31 + api/schemas/{meter_schemas.py => meter.py} | 4 +- ...tification_schemas.py => notifications.py} | 2 +- api/schemas/ose.py | 50 + api/schemas/{part_schemas.py => parts.py} | 5 +- .../{security_schemas.py => security.py} | 0 api/schemas/settings.py | 14 + api/schemas/user_sessions.py | 52 + api/schemas/{well_schemas.py => well.py} | 2 +- api/security.py | 18 +- api/services/__init__.py | 1 + api/services/activities.py | 360 +++++++ api/services/meters.py | 98 ++ api/services/ose.py | 380 ++++++++ api/services/parts.py | 346 +++++++ api/services/storage.py | 154 +++ api/services/well_measurements.py | 467 +++++++++ api/services/work_orders.py | 254 +++++ 50 files changed, 3580 insertions(+), 3671 deletions(-) create mode 100644 api/auth/__init__.py create mode 100644 api/auth/dependencies.py rename api/{ => auth}/session_tracking.py (92%) create mode 100644 api/models/__init__.py create mode 100644 api/models/base.py create mode 100644 api/models/location.py create mode 100644 api/models/meter.py create mode 100644 api/models/part.py create mode 100644 api/models/user.py create mode 100644 api/models/well.py create mode 100644 api/models/work_order.py rename api/{route_util.py => 
routes/utils.py} (100%) create mode 100644 api/routes/work_orders.py rename api/schemas/{admin_schemas.py => admin.py} (100%) create mode 100644 api/schemas/chlorides.py create mode 100644 api/schemas/maintenance.py rename api/schemas/{meter_schemas.py => meter.py} (98%) rename api/schemas/{notification_schemas.py => notifications.py} (94%) create mode 100644 api/schemas/ose.py rename api/schemas/{part_schemas.py => parts.py} (97%) rename api/schemas/{security_schemas.py => security.py} (100%) create mode 100644 api/schemas/settings.py create mode 100644 api/schemas/user_sessions.py rename api/schemas/{well_schemas.py => well.py} (98%) create mode 100644 api/services/__init__.py create mode 100644 api/services/activities.py create mode 100644 api/services/meters.py create mode 100644 api/services/ose.py create mode 100644 api/services/parts.py create mode 100644 api/services/storage.py create mode 100644 api/services/well_measurements.py create mode 100644 api/services/work_orders.py diff --git a/api/.envexample b/api/.envexample index 1117d106..da8708a2 100644 --- a/api/.envexample +++ b/api/.envexample @@ -4,5 +4,8 @@ POSTGRES_PASSWORD= POSTGRES_HOST= POSTGRES_PORT= POSTGRES_DB= +JWT_SECRET_KEY= +JWT_ALGORITHM=HS256 +ACCESS_TOKEN_EXPIRE_HOURS=8 SETUP_DB=1 -POPULATE_DB=1 \ No newline at end of file +POPULATE_DB=1 diff --git a/api/auth/__init__.py b/api/auth/__init__.py new file mode 100644 index 00000000..9855b8c1 --- /dev/null +++ b/api/auth/__init__.py @@ -0,0 +1,15 @@ +from .dependencies import ScopedUser +from .session_tracking import ( + LAST_SEEN_UPDATE_INTERVAL, + create_user_session, + mark_session_signed_out, + touch_user_session, +) + +__all__ = [ + "ScopedUser", + "LAST_SEEN_UPDATE_INTERVAL", + "create_user_session", + "mark_session_signed_out", + "touch_user_session", +] diff --git a/api/auth/dependencies.py b/api/auth/dependencies.py new file mode 100644 index 00000000..a0848c83 --- /dev/null +++ b/api/auth/dependencies.py @@ -0,0 +1,13 @@ +from enum 
import Enum + +from api.security import scoped_user + + +class ScopedUser(Enum): + Read = scoped_user(["read"]) + Admin = scoped_user(["admin"]) + OSE = scoped_user(["ose"]) + ActivityWrite = scoped_user(["activities:write"]) + WellMeasurementWrite = scoped_user(["well_measurement:write"]) + MeterWrite = scoped_user(["meters:write"]) + WellWrite = scoped_user(["well:write"]) diff --git a/api/session_tracking.py b/api/auth/session_tracking.py similarity index 92% rename from api/session_tracking.py rename to api/auth/session_tracking.py index aa26be5f..92bbf8b6 100644 --- a/api/session_tracking.py +++ b/api/auth/session_tracking.py @@ -7,7 +7,7 @@ from fastapi import Request from sqlalchemy.orm import Session -from api.models.main_models import SignOutReasonTypeLU, UserSessions, Users +from api.models.user import SignOutReasonTypeLU, UserSessions, Users LAST_SEEN_UPDATE_INTERVAL = timedelta(minutes=5) @@ -113,12 +113,12 @@ def create_user_session(db: Session, user: Users, request: Request) -> UserSessi operating_system = normalize_header_value( request.headers.get("x-operating-system") ) or parse_operating_system(user_agent) - device_type = normalize_header_value(request.headers.get("x-device-type")) or parse_device_type( - user_agent - ) - device_label = normalize_header_value(request.headers.get("x-device-label")) or build_device_label( - browser, operating_system, device_type - ) + device_type = normalize_header_value( + request.headers.get("x-device-type") + ) or parse_device_type(user_agent) + device_label = normalize_header_value( + request.headers.get("x-device-label") + ) or build_device_label(browser, operating_system, device_type) fingerprint_hash = normalize_header_value( request.headers.get("x-device-fingerprint") ) @@ -178,7 +178,11 @@ def mark_session_signed_out( if not session: return None - if fingerprint_hash and session.fingerprint_hash and session.fingerprint_hash != fingerprint_hash: + if ( + fingerprint_hash + and session.fingerprint_hash + and 
session.fingerprint_hash != fingerprint_hash + ): return None if session.signed_out_at is not None: diff --git a/api/config.py b/api/config.py index a5011828..95e643ce 100644 --- a/api/config.py +++ b/api/config.py @@ -26,6 +26,9 @@ class Settings: "POSTGRES_PORT", 5432 ) # default postgres port is 5432 POSTGRES_DB: str = os.getenv("POSTGRES_DB") + JWT_SECRET_KEY: str | None = os.getenv("JWT_SECRET_KEY") + JWT_ALGORITHM: str = os.getenv("JWT_ALGORITHM", "HS256") + ACCESS_TOKEN_EXPIRE_HOURS: int = int(os.getenv("ACCESS_TOKEN_EXPIRE_HOURS", "8")) DATABASE_URL = f"postgresql+psycopg://{POSTGRES_USER}:{POSTGRES_PASSWORD}@{POSTGRES_HOST}:{POSTGRES_PORT}/{POSTGRES_DB}" diff --git a/api/enums.py b/api/enums.py index 4b545d0d..6c31a63b 100644 --- a/api/enums.py +++ b/api/enums.py @@ -1,5 +1,4 @@ from enum import Enum -from api.security import scoped_user class MeterSortByField(Enum): @@ -31,15 +30,6 @@ class SortDirection(Enum): Descending = "desc" -class ScopedUser(Enum): - Read = scoped_user(["read"]) - Admin = scoped_user(["admin"]) - OSE = scoped_user(["ose"]) - ActivityWrite = scoped_user(["activities:write"]) - WellMeasurementWrite = scoped_user(["well_measurement:write"]) - MeterWrite = scoped_user(["meters:write"]) - WellWrite = scoped_user(["well:write"]) - class WorkOrderStatus(Enum): Open = "Open" Closed = "Closed" diff --git a/api/main.py b/api/main.py index f90acc6e..facd1a99 100644 --- a/api/main.py +++ b/api/main.py @@ -4,8 +4,8 @@ from fastapi_pagination import add_pagination from fastapi.middleware.cors import CORSMiddleware from starlette import status -from api.schemas import security_schemas -from api.models.main_models import Users +from api.schemas import security as security_schema +from api.models.user import Users from api.routes.activities import activity_router, public_activity_router from api.routes.admin import admin_router from api.routes.chlorides import authenticated_chlorides_router, public_chlorides_router @@ -19,11 +19,13 @@ from 
api.routes.parts import part_router from api.routes.settings import settings_router from api.routes.user_sessions import user_sessions_router +from api.routes.work_orders import work_orders_router from api.routes.well_measurements import ( authenticated_well_measurement_router, public_well_measurement_router, ) from api.routes.wells import authenticated_well_router, public_well_router +from api.auth.session_tracking import create_user_session, touch_user_session from api.security import ( authenticate_user, create_access_token, @@ -32,7 +34,6 @@ get_session_identifier_from_token, ) from api.session import get_db, SessionLocal -from api.session_tracking import create_user_session, touch_user_session from sqlalchemy.orm import Session tags_metadata = [ @@ -86,7 +87,7 @@ # ============== Security ============== -@app.post("/token", response_model=security_schemas.Token, tags=["Login"]) +@app.post("/token", response_model=security_schema.Token, tags=["Login"]) def login_for_access_token( request: Request, form_data: OAuth2PasswordRequestForm = Depends(), @@ -119,7 +120,7 @@ def login_for_access_token( }, expires_delta=timedelta(hours=ACCESS_TOKEN_EXPIRE_HOURS), ) - user_response = security_schemas.User(**user.__dict__) + user_response = security_schema.User(**user.__dict__) db.commit() return { @@ -161,6 +162,7 @@ async def update_user_session_last_seen(request: Request, call_next): authenticated_router.include_router(authenticated_meter_router) authenticated_router.include_router(notifications_router) authenticated_router.include_router(part_router) +authenticated_router.include_router(work_orders_router) authenticated_router.include_router(authenticated_well_measurement_router) authenticated_router.include_router(authenticated_well_router) authenticated_router.include_router(settings_router) diff --git a/api/models/__init__.py b/api/models/__init__.py new file mode 100644 index 00000000..ff65bc7f --- /dev/null +++ b/api/models/__init__.py @@ -0,0 +1,74 @@ +from 
api.models.base import Base +from api.models.location import LandOwners, Locations, LocationTypeLU +from api.models.meter import ( + ActivityTypeLU, + MeterActivities, + MeterActivityPhotos, + MeterObservations, + MeterStatusLU, + MeterTypeLU, + Meters, + NoteTypeLU, + Notes, + ObservedPropertyTypeLU, + PropertyUnits, + ServiceTypeLU, + ServicesPerformed, + Units, + meterRegisters, +) +from api.models.part import PartAssociation, PartTypeLU, Parts, PartsAdded, PartsUsed +from api.models.user import ( + NotificationTypeLU, + Notifications, + ScopesRoles, + SecurityScopes, + SignOutReasonTypeLU, + UserRoles, + UserSessions, + Users, +) +from api.models.well import WellMeasurements, Wells, WellStatus, WellUseLU, WaterSources +from api.models.work_order import workOrders, workOrderStatusLU + +__all__ = [ + "ActivityTypeLU", + "Base", + "LandOwners", + "Locations", + "LocationTypeLU", + "MeterActivities", + "MeterActivityPhotos", + "MeterObservations", + "MeterStatusLU", + "MeterTypeLU", + "Meters", + "NoteTypeLU", + "Notes", + "NotificationTypeLU", + "Notifications", + "ObservedPropertyTypeLU", + "PartAssociation", + "PartTypeLU", + "Parts", + "PartsAdded", + "PartsUsed", + "PropertyUnits", + "ScopesRoles", + "SecurityScopes", + "ServiceTypeLU", + "ServicesPerformed", + "SignOutReasonTypeLU", + "Units", + "UserRoles", + "UserSessions", + "Users", + "WellMeasurements", + "Wells", + "WellStatus", + "WellUseLU", + "WaterSources", + "meterRegisters", + "workOrders", + "workOrderStatusLU", +] diff --git a/api/models/base.py b/api/models/base.py new file mode 100644 index 00000000..ca755b8c --- /dev/null +++ b/api/models/base.py @@ -0,0 +1,11 @@ +from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column + + +class Base(DeclarativeBase): + """ + Base class for all models + - Adds id column on all tables + """ + + id: Mapped[int] = mapped_column(primary_key=True) + __name__: str diff --git a/api/models/location.py b/api/models/location.py new file mode 100644 index 
00000000..5947f155 --- /dev/null +++ b/api/models/location.py @@ -0,0 +1,66 @@ +from sqlalchemy import Float, ForeignKey, Integer, String +from sqlalchemy.orm import Mapped, mapped_column, relationship +from geoalchemy2.shape import to_shape + +from api.models.base import Base + + +class Locations(Base): + __tablename__ = "Locations" + + name: Mapped[str] = mapped_column(String) + trss: Mapped[str] = mapped_column(String) + latitude: Mapped[float] = mapped_column(Float, nullable=False) + longitude: Mapped[float] = mapped_column(Float, nullable=False) + township: Mapped[int] = mapped_column(Integer) + range: Mapped[int] = mapped_column(Integer) + section: Mapped[int] = mapped_column(Integer) + quarter: Mapped[int] = mapped_column(Integer) + half_quarter: Mapped[int] = mapped_column(Integer) + quarter_quarter: Mapped[int] = mapped_column(Integer) + + type_id: Mapped[int] = mapped_column( + Integer, ForeignKey("LocationTypeLU.id"), nullable=False + ) + land_owner_id: Mapped[int] = mapped_column(Integer, ForeignKey("LandOwners.id")) + + land_owner: Mapped["LandOwners"] = relationship() + type: Mapped["LocationTypeLU"] = relationship() + + @property + def lat(self): + try: + return to_shape(self.geom).y + except BaseException: + return + + @property + def long(self): + try: + return to_shape(self.geom).x + except BaseException: + return + + @property + def location(self): + return f"{self.township}.{self.range}.{self.section}.{self.quarter}.{self.half_quarter}" + + +class LocationTypeLU(Base): + __tablename__ = "LocationTypeLU" + type_name: Mapped[str] = mapped_column(String) + description: Mapped[str] = mapped_column(String) + + +class LandOwners(Base): + __tablename__ = "LandOwners" + contact_name: Mapped[str] = mapped_column(String) + organization: Mapped[str] = mapped_column(String) + address: Mapped[str] = mapped_column(String) + city: Mapped[str] = mapped_column(String) + state: Mapped[str] = mapped_column(String) + zip: Mapped[str] = mapped_column(String) + 
phone: Mapped[str] = mapped_column(String) + mobile: Mapped[str] = mapped_column(String) + email: Mapped[str] = mapped_column(String) + note: Mapped[str] = mapped_column(String) diff --git a/api/models/main_models.py b/api/models/main_models.py index 87c6b822..9b73ede9 100644 --- a/api/models/main_models.py +++ b/api/models/main_models.py @@ -1,772 +1,83 @@ -from sqlalchemy import ( - Column, - Integer, - String, - ForeignKey, - Float, - DateTime, - func, - Boolean, - Table, - Numeric, - Date, +from api.models import ( + ActivityTypeLU, + Base, + LandOwners, + Locations, + LocationTypeLU, + MeterActivities, + MeterActivityPhotos, + MeterObservations, + MeterStatusLU, + MeterTypeLU, + Meters, + NoteTypeLU, + Notes, + NotificationTypeLU, + Notifications, + ObservedPropertyTypeLU, + PartAssociation, + PartTypeLU, + Parts, + PartsAdded, + PartsUsed, + PropertyUnits, + ScopesRoles, + SecurityScopes, + ServiceTypeLU, + ServicesPerformed, + SignOutReasonTypeLU, + Units, + UserRoles, + UserSessions, + Users, + WellMeasurements, + Wells, + WellStatus, + WellUseLU, + WaterSources, + meterRegisters, + workOrders, + workOrderStatusLU, ) -from sqlalchemy.orm import ( - relationship, - DeclarativeBase, - mapped_column, - Mapped, - deferred, -) -from geoalchemy2.shape import to_shape -from datetime import date -from typing import Optional, List - - -class Base(DeclarativeBase): - """ - Base class for all models - - Adds id column on all tables - """ - - id: Mapped[int] = mapped_column(primary_key=True) - __name__: str - - -class PartTypeLU(Base): - """ - The types of parts - """ - - __tablename__ = "PartTypeLU" - name: Mapped[str] - description: Mapped[str] - - -# Association table that links meter types and their commonly used parts -# see https://docs.sqlalchemy.org/en/20/orm/basic_relationships.html#many-to-many -PartAssociation = Table( - "PartAssociation", - Base.metadata, - Column("part_id", ForeignKey("Parts.id"), nullable=False), - Column("meter_type_id", 
ForeignKey("MeterTypeLU.id"), nullable=False), -) - - -class Parts(Base): - """ - All parts - """ - - __tablename__ = "Parts" - - part_number: Mapped[str] = mapped_column(String, unique=True, nullable=False) - description: Mapped[Optional[str]] - vendor: Mapped[Optional[str]] - initial_count: Mapped[int] = mapped_column(Integer, nullable=False, default=0) - note: Mapped[Optional[str]] - in_use: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True) - commonly_used: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False) - price: Mapped[Optional[float]] = mapped_column(Float) - - part_type_id: Mapped[int] = mapped_column( - Integer, ForeignKey("PartTypeLU.id"), nullable=False - ) - part_type: Mapped["PartTypeLU"] = relationship() - - # The meter types associated with this part - meter_types: Mapped[Optional[List["MeterTypeLU"]]] = relationship( - secondary=PartAssociation - ) - - parts_used_links: Mapped[list["PartsUsed"]] = relationship( - back_populates="part", - cascade="all, delete-orphan", - ) - - -class PartsUsed(Base): - __tablename__ = "PartsUsed" - - id: Mapped[int] = mapped_column(Integer, primary_key=True) - meter_activity_id: Mapped[int] = mapped_column( - ForeignKey("MeterActivities.id"), nullable=False - ) - part_id: Mapped[int] = mapped_column(ForeignKey("Parts.id"), nullable=False) - - count: Mapped[int] = mapped_column(Integer, nullable=False, default=1) - - part: Mapped["Parts"] = relationship(back_populates="parts_used_links") - meter_activity: Mapped["MeterActivities"] = relationship( - back_populates="parts_used_links" - ) - - -class PartsAdded(Base): - __tablename__ = "PartsAdded" - - id: Mapped[int] = mapped_column(Integer, primary_key=True) - part_id: Mapped[int] = mapped_column(ForeignKey("Parts.id"), nullable=False) - - count: Mapped[int] = mapped_column(Integer, nullable=False, default=1) - date: Mapped[date] = mapped_column(Date, nullable=False) # default handled by DB - note: Mapped[str | None] = 
mapped_column(String, nullable=True) - - part: Mapped["Parts"] = relationship - - -class ServiceTypeLU(Base): - """ - Describes the type of service performed during an activity - """ - - __tablename__ = "ServiceTypeLU" - service_name: Mapped[str] - description: Mapped[str] - - -# Association table that links meter activities and the services that were performed during -ServicesPerformed = Table( - "ServicesPerformed", - Base.metadata, - Column("meter_activity_id", ForeignKey("MeterActivities.id"), nullable=False), - Column("service_type_id", ForeignKey("ServiceTypeLU.id"), nullable=False), -) - - -class NoteTypeLU(Base): - """ - Pre-defined notes that can be set on activities - """ - - __tablename__ = "NoteTypeLU" - note: Mapped[str] - details: Mapped[str] - - # Either one of the special 3 slugs that represent the working status of a meter or null - # working | not-working | not-checked | null - slug: Mapped[str] - - # Commonly Used determines what is displayed by default - commonly_used: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False) - -# Association table that links notes and the meter activity they were added to -Notes = Table( +__all__ = [ + "ActivityTypeLU", + "Base", + "LandOwners", + "Locations", + "LocationTypeLU", + "MeterActivities", + "MeterActivityPhotos", + "MeterObservations", + "MeterStatusLU", + "MeterTypeLU", + "Meters", + "NoteTypeLU", "Notes", - Base.metadata, - Column("meter_activity_id", ForeignKey("MeterActivities.id"), nullable=False), - Column("note_type_id", ForeignKey("NoteTypeLU.id"), nullable=False), -) - - -class Meters(Base): - """ - Primary table for tracking meters - """ - - __tablename__ = "Meters" - serial_number: Mapped[str] = mapped_column(String, nullable=False) - # Contact information specific to particular meter - contact_name: Mapped[Optional[str]] = mapped_column(String) - contact_phone: Mapped[Optional[str]] = mapped_column(String) - notes: Mapped[Optional[str]] = mapped_column(String) - price: 
Mapped[Optional[float]] = mapped_column(Numeric(10, 2)) - - meter_type_id: Mapped[int] = mapped_column( - Integer, ForeignKey("MeterTypeLU.id"), nullable=False - ) - status_id: Mapped[int] = mapped_column( - Integer, ForeignKey("MeterStatusLU.id"), nullable=False - ) - well_id: Mapped[int] = mapped_column( - Integer, ForeignKey("Wells.id"), nullable=False - ) - location_id: Mapped[int] = mapped_column( - Integer, ForeignKey("Locations.id"), nullable=False - ) - register_id: Mapped[int] = mapped_column( - Integer, ForeignKey("meter_registers.id"), nullable=True - ) - - water_users: Mapped[Optional[str]] = mapped_column(String) - meter_owner: Mapped[Optional[str]] = mapped_column(String) - - meter_type: Mapped["MeterTypeLU"] = relationship() - meter_register: Mapped["meterRegisters"] = relationship() - status: Mapped["MeterStatusLU"] = relationship() - well: Mapped["Wells"] = relationship("Wells", back_populates="meters") - location: Mapped["Locations"] = relationship() - - -class MeterTypeLU(Base): - """ - Meter types - """ - - __tablename__ = "MeterTypeLU" - brand: Mapped[str] = mapped_column(String) - series: Mapped[str] = mapped_column(String) - model: Mapped[str] = mapped_column(String) - size: Mapped[float] = mapped_column(Float) - description: Mapped[str] = mapped_column(String) - in_use: Mapped[bool] = mapped_column(Boolean, nullable=False) - - -class MeterStatusLU(Base): - """ - Establishes if a meter is installed, in inventory, retired, or other options as needed. 
- """ - - __tablename__ = "MeterStatusLU" - status_name: Mapped[str] = mapped_column(String) - description: Mapped[str] = mapped_column(String) - - -class MeterActivities(Base): - """ - Logs all meter activities - """ - - __tablename__ = "MeterActivities" - timestamp_start: Mapped[DateTime] = mapped_column(DateTime, nullable=False) - timestamp_end: Mapped[DateTime] = mapped_column(DateTime, nullable=False) - description: Mapped[DateTime] = mapped_column(String) - - submitting_user_id: Mapped[int] = mapped_column( - Integer, ForeignKey("Users.id"), nullable=False - ) - meter_id: Mapped[int] = mapped_column( - Integer, ForeignKey("Meters.id"), nullable=False - ) - activity_type_id: Mapped[int] = mapped_column( - Integer, ForeignKey("ActivityTypeLU.id"), nullable=False - ) - location_id: Mapped[int] = mapped_column( - Integer, ForeignKey("Locations.id"), nullable=False - ) - ose_share: Mapped[bool] = mapped_column(Boolean, nullable=False) - water_users: Mapped[str] = mapped_column(String) - work_order_id: Mapped[int] = mapped_column(Integer, ForeignKey("work_orders.id")) - - submitting_user: Mapped["Users"] = relationship() - meter: Mapped["Meters"] = relationship() - activity_type: Mapped["ActivityTypeLU"] = relationship() - location: Mapped["Locations"] = relationship() - - services_performed: Mapped[List["ServiceTypeLU"]] = relationship( - "ServiceTypeLU", secondary=ServicesPerformed - ) - notes: Mapped[List["NoteTypeLU"]] = relationship("NoteTypeLU", secondary=Notes) - work_order: Mapped["workOrders"] = relationship() - well: Mapped["Wells"] = relationship( - "Wells", - primaryjoin="MeterActivities.location_id == Wells.location_id", - foreign_keys="MeterActivities.location_id", - viewonly=True, - ) - photos: Mapped[List["MeterActivityPhotos"]] = relationship( - "MeterActivityPhotos", back_populates="meter_activity", cascade="all, delete" - ) - - parts_used_links: Mapped[list["PartsUsed"]] = relationship( - back_populates="meter_activity", - cascade="all, 
delete-orphan", - ) - - -class MeterActivityPhotos(Base): - __tablename__ = "MeterActivityPhotos" - - id: Mapped[int] = mapped_column( - Integer, primary_key=True, index=True, autoincrement=True - ) - meter_activity_id: Mapped[int] = mapped_column( - Integer, ForeignKey("MeterActivities.id", ondelete="CASCADE"), nullable=False - ) - file_name: Mapped[str] = mapped_column(String, nullable=False) - gcs_path: Mapped[str] = mapped_column(String, nullable=False) - uploaded_at: Mapped[DateTime] = mapped_column( - DateTime(timezone=True), server_default=func.now() - ) - original_file_name = Column(String, nullable=True) - meter_activity: Mapped["MeterActivities"] = relationship( - "MeterActivities", back_populates="photos" - ) - - -class ActivityTypeLU(Base): - """ - Details the different types of activities PVACD implements - """ - - __tablename__ = "ActivityTypeLU" - name: Mapped[str] = mapped_column(String) - description: Mapped[str] = mapped_column(String) - - # Specifies who can perform this activity (must be either 'technician' or 'admin') - # If admin, only admins can perform, if technician then technician or admin can perform - permission: Mapped[str] = mapped_column(String) - - -class MeterObservations(Base): - """ - Tracks all observations associated with a meter - """ - - __tablename__ = "MeterObservations" - timestamp: Mapped[DateTime] = mapped_column(DateTime, nullable=False) - value: Mapped[float] = mapped_column(Float, nullable=False) - notes: Mapped[str] = mapped_column(String) - ose_share: Mapped[bool] = mapped_column(Boolean, nullable=False) - - submitting_user_id: Mapped[int] = mapped_column(Integer, ForeignKey("Users.id")) - meter_id: Mapped[int] = mapped_column( - Integer, ForeignKey("Meters.id"), nullable=False - ) - observed_property_type_id: Mapped[int] = mapped_column( - Integer, ForeignKey("ObservedPropertyTypeLU.id"), nullable=False - ) - unit_id: Mapped[int] = mapped_column( - Integer, ForeignKey("Units.id"), nullable=False - ) - location_id: 
Mapped[int] = mapped_column( - Integer, ForeignKey("Locations.id"), nullable=False - ) - - submitting_user: Mapped["Users"] = relationship() - meter: Mapped["Meters"] = relationship() - observed_property: Mapped["ObservedPropertyTypeLU"] = relationship() - unit: Mapped["Units"] = relationship() - location: Mapped["Locations"] = relationship() - - -class ObservedPropertyTypeLU(Base): - """ - Defines the types of observations made on a meter - """ - - __tablename__ = "ObservedPropertyTypeLU" - name: Mapped[str] = mapped_column(String) - description: Mapped[str] = mapped_column(String) - context: Mapped[str] = mapped_column( - String - ) # Specifies if property associated with 'meter' or 'well' - - # The units that can be used on this property type - units: Mapped[List["Units"]] = relationship(secondary="PropertyUnits") - - -class Units(Base): - """ - Defines units used in observations - """ - - __tablename__ = "Units" - name: Mapped[str] = mapped_column(String) - name_short: Mapped[str] = mapped_column(String) - description: Mapped[str] = mapped_column(String) - - -PropertyUnits = Table( + "NotificationTypeLU", + "Notifications", + "ObservedPropertyTypeLU", + "PartAssociation", + "PartTypeLU", + "Parts", + "PartsAdded", + "PartsUsed", "PropertyUnits", - Base.metadata, - Column("property_id", ForeignKey("ObservedPropertyTypeLU.id"), nullable=False), - Column("unit_id", ForeignKey("Units.id"), nullable=False), -) - - -class Locations(Base): - """ - Table for tracking information about a well's location - """ - - __tablename__ = "Locations" - name: Mapped[str] = mapped_column(String) - trss: Mapped[str] = mapped_column(String) - latitude: Mapped[float] = mapped_column(Float, nullable=False) - longitude: Mapped[float] = mapped_column(Float, nullable=False) - township: Mapped[int] = mapped_column(Integer) - range: Mapped[int] = mapped_column(Integer) - section: Mapped[int] = mapped_column(Integer) - quarter: Mapped[int] = mapped_column(Integer) - half_quarter: Mapped[int] 
= mapped_column(Integer) - quarter_quarter: Mapped[int] = mapped_column(Integer) - - type_id: Mapped[int] = mapped_column( - Integer, ForeignKey("LocationTypeLU.id"), nullable=False - ) - land_owner_id: Mapped[int] = mapped_column(Integer, ForeignKey("LandOwners.id")) - - land_owner: Mapped["LandOwners"] = relationship() - type: Mapped["LocationTypeLU"] = relationship() - - @property - def lat(self): - try: - return to_shape(self.geom).y - except BaseException: - return - - @property - def long(self): - try: - return to_shape(self.geom).x - except BaseException: - return - - @property - def location(self): - return f"{self.township}.{self.range}.{self.section}.{self.quarter}.{self.half_quarter}" - - -class LocationTypeLU(Base): - """ - Defines the type of location, such as well - """ - - __tablename__ = "LocationTypeLU" - type_name: Mapped[str] = mapped_column(String) - description: Mapped[str] = mapped_column(String) - - -class LandOwners(Base): - """ - Organizations and people that have some relationship with a PVACD meter - - Typically irrigators? 
- """ - - __tablename__ = "LandOwners" - contact_name: Mapped[str] = mapped_column(String) - organization: Mapped[str] = mapped_column(String) - address: Mapped[str] = mapped_column(String) - city: Mapped[str] = mapped_column(String) - state: Mapped[str] = mapped_column(String) - zip: Mapped[str] = mapped_column(String) - phone: Mapped[str] = mapped_column(String) - mobile: Mapped[str] = mapped_column(String) - email: Mapped[str] = mapped_column(String) - note: Mapped[str] = mapped_column(String) - - -class Users(Base): - """ - All info about a user of the app - """ - - __tablename__ = "Users" - - full_name: Mapped[str] = mapped_column(String) - disabled: Mapped[bool] = mapped_column(Boolean, default=False) - username: Mapped[str] = deferred( - mapped_column(String, nullable=False) - ) # Defer sensitive info so it's not sent when it's included as part of a relationship - email: Mapped[str] = deferred(mapped_column(String)) - hashed_password: Mapped[str] = deferred(mapped_column(String, nullable=False)) - - user_role_id: Mapped[int] = deferred( - mapped_column(Integer, ForeignKey("UserRoles.id"), nullable=False) - ) - - user_role: Mapped["UserRoles"] = relationship("UserRoles") - display_name: Mapped[str] = mapped_column(String, nullable=True) - redirect_page: Mapped[str] = mapped_column(String, nullable=True, default="/") - avatar_img: Mapped[str] = mapped_column(String, nullable=True) - notifications: Mapped[List["Notifications"]] = relationship( - "Notifications", - back_populates="user", - cascade="all, delete-orphan", - foreign_keys="Notifications.user_id", - ) - created_notifications: Mapped[List["Notifications"]] = relationship( - "Notifications", - back_populates="creator", - foreign_keys="Notifications.created_by", - ) - user_sessions: Mapped[List["UserSessions"]] = relationship( - "UserSessions", - back_populates="user", - cascade="all, delete-orphan", - ) - - -class SignOutReasonTypeLU(Base): - __tablename__ = "sign_out_reason_type_lu" - - name: 
Mapped[str] = mapped_column(String(50), nullable=False, unique=True) - description: Mapped[Optional[str]] = mapped_column(String) - - user_sessions: Mapped[List["UserSessions"]] = relationship( - "UserSessions", back_populates="sign_out_reason_type" - ) - - -class UserSessions(Base): - __tablename__ = "user_sessions" - - user_id: Mapped[int] = mapped_column( - Integer, ForeignKey("Users.id", ondelete="CASCADE", onupdate="CASCADE"), index=True - ) - session_identifier: Mapped[str] = mapped_column( - String(36), nullable=False, unique=True, index=True - ) - ip_address: Mapped[Optional[str]] = mapped_column(String(255)) - user_agent: Mapped[Optional[str]] = mapped_column(String) - device_label: Mapped[Optional[str]] = mapped_column(String(255)) - device_type: Mapped[Optional[str]] = mapped_column(String(100)) - browser: Mapped[Optional[str]] = mapped_column(String(100)) - operating_system: Mapped[Optional[str]] = mapped_column(String(100)) - fingerprint_hash: Mapped[Optional[str]] = mapped_column(String(128), index=True) - signed_in_at: Mapped[DateTime] = mapped_column( - DateTime, nullable=False, server_default=func.now(), index=True - ) - last_seen_at: Mapped[DateTime] = mapped_column( - DateTime, nullable=False, server_default=func.now(), index=True - ) - signed_out_at: Mapped[Optional[DateTime]] = mapped_column(DateTime, index=True) - is_active: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True, index=True) - sign_out_reason_type_id: Mapped[Optional[int]] = mapped_column( - Integer, - ForeignKey( - "sign_out_reason_type_lu.id", ondelete="RESTRICT", onupdate="CASCADE" - ), - index=True, - ) - - user: Mapped["Users"] = relationship("Users", back_populates="user_sessions") - sign_out_reason_type: Mapped[Optional["SignOutReasonTypeLU"]] = relationship( - "SignOutReasonTypeLU", back_populates="user_sessions" - ) - - -class NotificationTypeLU(Base): - __tablename__ = "notification_type_lu" - - name: Mapped[str] = mapped_column(String(50), 
nullable=False, unique=True) - description: Mapped[Optional[str]] = mapped_column(String) - - notifications: Mapped[List["Notifications"]] = relationship( - "Notifications", back_populates="notification_type" - ) - - -class Notifications(Base): - __tablename__ = "notifications" - - user_id: Mapped[int] = mapped_column( - Integer, ForeignKey("Users.id", ondelete="CASCADE", onupdate="CASCADE"), index=True - ) - notification_type_id: Mapped[int] = mapped_column( - Integer, - ForeignKey( - "notification_type_lu.id", ondelete="RESTRICT", onupdate="CASCADE" - ), - index=True, - ) - created_by: Mapped[Optional[int]] = mapped_column( - Integer, ForeignKey("Users.id", ondelete="SET NULL", onupdate="CASCADE"), index=True - ) - title: Mapped[str] = mapped_column(String(255), nullable=False) - message: Mapped[str] = mapped_column(String, nullable=False) - link: Mapped[Optional[str]] = mapped_column(String(500)) - is_read: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False, index=True) - created_at: Mapped[DateTime] = mapped_column( - DateTime, nullable=False, server_default=func.now(), index=True - ) - read_at: Mapped[Optional[DateTime]] = mapped_column(DateTime) - - user: Mapped["Users"] = relationship( - "Users", back_populates="notifications", foreign_keys=[user_id] - ) - creator: Mapped[Optional["Users"]] = relationship( - "Users", back_populates="created_notifications", foreign_keys=[created_by] - ) - notification_type: Mapped["NotificationTypeLU"] = relationship( - "NotificationTypeLU", back_populates="notifications" - ) - - -# Association table that links roles and their associated scopes -ScopesRoles = Table( "ScopesRoles", - Base.metadata, - Column("security_scope_id", ForeignKey("SecurityScopes.id"), nullable=False), - Column("user_role_id", ForeignKey("UserRoles.id"), nullable=False), -) - - -class SecurityScopes(Base): - """ - Individual permissions - """ - - __tablename__ = "SecurityScopes" - scope_string: Mapped[str] = mapped_column(String, 
nullable=False) - description: Mapped[str] = mapped_column(String) - - -class UserRoles(Base): - __tablename__ = "UserRoles" - name: Mapped[str] = mapped_column(String, nullable=False) - - # The scopes associated with a given role - security_scopes: Mapped[List["SecurityScopes"]] = relationship( - secondary=ScopesRoles - ) - - -class WellUseLU(Base): - """ - The type of well - """ - - __tablename__ = "WellUseLU" - use_type: Mapped[str] = mapped_column(String, nullable=False) - code: Mapped[str] = mapped_column(String) - description: Mapped[str] = mapped_column(String) - - -class WaterSources(Base): - """ - The source of water for a well - """ - - __tablename__ = "water_sources" - name: Mapped[str] = mapped_column(String, nullable=False) - description: Mapped[str] = mapped_column(String) - - -class WellStatus(Base): - """ - The status of a well - """ - - __tablename__ = "well_status" - status: Mapped[str] = mapped_column(String, nullable=False) - description: Mapped[str] = mapped_column(String) - - -class Wells(Base): - """ - All wells - """ - - __tablename__ = "Wells" - name: Mapped[str] = mapped_column(String) - ra_number: Mapped[str] = mapped_column( - String - ) # RA Number is an OSE well identifier - owners: Mapped[str] = mapped_column(String) - osetag: Mapped[str] = mapped_column(String) - casing: Mapped[str] = mapped_column(String) - total_depth: Mapped[float] = mapped_column(Float) - outside_recorder: Mapped[str] = mapped_column(Boolean) - - use_type_id: Mapped[int] = mapped_column(Integer, ForeignKey("WellUseLU.id")) - location_id: Mapped[int] = mapped_column(Integer, ForeignKey("Locations.id")) - water_source_id: Mapped[int] = mapped_column( - Integer, ForeignKey("water_sources.id") - ) - well_status_id: Mapped[int] = mapped_column(Integer, ForeignKey("well_status.id")) - chloride_group_id: Mapped[int] = mapped_column(Integer) - - use_type: Mapped["WellUseLU"] = relationship() - location: Mapped["Locations"] = relationship() - water_source: 
Mapped["WaterSources"] = relationship() - well_status: Mapped["WellStatus"] = relationship() - - meters: Mapped[List["Meters"]] = relationship("Meters", back_populates="well") - - -class WellMeasurements(Base): - """ - The measurements made on a monitored well - """ - - __tablename__ = "WellMeasurements" - timestamp: Mapped[DateTime] = mapped_column( - DateTime, default=func.now(), nullable=False - ) - value: Mapped[Optional[float]] = mapped_column(Float, nullable=True) - - observed_property_id: Mapped[int] = mapped_column( - Integer, ForeignKey("ObservedPropertyTypeLU.id"), nullable=False - ) - submitting_user_id: Mapped[Optional[int]] = mapped_column( - Integer, ForeignKey("Users.id"), nullable=True - ) - unit_id: Mapped[int] = mapped_column( - Integer, ForeignKey("Units.id"), nullable=False - ) - well_id: Mapped[int] = mapped_column( - Integer, ForeignKey("Wells.id"), nullable=False - ) - - observed_property: Mapped["ObservedPropertyTypeLU"] = relationship() - submitting_user: Mapped["Users"] = relationship() - unit: Mapped["Units"] = relationship() - well: Mapped["Wells"] = relationship() - - -class workOrderStatusLU(Base): - """ - Models the status of a work order - """ - - __tablename__ = "work_order_status_lu" - name = mapped_column(String, nullable=False) - description = mapped_column(String, nullable=False) - - -class workOrders(Base): - """ - Models work orders and associated information - """ - - __tablename__ = "work_orders" - date_created: Mapped[DateTime] = mapped_column(DateTime, nullable=False) - creator: Mapped[str] = mapped_column( - String, nullable=True - ) # There is no consistent list of persons for this, so it is nullable - title: Mapped[str] = mapped_column(String, nullable=False) - description: Mapped[str] = mapped_column(String, nullable=True) - meter_id: Mapped[int] = mapped_column( - Integer, ForeignKey("Meters.id"), nullable=False - ) - status_id: Mapped[int] = mapped_column( - Integer, ForeignKey("work_order_status_lu.id"), 
nullable=False - ) - notes: Mapped[str] = mapped_column(String, nullable=True) - assigned_user_id: Mapped[int] = mapped_column( - Integer, ForeignKey("Users.id"), nullable=True - ) - ose_request_id: Mapped[int] = mapped_column(Integer, nullable=True) - - meter: Mapped["Meters"] = relationship() - status: Mapped["workOrderStatusLU"] = relationship() - assigned_user: Mapped["Users"] = relationship() - - -class meterRegisters(Base): - """ - Models the registers of a meter - """ - - __tablename__ = "meter_registers" - brand: Mapped[str] = mapped_column(String, nullable=False) - meter_size: Mapped[float] = mapped_column(Float, nullable=False) - part_id: Mapped[int] = mapped_column(Integer, ForeignKey("Parts.id")) - ratio: Mapped[str] = mapped_column(String) - dial_units_id: Mapped[int] = mapped_column( - Integer, ForeignKey("Units.id"), nullable=False - ) - totalizer_units_id: Mapped[int] = mapped_column( - Integer, ForeignKey("Units.id"), nullable=False - ) - number_of_digits: Mapped[int] = mapped_column(Integer, nullable=False) - decimal_digits: Mapped[int] = mapped_column(Integer) - multiplier: Mapped[float] = mapped_column(Float, nullable=False) - notes: Mapped[str] = mapped_column(String) - - dial_units: Mapped["Units"] = relationship(foreign_keys=[dial_units_id]) - totalizer_units: Mapped["Units"] = relationship(foreign_keys=[totalizer_units_id]) + "SecurityScopes", + "ServiceTypeLU", + "ServicesPerformed", + "SignOutReasonTypeLU", + "Units", + "UserRoles", + "UserSessions", + "Users", + "WellMeasurements", + "Wells", + "WellStatus", + "WellUseLU", + "WaterSources", + "meterRegisters", + "workOrders", + "workOrderStatusLU", +] diff --git a/api/models/meter.py b/api/models/meter.py new file mode 100644 index 00000000..a833af0c --- /dev/null +++ b/api/models/meter.py @@ -0,0 +1,231 @@ +from typing import List, Optional + +from sqlalchemy import ( + Boolean, + Column, + DateTime, + Float, + ForeignKey, + Integer, + Numeric, + String, + Table, + func, +) +from 
sqlalchemy.orm import Mapped, mapped_column, relationship + +from api.models.base import Base + + +class ServiceTypeLU(Base): + __tablename__ = "ServiceTypeLU" + service_name: Mapped[str] + description: Mapped[str] + + +ServicesPerformed = Table( + "ServicesPerformed", + Base.metadata, + Column("meter_activity_id", ForeignKey("MeterActivities.id"), nullable=False), + Column("service_type_id", ForeignKey("ServiceTypeLU.id"), nullable=False), +) + + +class NoteTypeLU(Base): + __tablename__ = "NoteTypeLU" + note: Mapped[str] + details: Mapped[str] + slug: Mapped[str] + commonly_used: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False) + + +Notes = Table( + "Notes", + Base.metadata, + Column("meter_activity_id", ForeignKey("MeterActivities.id"), nullable=False), + Column("note_type_id", ForeignKey("NoteTypeLU.id"), nullable=False), +) + + +class Meters(Base): + __tablename__ = "Meters" + serial_number: Mapped[str] = mapped_column(String, nullable=False) + contact_name: Mapped[Optional[str]] = mapped_column(String) + contact_phone: Mapped[Optional[str]] = mapped_column(String) + notes: Mapped[Optional[str]] = mapped_column(String) + price: Mapped[Optional[float]] = mapped_column(Numeric(10, 2)) + + meter_type_id: Mapped[int] = mapped_column( + Integer, ForeignKey("MeterTypeLU.id"), nullable=False + ) + status_id: Mapped[int] = mapped_column( + Integer, ForeignKey("MeterStatusLU.id"), nullable=False + ) + well_id: Mapped[int] = mapped_column(Integer, ForeignKey("Wells.id"), nullable=False) + location_id: Mapped[int] = mapped_column( + Integer, ForeignKey("Locations.id"), nullable=False + ) + register_id: Mapped[int] = mapped_column( + Integer, ForeignKey("meter_registers.id"), nullable=True + ) + water_users: Mapped[Optional[str]] = mapped_column(String) + meter_owner: Mapped[Optional[str]] = mapped_column(String) + + meter_type: Mapped["MeterTypeLU"] = relationship() + meter_register: Mapped["meterRegisters"] = relationship() + status: 
# NOTE(review): this span of the patch defines the post-patch SQLAlchemy models
# across several files; section comments below mark the owning file. Two
# annotation/column type mismatches are fixed (marked FIX); everything else is
# reproduced as added by the patch.

# --- api/models/* : meter-related models -------------------------------------


class MeterTypeLU(Base):
    """Lookup table of meter makes/models."""

    __tablename__ = "MeterTypeLU"

    brand: Mapped[str] = mapped_column(String)
    series: Mapped[str] = mapped_column(String)
    model: Mapped[str] = mapped_column(String)
    size: Mapped[float] = mapped_column(Float)
    description: Mapped[str] = mapped_column(String)
    in_use: Mapped[bool] = mapped_column(Boolean, nullable=False)


class MeterStatusLU(Base):
    """Lookup table of meter statuses."""

    __tablename__ = "MeterStatusLU"

    status_name: Mapped[str] = mapped_column(String)
    description: Mapped[str] = mapped_column(String)


class MeterActivities(Base):
    """A unit of work performed on a meter.

    Links the submitting user, the meter, the activity type, the location and
    (optionally) a work order, plus many-to-many services performed and notes.
    """

    __tablename__ = "MeterActivities"

    timestamp_start: Mapped[DateTime] = mapped_column(DateTime, nullable=False)
    timestamp_end: Mapped[DateTime] = mapped_column(DateTime, nullable=False)
    # FIX: annotation was Mapped[DateTime] while the column type is String.
    description: Mapped[str] = mapped_column(String)
    submitting_user_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("Users.id"), nullable=False
    )
    meter_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("Meters.id"), nullable=False
    )
    activity_type_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("ActivityTypeLU.id"), nullable=False
    )
    location_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("Locations.id"), nullable=False
    )
    ose_share: Mapped[bool] = mapped_column(Boolean, nullable=False)
    water_users: Mapped[str] = mapped_column(String)
    work_order_id: Mapped[int] = mapped_column(Integer, ForeignKey("work_orders.id"))

    submitting_user: Mapped["Users"] = relationship()
    meter: Mapped["Meters"] = relationship()
    activity_type: Mapped["ActivityTypeLU"] = relationship()
    location: Mapped["Locations"] = relationship()
    services_performed: Mapped[List["ServiceTypeLU"]] = relationship(
        "ServiceTypeLU", secondary=ServicesPerformed
    )
    notes: Mapped[List["NoteTypeLU"]] = relationship("NoteTypeLU", secondary=Notes)
    work_order: Mapped["workOrders"] = relationship()
    # Read-only view of the well that shares this activity's location.
    well: Mapped["Wells"] = relationship(
        "Wells",
        primaryjoin="MeterActivities.location_id == Wells.location_id",
        foreign_keys="MeterActivities.location_id",
        viewonly=True,
    )
    photos: Mapped[List["MeterActivityPhotos"]] = relationship(
        "MeterActivityPhotos", back_populates="meter_activity", cascade="all, delete"
    )
    parts_used_links: Mapped[list["PartsUsed"]] = relationship(
        back_populates="meter_activity",
        cascade="all, delete-orphan",
    )


class MeterActivityPhotos(Base):
    """Photo attached to a meter activity; the binary lives in GCS at gcs_path."""

    __tablename__ = "MeterActivityPhotos"

    id: Mapped[int] = mapped_column(
        Integer, primary_key=True, index=True, autoincrement=True
    )
    meter_activity_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("MeterActivities.id", ondelete="CASCADE"), nullable=False
    )
    file_name: Mapped[str] = mapped_column(String, nullable=False)
    gcs_path: Mapped[str] = mapped_column(String, nullable=False)
    uploaded_at: Mapped[DateTime] = mapped_column(
        DateTime(timezone=True), server_default=func.now()
    )
    # Legacy-style Column declaration kept as-is to avoid changing the mapping.
    original_file_name = Column(String, nullable=True)

    meter_activity: Mapped["MeterActivities"] = relationship(
        "MeterActivities", back_populates="photos"
    )


class ActivityTypeLU(Base):
    """Lookup table of activity types."""

    __tablename__ = "ActivityTypeLU"

    name: Mapped[str] = mapped_column(String)
    description: Mapped[str] = mapped_column(String)
    permission: Mapped[str] = mapped_column(String)


class MeterObservations(Base):
    """A single measured value observed on a meter."""

    __tablename__ = "MeterObservations"

    timestamp: Mapped[DateTime] = mapped_column(DateTime, nullable=False)
    value: Mapped[float] = mapped_column(Float, nullable=False)
    notes: Mapped[str] = mapped_column(String)
    ose_share: Mapped[bool] = mapped_column(Boolean, nullable=False)
    submitting_user_id: Mapped[int] = mapped_column(Integer, ForeignKey("Users.id"))
    meter_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("Meters.id"), nullable=False
    )
    observed_property_type_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("ObservedPropertyTypeLU.id"), nullable=False
    )
    unit_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("Units.id"), nullable=False
    )
    location_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("Locations.id"), nullable=False
    )

    submitting_user: Mapped["Users"] = relationship()
    meter: Mapped["Meters"] = relationship()
    observed_property: Mapped["ObservedPropertyTypeLU"] = relationship()
    unit: Mapped["Units"] = relationship()
    location: Mapped["Locations"] = relationship()


class ObservedPropertyTypeLU(Base):
    """Lookup table of observable properties and the units they accept."""

    __tablename__ = "ObservedPropertyTypeLU"

    name: Mapped[str] = mapped_column(String)
    description: Mapped[str] = mapped_column(String)
    context: Mapped[str] = mapped_column(String)
    units: Mapped[List["Units"]] = relationship(secondary="PropertyUnits")


class Units(Base):
    """Lookup table of measurement units."""

    __tablename__ = "Units"

    name: Mapped[str] = mapped_column(String)
    name_short: Mapped[str] = mapped_column(String)
    description: Mapped[str] = mapped_column(String)


# Association table: which units are valid for which observed property.
PropertyUnits = Table(
    "PropertyUnits",
    Base.metadata,
    Column("property_id", ForeignKey("ObservedPropertyTypeLU.id"), nullable=False),
    Column("unit_id", ForeignKey("Units.id"), nullable=False),
)


class meterRegisters(Base):
    """Register (dial/totalizer) configuration for a meter."""

    __tablename__ = "meter_registers"

    brand: Mapped[str] = mapped_column(String, nullable=False)
    meter_size: Mapped[float] = mapped_column(Float, nullable=False)
    part_id: Mapped[int] = mapped_column(Integer, ForeignKey("Parts.id"))
    ratio: Mapped[str] = mapped_column(String)
    dial_units_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("Units.id"), nullable=False
    )
    totalizer_units_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("Units.id"), nullable=False
    )
    number_of_digits: Mapped[int] = mapped_column(Integer, nullable=False)
    decimal_digits: Mapped[int] = mapped_column(Integer)
    multiplier: Mapped[float] = mapped_column(Float, nullable=False)
    notes: Mapped[str] = mapped_column(String)

    dial_units: Mapped["Units"] = relationship(foreign_keys=[dial_units_id])
    totalizer_units: Mapped["Units"] = relationship(foreign_keys=[totalizer_units_id])


# --- new file: api/models/part.py --------------------------------------------
# File-level imports added by the patch:
#   from datetime import date
#   from typing import List, Optional
#   from sqlalchemy import Boolean, Column, Date, Float, ForeignKey, Integer, String, Table
#   from sqlalchemy.orm import Mapped, mapped_column, relationship
#   from api.models.base import Base


class PartTypeLU(Base):
    """Lookup table of part types."""

    __tablename__ = "PartTypeLU"

    name: Mapped[str]
    description: Mapped[str]


# Association table: which parts fit which meter types.
PartAssociation = Table(
    "PartAssociation",
    Base.metadata,
    Column("part_id", ForeignKey("Parts.id"), nullable=False),
    Column("meter_type_id", ForeignKey("MeterTypeLU.id"), nullable=False),
)


class Parts(Base):
    """An inventory part, optionally associated with specific meter types."""

    __tablename__ = "Parts"

    part_number: Mapped[str] = mapped_column(String, unique=True, nullable=False)
    description: Mapped[Optional[str]]
    vendor: Mapped[Optional[str]]
    initial_count: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
    note: Mapped[Optional[str]]
    in_use: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True)
    commonly_used: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False)
    price: Mapped[Optional[float]] = mapped_column(Float)

    part_type_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("PartTypeLU.id"), nullable=False
    )
    part_type: Mapped["PartTypeLU"] = relationship()
    meter_types: Mapped[Optional[List["MeterTypeLU"]]] = relationship(
        secondary=PartAssociation
    )
    parts_used_links: Mapped[list["PartsUsed"]] = relationship(
        back_populates="part",
        cascade="all, delete-orphan",
    )


class PartsUsed(Base):
    """Join row recording how many of a part a meter activity consumed."""

    __tablename__ = "PartsUsed"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    meter_activity_id: Mapped[int] = mapped_column(
        ForeignKey("MeterActivities.id"), nullable=False
    )
    part_id: Mapped[int] = mapped_column(ForeignKey("Parts.id"), nullable=False)
    count: Mapped[int] = mapped_column(Integer, nullable=False, default=1)

    part: Mapped["Parts"] = relationship(back_populates="parts_used_links")
    meter_activity: Mapped["MeterActivities"] = relationship(
        back_populates="parts_used_links"
    )


class PartsAdded(Base):
    """A dated addition of stock for a part."""

    __tablename__ = "PartsAdded"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    part_id: Mapped[int] = mapped_column(ForeignKey("Parts.id"), nullable=False)
    count: Mapped[int] = mapped_column(Integer, nullable=False, default=1)
    date: Mapped[date] = mapped_column(Date, nullable=False)
    note: Mapped[str | None] = mapped_column(String, nullable=True)

    part: Mapped["Parts"] = relationship()


# --- new file: api/models/user.py --------------------------------------------
# File-level imports added by the patch:
#   from typing import List, Optional
#   from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, String, Table, func
#   from sqlalchemy.orm import Mapped, deferred, mapped_column, relationship
#   from api.models.base import Base


class Users(Base):
    """An application user; credentials and role are deferred columns."""

    __tablename__ = "Users"

    full_name: Mapped[str] = mapped_column(String)
    disabled: Mapped[bool] = mapped_column(Boolean, default=False)
    username: Mapped[str] = deferred(mapped_column(String, nullable=False))
    email: Mapped[str] = deferred(mapped_column(String))
    hashed_password: Mapped[str] = deferred(mapped_column(String, nullable=False))
    user_role_id: Mapped[int] = deferred(
        mapped_column(Integer, ForeignKey("UserRoles.id"), nullable=False)
    )

    user_role: Mapped["UserRoles"] = relationship("UserRoles")
    # NOTE(review): the three columns below are nullable but annotated
    # Mapped[str]; Mapped[Optional[str]] may be more accurate — confirm.
    display_name: Mapped[str] = mapped_column(String, nullable=True)
    redirect_page: Mapped[str] = mapped_column(String, nullable=True, default="/")
    avatar_img: Mapped[str] = mapped_column(String, nullable=True)
    notifications: Mapped[List["Notifications"]] = relationship(
        "Notifications",
        back_populates="user",
        cascade="all, delete-orphan",
        foreign_keys="Notifications.user_id",
    )
    created_notifications: Mapped[List["Notifications"]] = relationship(
        "Notifications",
        back_populates="creator",
        foreign_keys="Notifications.created_by",
    )
    user_sessions: Mapped[List["UserSessions"]] = relationship(
        "UserSessions",
        back_populates="user",
        cascade="all, delete-orphan",
    )


class SignOutReasonTypeLU(Base):
    """Lookup table of reasons a session was signed out."""

    __tablename__ = "sign_out_reason_type_lu"

    name: Mapped[str] = mapped_column(String(50), nullable=False, unique=True)
    description: Mapped[Optional[str]] = mapped_column(String)
    user_sessions: Mapped[List["UserSessions"]] = relationship(
        "UserSessions", back_populates="sign_out_reason_type"
    )


class UserSessions(Base):
    """A tracked login session for a user (device, timestamps, sign-out reason)."""

    __tablename__ = "user_sessions"

    user_id: Mapped[int] = mapped_column(
        Integer,
        ForeignKey("Users.id", ondelete="CASCADE", onupdate="CASCADE"),
        index=True,
    )
    session_identifier: Mapped[str] = mapped_column(
        String(36), nullable=False, unique=True, index=True
    )
    ip_address: Mapped[Optional[str]] = mapped_column(String(255))
    user_agent: Mapped[Optional[str]] = mapped_column(String)
    device_label: Mapped[Optional[str]] = mapped_column(String(255))
    device_type: Mapped[Optional[str]] = mapped_column(String(100))
    browser: Mapped[Optional[str]] = mapped_column(String(100))
    operating_system: Mapped[Optional[str]] = mapped_column(String(100))
    fingerprint_hash: Mapped[Optional[str]] = mapped_column(String(128), index=True)
    signed_in_at: Mapped[DateTime] = mapped_column(
        DateTime, nullable=False, server_default=func.now(), index=True
    )
    last_seen_at: Mapped[DateTime] = mapped_column(
        DateTime, nullable=False, server_default=func.now(), index=True
    )
    signed_out_at: Mapped[Optional[DateTime]] = mapped_column(DateTime, index=True)
    is_active: Mapped[bool] = mapped_column(
        Boolean, nullable=False, default=True, index=True
    )
    sign_out_reason_type_id: Mapped[Optional[int]] = mapped_column(
        Integer,
        ForeignKey(
            "sign_out_reason_type_lu.id", ondelete="RESTRICT", onupdate="CASCADE"
        ),
        index=True,
    )

    user: Mapped["Users"] = relationship("Users", back_populates="user_sessions")
    sign_out_reason_type: Mapped[Optional["SignOutReasonTypeLU"]] = relationship(
        "SignOutReasonTypeLU", back_populates="user_sessions"
    )


class NotificationTypeLU(Base):
    """Lookup table of notification types."""

    __tablename__ = "notification_type_lu"

    name: Mapped[str] = mapped_column(String(50), nullable=False, unique=True)
    description: Mapped[Optional[str]] = mapped_column(String)
    notifications: Mapped[List["Notifications"]] = relationship(
        "Notifications", back_populates="notification_type"
    )


class Notifications(Base):
    """A notification delivered to a user, optionally created by another user."""

    __tablename__ = "notifications"

    user_id: Mapped[int] = mapped_column(
        Integer,
        ForeignKey("Users.id", ondelete="CASCADE", onupdate="CASCADE"),
        index=True,
    )
    notification_type_id: Mapped[int] = mapped_column(
        Integer,
        ForeignKey(
            "notification_type_lu.id", ondelete="RESTRICT", onupdate="CASCADE"
        ),
        index=True,
    )
    created_by: Mapped[Optional[int]] = mapped_column(
        Integer,
        ForeignKey("Users.id", ondelete="SET NULL", onupdate="CASCADE"),
        index=True,
    )
    title: Mapped[str] = mapped_column(String(255), nullable=False)
    message: Mapped[str] = mapped_column(String, nullable=False)
    link: Mapped[Optional[str]] = mapped_column(String(500))
    is_read: Mapped[bool] = mapped_column(
        Boolean, nullable=False, default=False, index=True
    )
    created_at: Mapped[DateTime] = mapped_column(
        DateTime, nullable=False, server_default=func.now(), index=True
    )
    read_at: Mapped[Optional[DateTime]] = mapped_column(DateTime)

    user: Mapped["Users"] = relationship(
        "Users", back_populates="notifications", foreign_keys=[user_id]
    )
    creator: Mapped[Optional["Users"]] = relationship(
        "Users", back_populates="created_notifications", foreign_keys=[created_by]
    )
    notification_type: Mapped["NotificationTypeLU"] = relationship(
        "NotificationTypeLU", back_populates="notifications"
    )


# Association table: which security scopes belong to which user role.
ScopesRoles = Table(
    "ScopesRoles",
    Base.metadata,
    Column("security_scope_id", ForeignKey("SecurityScopes.id"), nullable=False),
    Column("user_role_id", ForeignKey("UserRoles.id"), nullable=False),
)


class SecurityScopes(Base):
    """A permission scope string granted to roles."""

    __tablename__ = "SecurityScopes"

    scope_string: Mapped[str] = mapped_column(String, nullable=False)
    description: Mapped[str] = mapped_column(String)


class UserRoles(Base):
    """A user role and the security scopes it grants."""

    __tablename__ = "UserRoles"

    name: Mapped[str] = mapped_column(String, nullable=False)
    security_scopes: Mapped[List["SecurityScopes"]] = relationship(
        secondary=ScopesRoles
    )


# --- new file: api/models/well.py --------------------------------------------
# File-level imports added by the patch:
#   from typing import List, Optional
#   from sqlalchemy import Boolean, DateTime, Float, ForeignKey, Integer, String, func
#   from sqlalchemy.orm import Mapped, mapped_column, relationship
#   from api.models.base import Base


class WellUseLU(Base):
    """Lookup table of well use types."""

    __tablename__ = "WellUseLU"

    use_type: Mapped[str] = mapped_column(String, nullable=False)
    code: Mapped[str] = mapped_column(String)
    description: Mapped[str] = mapped_column(String)


class WaterSources(Base):
    """Lookup table of water sources."""

    __tablename__ = "water_sources"

    name: Mapped[str] = mapped_column(String, nullable=False)
    description: Mapped[str] = mapped_column(String)


class WellStatus(Base):
    """Lookup table of well statuses."""

    __tablename__ = "well_status"

    status: Mapped[str] = mapped_column(String, nullable=False)
    description: Mapped[str] = mapped_column(String)


class Wells(Base):
    """A well, its location, status, water source, and installed meters."""

    __tablename__ = "Wells"

    name: Mapped[str] = mapped_column(String)
    ra_number: Mapped[str] = mapped_column(String)
    owners: Mapped[str] = mapped_column(String)
    osetag: Mapped[str] = mapped_column(String)
    casing: Mapped[str] = mapped_column(String)
    total_depth: Mapped[float] = mapped_column(Float)
    # FIX: annotation was Mapped[str] while the column type is Boolean.
    outside_recorder: Mapped[bool] = mapped_column(Boolean)

    use_type_id: Mapped[int] = mapped_column(Integer, ForeignKey("WellUseLU.id"))
    location_id: Mapped[int] = mapped_column(Integer, ForeignKey("Locations.id"))
    water_source_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("water_sources.id")
    )
    well_status_id: Mapped[int] = mapped_column(Integer, ForeignKey("well_status.id"))
    chloride_group_id: Mapped[int] = mapped_column(Integer)

    use_type: Mapped["WellUseLU"] = relationship()
    location: Mapped["Locations"] = relationship()
    water_source: Mapped["WaterSources"] = relationship()
    well_status: Mapped["WellStatus"] = relationship()
    meters: Mapped[List["Meters"]] = relationship("Meters", back_populates="well")


class WellMeasurements(Base):
    """A timestamped measurement of an observed property at a well."""

    __tablename__ = "WellMeasurements"

    timestamp: Mapped[DateTime] = mapped_column(
        DateTime, default=func.now(), nullable=False
    )
    value: Mapped[Optional[float]] = mapped_column(Float, nullable=True)
    observed_property_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("ObservedPropertyTypeLU.id"), nullable=False
    )
    submitting_user_id: Mapped[Optional[int]] = mapped_column(
        Integer, ForeignKey("Users.id"), nullable=True
    )
    unit_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("Units.id"), nullable=False
    )
    well_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("Wells.id"), nullable=False
    )

    observed_property: Mapped["ObservedPropertyTypeLU"] = relationship()
    submitting_user: Mapped["Users"] = relationship()
    unit: Mapped["Units"] = relationship()
    well: Mapped["Wells"] = relationship()


# --- new file: api/models/work_order.py --------------------------------------
# File-level imports added by the patch:
#   from sqlalchemy import DateTime, ForeignKey, Integer, String
#   from sqlalchemy.orm import Mapped, mapped_column, relationship
#   from api.models.base import Base


class workOrderStatusLU(Base):
    """Lookup table of work order statuses."""

    __tablename__ = "work_order_status_lu"

    name: Mapped[str] = mapped_column(String, nullable=False)
    description: Mapped[str] = mapped_column(String, nullable=False)


class workOrders(Base):
    """A work order against a meter, with status and optional assignee."""

    __tablename__ = "work_orders"

    date_created: Mapped[DateTime] = mapped_column(DateTime, nullable=False)
    creator: Mapped[str] = mapped_column(String, nullable=True)
    title: Mapped[str] = mapped_column(String, nullable=False)
    description: Mapped[str] = mapped_column(String, nullable=True)
    meter_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("Meters.id"), nullable=False
    )
    status_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("work_order_status_lu.id"), nullable=False
    )
    notes: Mapped[str] = mapped_column(String, nullable=True)
    assigned_user_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("Users.id"), nullable=True
    )
    # External OSE request identifier; no FK, managed by the OSE integration.
    ose_request_id: Mapped[int] = mapped_column(Integer, nullable=True)

    meter: Mapped["Meters"] = relationship()
    status: Mapped["workOrderStatusLU"] = relationship()
    assigned_user: Mapped["Users"] = relationship()
# --- api/routes/OSE.py (post-patch) ------------------------------------------
# The patch removes this module's inline DTO models and query logic and
# delegates to api.schemas.ose / api.services.ose; only thin endpoints remain.

from datetime import datetime

from fastapi import Depends, APIRouter, Query
from sqlalchemy.orm import Session

from api.schemas import meter, ose
from api.session import get_db
from api.auth.dependencies import ScopedUser
from api.services import ose as ose_service

# Every endpoint on this router requires the OSE scope.
ose_router = APIRouter(dependencies=[Depends(ScopedUser.OSE)])


@ose_router.get(
    "/shared_meter_maintenance_history",
    response_model=list[ose.DateHistoryDTO],
    tags=["OSE"],
)
def get_shared_history(
    # NOTE(review): parameter annotations fall outside the visible hunk —
    # confirm against the original file before relying on them.
    start_datetime: datetime,
    end_datetime: datetime,
    db: Session = Depends(get_db),
):
    """
    Datetime Format ISO8601: YYYY-MM-DDTHH:MM:SS+HH:MM, example 2023-09-12T00:00:00+00:00
    """
    return ose_service.get_shared_history(db, start_datetime, end_datetime)


@ose_router.get(
    "/meter_maintenance_by_ose_request_id",
    response_model=list[ose.DateHistoryDTO],
    tags=["OSE"],
)
def get_ose_maintenance_by_requestID(
    # NOTE(review): exact annotation not visible in the hunk; the patch keeps
    # the Query import, so a Query-backed list parameter is assumed — confirm.
    ose_request_ids: list[int] = Query(...),
    db: Session = Depends(get_db),
):
    """
    Returns activities and meter readings for each OSE well associated with a given OSE request ID.
    """
    return ose_service.get_maintenance_by_request_ids(db, ose_request_ids)


@ose_router.get(
    "/meter_information",
    tags=["OSE"],
    response_model=meter.PublicMeter,
)
def get_meter_information(
    serial_number: str,
    db: Session = Depends(get_db),
):
    """Return public information about the meter with the given serial number."""
    return ose_service.get_meter_information(db, serial_number)


@ose_router.get(
    "/disapproval_response_by_request_id",
    tags=["OSE"],
    response_model=ose.DisapprovalStatus,
)
def get_disapproval_response_by_request_id(
    ose_request_id: int, db: Session = Depends(get_db)
):
    """Return the disapproval request/response status for an OSE request ID."""
    return ose_service.get_disapproval_response(db, ose_request_id)


@ose_router.get(
    "/get_DB_types", tags=["OSE"], response_model=meter.DBTypesForOSE
)
def get_DB_types(db: Session = Depends(get_db)):
    """
    Return DB types from lookup tables
    """
    return ose_service.get_db_types(db)


# --- api/routes/activities.py (post-patch, portion visible in this span) -----
# File-level imports as rewritten by the patch:
#   from fastapi import Depends, APIRouter, File, UploadFile, Form
#   from fastapi.exceptions import HTTPException
#   from fastapi.responses import StreamingResponse
#   from sqlalchemy.orm import Session
#   from typing import List
#   from api.schemas import meter
#   from api.models.user import Users
#   from api.session import get_db
#   from api.security import get_current_user
#   from api.services import activities as activity_service
#   from api.services import storage as storage_service
#   from api.auth.dependencies import ScopedUser
#   import json

activity_router = APIRouter()
public_activity_router = APIRouter()
os.getenv("GCP_PHOTO_PREFIX", "") - MAX_PHOTOS_PER_REQUEST = 2 MAX_PHOTOS_PER_METER = 6 @@ -52,50 +26,14 @@ async def get_activity_photo( photo_file_name: str, db: Session = Depends(get_db), ): - photo = ( - db.query(MeterActivityPhotos) - .filter( - MeterActivityPhotos.meter_activity_id == activity_id, - MeterActivityPhotos.file_name == photo_file_name, - ) - .first() - ) - - if not photo: - raise HTTPException(status_code=404, detail="Photo not found for this activity") - - try: - client = storage.Client() - bucket = client.bucket(BUCKET_NAME) - blob = bucket.blob(photo.gcs_path) - - # Optional: ensure blob exists (avoids returning empty/500) - if not blob.exists(client=client): - raise HTTPException( - status_code=404, detail="Photo file missing from storage" - ) - - # Pull content type from GCS metadata (fallback if absent) - blob.reload(client=client) - content_type = blob.content_type or "application/octet-stream" - - # 3) Stream back to client - file_obj = blob.open("rb") # streaming file-like object - - # Inline display; if you want download behavior change to 'attachment' - headers = {"Content-Disposition": f'inline; filename="{photo.file_name}"'} - - return StreamingResponse(file_obj, media_type=content_type, headers=headers) - - except HTTPException: - raise - except Exception: - raise HTTPException(status_code=500, detail="Failed to retrieve photo") + photo = storage_service.get_activity_photo_record(db, activity_id, photo_file_name) + file_obj, content_type, headers = storage_service.open_activity_photo(photo) + return StreamingResponse(file_obj, media_type=content_type, headers=headers) @activity_router.post( "/activities", - response_model=meter_schemas.MeterActivity, + response_model=meter.MeterActivity, dependencies=[Depends(ScopedUser.ActivityWrite)], tags=["Activities"], ) @@ -118,264 +56,18 @@ async def post_activity( ) try: - activity_form = meter_schemas.ActivityForm.parse_obj(json.loads(activity)) + activity_form = 
meter.ActivityForm.parse_obj(json.loads(activity)) except Exception as e: raise HTTPException(status_code=400, detail=f"Invalid activity payload: {e}") - # Set some variables that will be used to determine how the meter is updated - update_meter_state = True - user_level = user.user_role.name - - # First check that the date and time of the activity are newer than the last activity - last_activity = db.scalars( - select(MeterActivities) - .where(MeterActivities.meter_id == activity_form.activity_details.meter_id) - .order_by(MeterActivities.timestamp_end.desc()) - .limit(1) - ).first() - - # Calculate event start and end datetimes - activity_date = activity_form.activity_details.date.date() - # Set the times to have 0 seconds, this prevents accidental duplicate activities - starttime = activity_form.activity_details.start_time.time().replace(second=0) - endtime = activity_form.activity_details.end_time.time().replace(second=0) - start_datetime = datetime.combine(activity_date, starttime) - end_datetime = datetime.combine(activity_date, endtime) - - if last_activity: - if last_activity.timestamp_end > end_datetime: - update_meter_state = False - - if user_level != "Admin": - raise HTTPException( - status_code=409, - detail="Submitted activity is older than the last activity.", - ) - - activity_meter = db.scalars( - select(Meters).where(activity_form.activity_details.meter_id == Meters.id) - ).first() - - activity_type = db.scalars( - select(ActivityTypeLU).where( - activity_form.activity_details.activity_type_id == ActivityTypeLU.id - ) - ).first() - - # Get the location of the activity based on the well associated with the meter - # If there is no well, assume the activity took place at the "Warehouse" - hq_location = db.scalars( - select(Locations).where(Locations.type_id == 1) - ).first() # Probably needs a slug - - if activity_form.current_installation.well_id: - activity_well = db.scalars( - select(Wells).where(activity_form.current_installation.well_id == 
Wells.id) - ).first() - activity_location = activity_well.location.id - else: - activity_location = hq_location.id - - # ---- Create the activity itself ---- - meter_activity = MeterActivities( - timestamp_start=start_datetime, - timestamp_end=end_datetime, - description=activity_form.maintenance_repair.description, - submitting_user_id=activity_form.activity_details.user_id, - meter_id=activity_form.activity_details.meter_id, - activity_type_id=activity_form.activity_details.activity_type_id, - location_id=activity_location, - ose_share=activity_form.activity_details.share_ose, - water_users=activity_form.current_installation.water_users, + return await activity_service.create_activity( + db=db, + activity_form=activity_form, + user=user, + photos=photos, + max_photos_per_meter=MAX_PHOTOS_PER_METER, ) - # If a work order is associated with the activity, add it to the activity - if activity_form.activity_details.work_order_id: - meter_activity.work_order_id = activity_form.activity_details.work_order_id - - # Add the activity to the database and if it already exists raise an error - try: - db.add(meter_activity) - db.commit() - db.refresh(meter_activity) # make sure meter_activity.id is available - except IntegrityError as _e: - raise HTTPException( - status_code=409, detail="Activity overlaps with existing activity." 
- ) - - db.flush() - - # Create the observations - if activity_form.activity_details.share_ose: - # Set OSE flag in observation to true - share_ose_observation = True - else: - share_ose_observation = False - - for observation_form in activity_form.observations: - observation_time = observation_form.time.time() - observation_datetime = datetime.combine(activity_date, observation_time) - observation = MeterObservations( - timestamp=observation_datetime, - value=observation_form.reading, - observed_property_type_id=observation_form.property_type_id, - unit_id=observation_form.unit_id, - submitting_user_id=activity_form.activity_details.user_id, - meter_id=activity_form.activity_details.meter_id, - location_id=activity_location, - ose_share=share_ose_observation, - ) - db.add(observation) - - # Associate notes - notes = db.scalars( - select(NoteTypeLU).where( - NoteTypeLU.id.in_(activity_form.notes.selected_note_ids) - ) - ).all() - meter_activity.notes = notes - - # Associate working status note - status_note_type = db.scalars( - select(NoteTypeLU).where( - NoteTypeLU.slug == activity_form.notes.working_on_arrival_slug - ) - ).first() - meter_activity.notes.append(status_note_type) - - # Associate and handle parts use - used_parts = db.scalars( - select(Parts).where(Parts.id.in_(activity_form.part_used_ids)) - ).all() - meter_activity.parts_used = used_parts - - for used_part in used_parts: - used_part.count -= 1 - - # Associate services performed - services = db.scalars( - select(ServiceTypeLU).where( - ServiceTypeLU.id.in_(activity_form.maintenance_repair.service_type_ids) - ) - ).all() - meter_activity.services_performed = services - - db.commit() - - # ---- Update the current state of the meter based on the activity type ---- - meter_statuses = db.scalars(select(MeterStatusLU)).all() - meter_statuses = {status.status_name: status.id for status in meter_statuses} - - if update_meter_state: - if (activity_type.name == "Uninstall") or ( - activity_type.name == 
"Uninstall and Hold" - ): # This needs to be a slug - activity_meter.location_id = hq_location.id - activity_meter.well_id = None - activity_meter.water_users = None - - if activity_type.name == "Uninstall and Hold": - # Set status as On Hold - activity_meter.status_id = meter_statuses["On Hold"] - else: - # Set status as Uninstalled - activity_meter.status_id = meter_statuses["Warehouse"] - - if activity_type.name == "Install": - activity_meter.well_id = activity_well.id - activity_meter.location_id = activity_location - activity_meter.status_id = meter_statuses["Installed"] - activity_meter.water_users = activity_form.current_installation.water_users - - if activity_type.name == "Scrap": - activity_meter.well_id = None - activity_meter.location_id = None - activity_meter.status_id = meter_statuses["Scrapped"] - activity_meter.water_users = None - activity_meter.meter_owner = None - - if activity_type.name == "Sell": - activity_meter.well_id = None - activity_meter.location_id = None - activity_meter.status_id = meter_statuses["Sold"] - activity_meter.water_users = None - activity_meter.meter_owner = activity_form.current_installation.meter_owner - - if activity_type.name == "Change Water Users": - activity_meter.water_users = activity_form.current_installation.water_users - - # Make updates to the meter based on user's entry in the current installation section - if activity_type.name != "Uninstall": - activity_meter.contact_name = ( - activity_form.current_installation.contact_name - ) - activity_meter.contact_phone = ( - activity_form.current_installation.contact_phone - ) - activity_meter.notes = activity_form.current_installation.notes - - db.commit() - - # ---- Handle photo file uploads ---- - if photos: - print(f"Received {len(photos)} photos") - print(f"Uploading to bucket={BUCKET_NAME}, prefix={PHOTO_PREFIX}") - client = storage.Client() - bucket = client.bucket(BUCKET_NAME) - - for file in photos: - ext = Path(file.filename).suffix or ".jpg" - unique_name 
= f"{uuid.uuid4()}{ext}" - blob_path = f"{PHOTO_PREFIX}/{meter_activity.id}/{unique_name}" - blob = bucket.blob(blob_path) - - # Upload file content directly - try: - contents = await file.read() - print(f"Uploading {file.filename}, size={len(contents)} bytes") - - blob.upload_from_string(contents, content_type=file.content_type) - print(f"Uploaded to gs://{BUCKET_NAME}/{blob_path}") - except Exception as e: - print(f"ERROR uploading {file.filename}: {e}") - raise - - photo = MeterActivityPhotos( - meter_activity_id=meter_activity.id, - file_name=unique_name, - gcs_path=blob_path, - ) - db.add(photo) - - db.commit() - print(f"Saved {len(photos)} photos for activity {meter_activity.id}") - db.refresh(meter_activity) - - # ---- Enforce per-meter retention ---- - all_photos = ( - db.query(MeterActivityPhotos) - .join(MeterActivities) - .filter(MeterActivities.meter_id == meter_activity.meter_id) - .order_by(MeterActivityPhotos.uploaded_at.desc()) - .all() - ) - - if len(all_photos) > MAX_PHOTOS_PER_METER: - # keep newest MAX_PHOTOS_PER_METER, delete the rest - to_delete = all_photos[MAX_PHOTOS_PER_METER:] - for old_photo in to_delete: - try: - bucket.blob(old_photo.gcs_path).delete() - except Exception as e: - print( - f"Warning: failed to delete {old_photo.gcs_path} from GCS: {e}" - ) - db.delete(old_photo) - - db.commit() - - return meter_activity - @activity_router.patch( "/activities", @@ -383,87 +75,9 @@ async def post_activity( tags=["Activities"], ) def patch_activity( - patch_activity_form: meter_schemas.PatchActivity, db: Session = Depends(get_db) + patch_activity_form: meter.PatchActivity, db: Session = Depends(get_db) ): - """ - Patch an activity. 
- All input times should be UTC - """ - # Get the activity - activity = db.scalars( - select(MeterActivities).where( - MeterActivities.id == patch_activity_form.activity_id - ) - ).first() - - # Update the activity - activity.timestamp_start = patch_activity_form.timestamp_start - activity.timestamp_end = patch_activity_form.timestamp_end - activity.description = patch_activity_form.description - activity.ose_share = patch_activity_form.ose_share - activity.water_users = patch_activity_form.water_users - - # When updating location, if location_id is null assume the activity took place at the "Warehouse" - if patch_activity_form.location_id is None: - hq_location = db.scalars( - select(Locations).where(Locations.type_id == 1) - ).first() - activity.location_id = hq_location.id - else: - activity.location_id = patch_activity_form.location_id - - # Update the notes - # Easiest approach is to just delete existing and then re-add if there are any - delete_sql = text('DELETE FROM "Notes" WHERE meter_activity_id = :activity_id') - db.execute(delete_sql, {"activity_id": patch_activity_form.activity_id}) - - if patch_activity_form.note_ids: - insert_sql = text( - 'INSERT INTO "Notes" (meter_activity_id, note_type_id) VALUES (:activity_id, :note_id)' - ) - for note_id in patch_activity_form.note_ids: - db.execute( - insert_sql, - {"activity_id": patch_activity_form.activity_id, "note_id": note_id}, - ) - - # Update the parts used - delete_sql = text('DELETE FROM "PartsUsed" WHERE meter_activity_id = :activity_id') - db.execute(delete_sql, {"activity_id": patch_activity_form.activity_id}) - - if patch_activity_form.part_ids: - insert_sql = text( - 'INSERT INTO "PartsUsed" (meter_activity_id, part_id) VALUES (:activity_id, :part_id)' - ) - for part_id in patch_activity_form.part_ids: - db.execute( - insert_sql, - {"activity_id": patch_activity_form.activity_id, "part_id": part_id}, - ) - - # Update the services performed - delete_sql = text( - 'DELETE FROM "ServicesPerformed" 
WHERE meter_activity_id = :activity_id' - ) - db.execute(delete_sql, {"activity_id": patch_activity_form.activity_id}) - - if patch_activity_form.service_ids: - insert_sql = text( - 'INSERT INTO "ServicesPerformed" (meter_activity_id, service_type_id) VALUES (:activity_id, :service_id)' - ) - for service_id in patch_activity_form.service_ids: - db.execute( - insert_sql, - { - "activity_id": patch_activity_form.activity_id, - "service_id": service_id, - }, - ) - - # Commit the changes - db.commit() - - return {"status": "success"} + return activity_service.patch_activity(db, patch_activity_form) @activity_router.delete( @@ -472,51 +86,7 @@ def patch_activity( tags=["Activities"], ) def delete_activity(activity_id: int, db: Session = Depends(get_db)): - """ - Deletes an activity. - """ - # Get the activity - activity = db.scalars( - select(MeterActivities).where(MeterActivities.id == activity_id) - ).first() - - if not activity: - raise HTTPException(status_code=404, detail="Activity not found.") - - photos = db.scalars( - select(MeterActivityPhotos).where( - MeterActivityPhotos.meter_activity_id == activity_id - ) - ).all() - - storage_client = storage.Client() - bucket = storage_client.bucket(BUCKET_NAME) - - for photo in photos: - try: - blob = bucket.blob(photo.gcs_path) - blob.delete() - print(f"Deleted GCS object: {photo.gcs_path}") - except Exception as e: - print(f"Failed to delete {photo.gcs_path} from bucket: {e}") - - # Delete any notes associated with the activity - sql = text('DELETE FROM "Notes" WHERE meter_activity_id = :activity_id') - db.execute(sql, {"activity_id": activity_id}) - - # Delete any services performed associated with the activity - sql = text('DELETE FROM "ServicesPerformed" WHERE meter_activity_id = :activity_id') - db.execute(sql, {"activity_id": activity_id}) - - # Delete any parts used associated with the activity - sql = text('DELETE FROM "PartsUsed" WHERE meter_activity_id = :activity_id') - db.execute(sql, {"activity_id": 
activity_id}) - - # Delete the activity - db.delete(activity) - db.commit() - - return {"status": "success"} + return activity_service.delete_activity(db, activity_id) @activity_router.patch( @@ -525,44 +95,10 @@ def delete_activity(activity_id: int, db: Session = Depends(get_db)): tags=["Activities"], ) def patch_observation( - patch_observation_form: meter_schemas.PatchObservation, + patch_observation_form: meter.PatchObservation, db: Session = Depends(get_db), ): - """ - Patch an observation. - All input times should be UTC - """ - # Get the observation - observation = db.scalars( - select(MeterObservations).where( - MeterObservations.id == patch_observation_form.observation_id - ) - ).first() - - # Update the observation - observation.timestamp = patch_observation_form.timestamp - observation.value = patch_observation_form.value - observation.notes = patch_observation_form.notes - observation.observed_property_type_id = ( - patch_observation_form.observed_property_type_id - ) - observation.unit_id = patch_observation_form.unit_id - observation.meter_id = patch_observation_form.meter_id - observation.submitting_user_id = patch_observation_form.submitting_user_id - observation.ose_share = patch_observation_form.ose_share - - # When updating location, if location_id is null assume the observation took place at the "Warehouse" - if patch_observation_form.location_id is None: - hq_location = db.scalars( - select(Locations).where(Locations.type_id == 1) - ).first() - observation.location_id = hq_location.id - else: - observation.location_id = patch_observation_form.location_id - - db.commit() - - return {"status": "success"} + return activity_service.patch_observation(db, patch_observation_form) @activity_router.delete( @@ -571,49 +107,19 @@ def patch_observation( tags=["Activities"], ) def delete_observation(observation_id: int, db: Session = Depends(get_db)): - """ - Deletes an observation. 
- """ - # Get the observation - observation = db.scalars( - select(MeterObservations).where(MeterObservations.id == observation_id) - ).first() - - # Return error if the observation doesn't exist - if not observation: - raise HTTPException(status_code=404, detail="Observation not found.") - - # Delete the observation - db.delete(observation) - db.commit() - - return {"status": "success"} + return activity_service.delete_observation(db, observation_id) @activity_router.get( "/activity_types", - response_model=List[meter_schemas.ActivityTypeLU], + response_model=List[meter.ActivityTypeLU], dependencies=[Depends(ScopedUser.Read)], tags=["Activities"], ) def get_activity_types( db: Session = Depends(get_db), user: Users = Depends(get_current_user) ): - """ - Only returns activity types approved for user type. - """ - if user.user_role.name not in ["Admin", "Technician"]: - return [] - else: - activities = db.scalars(select(ActivityTypeLU)).all() - if user.user_role.name != "Admin": - return [ - activity - for activity in activities - if activity.name not in ["Sell", "Scrap"] - ] - - return activities + return activity_service.get_activity_types(db, user) @activity_router.get( @@ -622,49 +128,37 @@ def get_activity_types( tags=["Activities"], ) def get_users(db: Session = Depends(get_db)): - return db.scalars( - select(Users) - .options(undefer(Users.user_role_id)) - .where(Users.disabled == False) - ).all() + return activity_service.get_users(db) @activity_router.get( "/units", - response_model=List[meter_schemas.Unit], + response_model=List[meter.Unit], dependencies=[Depends(ScopedUser.Read)], tags=["Activities"], ) def get_units(db: Session = Depends(get_db)): - return db.scalars(select(Units)).all() + return activity_service.get_units(db) @activity_router.get( "/observed_property_types", - response_model=List[meter_schemas.ObservedPropertyTypeLU], + response_model=List[meter.ObservedPropertyTypeLU], dependencies=[Depends(ScopedUser.Read)], tags=["Activities"], ) def 
get_observed_property_types(db: Session = Depends(get_db)): - return ( - db.scalars( - select(ObservedPropertyTypeLU).options( - joinedload(ObservedPropertyTypeLU.units) - ) - ) - .unique() - .all() - ) + return activity_service.get_observed_property_types(db) @activity_router.get( "/service_types", - response_model=List[meter_schemas.ServiceTypeLU], + response_model=List[meter.ServiceTypeLU], dependencies=[Depends(ScopedUser.Read)], tags=["Activities"], ) def get_service_types(db: Session = Depends(get_db)): - return db.scalars(select(ServiceTypeLU)).all() + return activity_service.get_service_types(db) @activity_router.get( @@ -673,323 +167,4 @@ def get_service_types(db: Session = Depends(get_db)): tags=["Activities"], ) def get_note_types(db: Session = Depends(get_db)): - return db.scalars(select(NoteTypeLU)).all() - - -@activity_router.get( - "/work_orders", - dependencies=[Depends(ScopedUser.Read)], - tags=["Work Orders"], -) -def get_work_orders( - filter_by_status: Annotated[list[WorkOrderStatus], Query()] = [ - WorkOrderStatus.Open - ], - start_date: datetime = Query(datetime.strptime("2024-06-01", "%Y-%m-%d")), - work_order_id: Annotated[list[int] | None, Query()] = None, - assigned_user_id: int | None = None, - q: str | None = None, - db: Session = Depends(get_db), -): - stmt = ( - select(workOrders) - .options( - joinedload(workOrders.status), - joinedload(workOrders.meter), - joinedload(workOrders.assigned_user), - ) - .join(workOrderStatusLU) - .where(workOrderStatusLU.name.in_(filter_by_status)) - .where(workOrders.date_created >= start_date) - ) - - if work_order_id: - stmt = stmt.where(workOrders.id.in_(work_order_id)) - - if assigned_user_id: - stmt = stmt.where(workOrders.assigned_user_id == assigned_user_id) - - if q: - q_like = f"%{q.strip()}%" - stmt = stmt.where( - or_( - workOrders.title.ilike(q_like), - workOrders.description.ilike(q_like), - workOrders.creator.ilike(q_like), - workOrders.notes.ilike(q_like), - 
workOrders.meter.has(Meters.serial_number.ilike(q_like)), - ) - ) - - work_orders = db.scalars(stmt).all() - - # grab activities separately - relevant_activities = db.scalars( - select(MeterActivities) - .options(joinedload(MeterActivities.location)) - .where(MeterActivities.work_order_id.in_([wo.id for wo in work_orders])) - ).all() - - # group activities by work_order_id - activities_by_wo = {} - for act in relevant_activities: - activities_by_wo.setdefault(act.work_order_id, []).append( - { - "id": act.id, - "timestamp_start": act.timestamp_start, - "timestamp_end": act.timestamp_end, - "description": act.description, - "submitting_user_id": act.submitting_user_id, - "meter_id": act.meter_id, - "activity_type_id": act.activity_type_id, - "location_id": act.location_id, - "location_name": act.location.name if act.location else None, - "ose_share": act.ose_share, - "water_users": act.water_users, - } - ) - - # build output - output = [] - for wo in work_orders: - output.append( - { - "work_order_id": wo.id, - "ose_request_id": wo.ose_request_id, - "date_created": wo.date_created, - "creator": wo.creator, - "meter_id": wo.meter.id, - "meter_serial": wo.meter.serial_number, - "title": wo.title, - "description": wo.description, - "status": wo.status.name, - "notes": wo.notes, - "assigned_user_id": wo.assigned_user_id, - "assigned_user": wo.assigned_user.username - if wo.assigned_user - else None, - "associated_activities": activities_by_wo.get(wo.id, []), - } - ) - - return output - - -# Create work order endpoint -@activity_router.post( - "/work_orders", - dependencies=[Depends(ScopedUser.Admin)], - response_model=meter_schemas.WorkOrder, - tags=["Work Orders"], -) -def create_work_order( - new_work_order: meter_schemas.CreateWorkOrder, db: Session = Depends(get_db) -): - """ - Create a new work order dated to the current time. - The only mandatory inputs are the date, meter ID, and the title of the work order. 
- """ - # Get status ID Open - open_status = db.scalars( - select(workOrderStatusLU).where(workOrderStatusLU.name == "Open") - ).first() - - # Create a new work order - work_order = workOrders( - date_created=new_work_order.date_created, - meter_id=new_work_order.meter_id, - title=new_work_order.title, - status_id=open_status.id, - ) - - # Add optional fields if they exist - if new_work_order.description: - work_order.description = new_work_order.description - if new_work_order.notes: - work_order.notes = new_work_order.notes - if new_work_order.assigned_user_id: - work_order.assigned_user_id = new_work_order.assigned_user_id - if new_work_order.creator: - work_order.creator = new_work_order.creator - if new_work_order.ose_request_id: - work_order.ose_request_id = new_work_order.ose_request_id - - # Commit the work order - # Database should block empty title and non-unique (date, title, meter_id) combinations - try: - db.add(work_order) - db.commit() - except IntegrityError as _e: - raise HTTPException( - status_code=409, detail="Title empty or already exists for this meter." 
- ) - - # Create a WorkOrder schema for the updated work order - work_order_schema = meter_schemas.WorkOrder( - work_order_id=work_order.id, - date_created=work_order.date_created, - creator=work_order.creator, - meter_id=work_order.meter.id, - meter_serial=work_order.meter.serial_number, - title=work_order.title, - description=work_order.description, - status=work_order.status.name, - notes=work_order.notes, - assigned_user_id=work_order.assigned_user_id, - assigned_user=work_order.assigned_user.username - if work_order.assigned_user - else None, - ) - - return work_order_schema - - -# Patch work order endpoint -@activity_router.patch( - "/work_orders", - response_model=meter_schemas.WorkOrder, - tags=["Work Orders"], -) -def patch_work_order( - patch_work_order_form: meter_schemas.PatchWorkOrder, - user: Users = Depends(security.get_current_user), - db: Session = Depends(get_db), -): - """ - Patch a work order. - The input schema limits the fields that can be updated to the title, description, status, notes, and assigned user. - This is to prevent confusion with other open work orders. 
- """ - # Determine if update can be made by Tech - comparison_work_order = meter_schemas.PatchWorkOrder( - work_order_id=patch_work_order_form.work_order_id, - status=patch_work_order_form.status, - notes=patch_work_order_form.notes, - ) - - if comparison_work_order == patch_work_order_form: - update_scope = "Technician" - else: - update_scope = "Admin" - - # Check if the user has the correct permissions to update the work order - if user.user_role.name not in [update_scope, "Admin"]: - raise HTTPException( - status_code=403, - detail="User does not have permission to update this work order.", - ) - - # Get the work order - work_order = db.scalars( - select(workOrders) - .options( - joinedload(workOrders.status), - joinedload(workOrders.meter), - joinedload(workOrders.assigned_user), - ) - .where(workOrders.id == patch_work_order_form.work_order_id) - ).first() - - # Ensure the current user is assigned the work order if they are a technician - if user.user_role.name == "Technician": - if work_order.assigned_user_id != user.id: - raise HTTPException( - status_code=403, - detail="User does not have permission to update this work order.", - ) - - # An empty string for a title will silently fail due to the if statement below. Detect here and return an error to the user. 
- if patch_work_order_form.title == "": - raise HTTPException(status_code=422, detail="Title cannot be empty.") - - # Update the work order if the field exists - if patch_work_order_form.title: - work_order.title = patch_work_order_form.title - if patch_work_order_form.description: - work_order.description = patch_work_order_form.description - if patch_work_order_form.status: - # Get the status ID of the new status name - new_status = db.scalars( - select(workOrderStatusLU).where( - workOrderStatusLU.name == patch_work_order_form.status - ) - ).first() - work_order.status_id = new_status.id - if patch_work_order_form.notes: - work_order.notes = patch_work_order_form.notes - if patch_work_order_form.creator: - work_order.creator = patch_work_order_form.creator - if patch_work_order_form.assigned_user_id: - work_order.assigned_user_id = patch_work_order_form.assigned_user_id - - # Commit the changes - # Database should block empty title and non-unique (date, title, meter_id) combinations - try: - db.commit() - except IntegrityError as _e: - raise HTTPException( - status_code=409, detail="Title already exists for this meter." 
- ) - - # Get the updated work order (needed by the frontend) - work_order = db.scalars( - select(workOrders) - .options( - joinedload(workOrders.status), - joinedload(workOrders.meter), - joinedload(workOrders.assigned_user), - ) - .join(workOrderStatusLU) - .where(workOrders.id == patch_work_order_form.work_order_id) - ).first() - - # I was unable to get associated_activities to work with joinedload, so I'm doing it manually here - associated_activities = db.scalars( - select(MeterActivities).where(MeterActivities.work_order_id == work_order.id) - ).all() - - # Create a WorkOrder schema for the updated work order - work_order_schema = meter_schemas.WorkOrder( - work_order_id=work_order.id, - date_created=work_order.date_created, - creator=work_order.creator, - meter_id=work_order.meter.id, - meter_serial=work_order.meter.serial_number, - title=work_order.title, - description=work_order.description, - status=work_order.status.name, - notes=work_order.notes, - assigned_user_id=work_order.assigned_user_id, - assigned_user=work_order.assigned_user.username - if work_order.assigned_user - else None, - associated_activities=list(associated_activities), - ) - - return work_order_schema - - -# Delete work order endpoint -@activity_router.delete( - "/work_orders", - dependencies=[Depends(ScopedUser.Admin)], - tags=["Work Orders"], -) -def delete_work_order(work_order_id: int, db: Session = Depends(get_db)): - """ - Deletes a work order. 
- """ - # Get the work order - work_order = db.scalars( - select(workOrders).where(workOrders.id == work_order_id) - ).first() - - # Return error if the work order doesn't exist - if not work_order: - raise HTTPException(status_code=404, detail="Work order not found.") - - # Delete the work order - db.delete(work_order) - db.commit() - - return {"status": "success"} + return activity_service.get_note_types(db) diff --git a/api/routes/admin.py b/api/routes/admin.py index 71bd8703..f29876c9 100644 --- a/api/routes/admin.py +++ b/api/routes/admin.py @@ -5,13 +5,13 @@ from typing import List from passlib.context import CryptContext -from api.models.main_models import Users, UserRoles, SecurityScopes +from api.models.user import Users, UserRoles, SecurityScopes -from api.schemas import security_schemas -from api.schemas import admin_schemas +from api.schemas import security +from api.schemas import admin from api.session import get_db -from api.route_util import _patch -from api.enums import ScopedUser +from api.routes.utils import _patch +from api.auth.dependencies import ScopedUser from pathlib import Path from google.cloud import storage @@ -34,12 +34,12 @@ # define response models @admin_router.post( "/users/update_password", - response_model=security_schemas.User, + response_model=security.User, dependencies=[Depends(ScopedUser.Admin)], tags=["Admin"], ) def update_user_password( - updatedUserPassword: security_schemas.UpdatedUserPassword, + updatedUserPassword: security.UpdatedUserPassword, db: Session = Depends(get_db), ): user = db.scalars( @@ -55,12 +55,12 @@ def update_user_password( @admin_router.patch( "/users", - response_model=security_schemas.User, + response_model=security.User, dependencies=[Depends(ScopedUser.Admin)], tags=["Admin"], ) def update_user( - updated_user: security_schemas.UpdatedUser, db: Session = Depends(get_db) + updated_user: security.UpdatedUser, db: Session = Depends(get_db) ): _patch(db, Users, updated_user.id, updated_user) @@ 
-80,11 +80,11 @@ def update_user( @admin_router.post( "/users", - response_model=security_schemas.User, + response_model=security.User, dependencies=[Depends(ScopedUser.Admin)], tags=["Admin"], ) -def create_user(user: security_schemas.NewUser, db: Session = Depends(get_db)): +def create_user(user: security.NewUser, db: Session = Depends(get_db)): new_user = Users( username=user.username, email=user.email, @@ -114,7 +114,7 @@ def create_user(user: security_schemas.NewUser, db: Session = Depends(get_db)): @admin_router.get( "/users/{id}", - response_model=security_schemas.User, + response_model=security.User, dependencies=[Depends(ScopedUser.Admin)], tags=["Admin"], ) @@ -141,7 +141,7 @@ def get_user_admin(id: int, db: Session = Depends(get_db)): @admin_router.get( "/usersadmin", - response_model=List[security_schemas.User], + response_model=List[security.User], dependencies=[Depends(ScopedUser.Admin)], tags=["Admin"], ) @@ -165,7 +165,7 @@ def get_users_admin(db: Session = Depends(get_db)): @admin_router.get( "/security_scopes", - response_model=List[security_schemas.SecurityScope], + response_model=List[security.SecurityScope], dependencies=[Depends(ScopedUser.Admin)], tags=["Admin"], ) @@ -175,7 +175,7 @@ def get_security_scopes(db: Session = Depends(get_db)): @admin_router.get( "/roles", - response_model=List[security_schemas.UserRole], + response_model=List[security.UserRole], dependencies=[Depends(ScopedUser.Admin)], tags=["Admin"], ) @@ -189,11 +189,11 @@ def get_roles(db: Session = Depends(get_db)): @admin_router.post( "/roles", - response_model=security_schemas.UserRole, + response_model=security.UserRole, dependencies=[Depends(ScopedUser.Admin)], tags=["Admin"], ) -def create_role(new_role: security_schemas.UserRole, db: Session = Depends(get_db)): +def create_role(new_role: security.UserRole, db: Session = Depends(get_db)): scopes = [] if new_role.security_scopes: scope_ids = map(lambda s: s.id, new_role.security_scopes) @@ -216,11 +216,11 @@ def 
create_role(new_role: security_schemas.UserRole, db: Session = Depends(get_d @admin_router.patch( "/roles", - response_model=security_schemas.UserRole, + response_model=security.UserRole, dependencies=[Depends(ScopedUser.Admin)], tags=["Admin"], ) -def update_role(updated_role: security_schemas.UserRole, db: Session = Depends(get_db)): +def update_role(updated_role: security.UserRole, db: Session = Depends(get_db)): role = db.scalars(select(UserRoles).where(UserRoles.id == updated_role.id)).first() scope_ids = map(lambda s: s.id, updated_role.security_scopes) @@ -243,7 +243,7 @@ def update_role(updated_role: security_schemas.UserRole, db: Session = Depends(g @admin_router.get( "/db-backups", - response_model=List[admin_schemas.BackupFile], + response_model=List[admin.BackupFile], dependencies=[Depends(ScopedUser.Admin)], tags=["Admin"], ) @@ -267,7 +267,7 @@ def list_db_backups( blobs_iter = client.list_blobs(BUCKET_NAME, prefix=prefix) - results: list[admin_schemas.BackupFile] = [] + results: list[admin.BackupFile] = [] for i, blob in enumerate(blobs_iter): if i >= limit: break @@ -293,7 +293,7 @@ def list_db_backups( fmt = f"unknown ({ext})" if ext else "unknown" results.append( - admin_schemas.BackupFile( + admin.BackupFile( name=display_name, file_size=int(blob.size or 0), format=fmt, diff --git a/api/routes/chlorides.py b/api/routes/chlorides.py index 44dd32b2..6b727312 100644 --- a/api/routes/chlorides.py +++ b/api/routes/chlorides.py @@ -5,14 +5,16 @@ from weasyprint import HTML from io import BytesIO from fastapi import APIRouter, Depends, Query -from pydantic import BaseModel from sqlalchemy import and_, select from sqlalchemy.orm import Session, joinedload -from api.schemas import well_schemas -from api.models.main_models import WellMeasurements, Wells, Locations, WellUseLU +from api.schemas import chlorides +from api.schemas import well +from api.models.location import Locations +from api.models.well import WellMeasurements, Wells, WellUseLU from 
api.session import get_db -from api.enums import ScopedUser, SortDirection +from api.auth.dependencies import ScopedUser +from api.enums import SortDirection from pathlib import Path from jinja2 import Environment, FileSystemLoader, select_autoescape @@ -32,7 +34,7 @@ @public_chlorides_router.get( "/chlorides", - response_model=List[well_schemas.WellMeasurementDTO], + response_model=List[well.WellMeasurementDTO], tags=["Chlorides"], ) def read_chlorides( @@ -59,7 +61,7 @@ def read_chlorides( @public_chlorides_router.get( "/chloride_groups", - response_model=List[well_schemas.ChlorideGroupResponse], + response_model=List[well.ChlorideGroupResponse], tags=["Chlorides"], ) def get_chloride_groups( @@ -93,26 +95,10 @@ def get_chloride_groups( {"id": group_id, "names": sorted(names)} for group_id, names in groups.items() ] - -class MinMaxAvgMedCount(BaseModel): - min: Optional[float] = None - max: Optional[float] = None - avg: Optional[float] = None - median: Optional[float] = None - count: int = 0 - - -class ChlorideReportNums(BaseModel): - north: MinMaxAvgMedCount - south: MinMaxAvgMedCount - east: MinMaxAvgMedCount - west: MinMaxAvgMedCount - - @authenticated_chlorides_router.get( "/chlorides/report", dependencies=[Depends(ScopedUser.Read)], - response_model=ChlorideReportNums, + response_model=chlorides.ChlorideReportNums, tags=["Chlorides"], ) def get_chlorides_report( @@ -191,7 +177,7 @@ def get_chlorides_report( else: west_vals.append(float(val)) - return ChlorideReportNums( + return chlorides.ChlorideReportNums( north=_stats(north_vals), south=_stats(south_vals), east=_stats(east_vals), @@ -240,11 +226,11 @@ def download_chlorides_report_pdf( @authenticated_chlorides_router.post( "/chlorides", dependencies=[Depends(ScopedUser.WellMeasurementWrite)], - response_model=well_schemas.ChlorideMeasurement, + response_model=well.ChlorideMeasurement, tags=["Chlorides"], ) def add_chloride_measurement( - chloride_measurement: well_schemas.WellMeasurement, + 
chloride_measurement: well.WellMeasurement, db: Session = Depends(get_db), ): # Create a new chloride measurement as a WellMeasurement @@ -265,11 +251,11 @@ def add_chloride_measurement( @authenticated_chlorides_router.patch( "/chlorides", dependencies=[Depends(ScopedUser.WellMeasurementWrite)], - response_model=well_schemas.WellMeasurement, + response_model=well.WellMeasurement, tags=["Chlorides"], ) def patch_chloride_measurement( - chloride_measurement_patch: well_schemas.PatchChlorideMeasurement, + chloride_measurement_patch: well.PatchChlorideMeasurement, db: Session = Depends(get_db), ): well_measurement = ( @@ -306,12 +292,12 @@ def delete_chloride_measurement(chloride_measurement_id: int, db: Session = Depe return True -def _stats(values: List[Optional[float]]) -> MinMaxAvgMedCount: +def _stats(values: List[Optional[float]]) -> chlorides.MinMaxAvgMedCount: clean = [v for v in values if v is not None] if not clean: - return MinMaxAvgMedCount() + return chlorides.MinMaxAvgMedCount() - return MinMaxAvgMedCount( + return chlorides.MinMaxAvgMedCount( min=min(clean), max=max(clean), avg=sum(clean) / len(clean), diff --git a/api/routes/maintenance.py b/api/routes/maintenance.py index 4adff0cd..d6f3836f 100644 --- a/api/routes/maintenance.py +++ b/api/routes/maintenance.py @@ -1,7 +1,6 @@ from fastapi import Depends, APIRouter, Query from sqlalchemy import func from sqlalchemy.orm import Session -from pydantic import BaseModel from typing import List from datetime import datetime, date from fastapi.responses import StreamingResponse @@ -9,17 +8,13 @@ from io import BytesIO from collections import defaultdict from matplotlib.pyplot import figure, close -from api.models.main_models import ( - Users, - Meters, - MeterActivities, - ActivityTypeLU, - Locations, - workOrders, - workOrderStatusLU, -) +from api.models.location import Locations +from api.models.meter import ActivityTypeLU, MeterActivities, Meters +from api.models.user import Users +from 
api.models.work_order import workOrders, workOrderStatusLU +from api.schemas import maintenance from api.session import get_db -from api.enums import ScopedUser +from api.auth.dependencies import ScopedUser from pathlib import Path from jinja2 import Environment, FileSystemLoader, select_autoescape @@ -38,38 +33,10 @@ authenticated_maintenance_router = APIRouter() public_maintenance_router = APIRouter() - -class MeterSummary(BaseModel): - meter: str - count: int - - -class MaintenanceRow(BaseModel): - date_time: datetime - technician: str - meter: str - trss: str - number_of_repairs: int - number_of_pms: int - - -class MaintenanceSummaryResponse(BaseModel): - repairs_by_meter: List[MeterSummary] - pms_by_meter: List[MeterSummary] - table_rows: List[MaintenanceRow] - - -class HomeSummaryResponse(BaseModel): - completed_work_orders: int - repairs_processed: int - reinstallations_processed: int - preventative_maintenance_processed: int - - @public_maintenance_router.get( "/maintenance/home_summary", tags=["Maintenance"], - response_model=HomeSummaryResponse, + response_model=maintenance.HomeSummaryResponse, ) def get_home_summary(db: Session = Depends(get_db)): completed_work_orders = ( @@ -105,7 +72,7 @@ def get_home_summary(db: Session = Depends(get_db)): @authenticated_maintenance_router.get( "/maintenance", tags=["Maintenance"], - response_model=MaintenanceSummaryResponse, + response_model=maintenance.MaintenanceSummaryResponse, dependencies=[Depends(ScopedUser.Read)], ) def get_maintenance_summary( diff --git a/api/routes/meters.py b/api/routes/meters.py index 889a2b00..6bb8c6c9 100644 --- a/api/routes/meters.py +++ b/api/routes/meters.py @@ -5,44 +5,26 @@ from sqlalchemy.exc import IntegrityError from fastapi_pagination.ext.sqlalchemy import paginate from fastapi_pagination import LimitOffsetPage -from enum import Enum -from api.schemas import meter_schemas -from api.schemas import well_schemas -from api.models.main_models import ( - Meters, - LandOwners, - 
MeterActivities, - PartsUsed, - Parts, - MeterObservations, - Locations, - MeterTypeLU, - Wells, - MeterStatusLU, - meterRegisters, -) -from api.route_util import _patch, _get +from api.schemas import meter +from api.schemas import well +from api.models.location import LandOwners, Locations +from api.models.meter import Meters, MeterStatusLU, MeterTypeLU, meterRegisters +from api.models.well import Wells +from api.routes.utils import _patch, _get from api.session import get_db -from api.enums import ScopedUser, MeterSortByField, MeterStatus, SortDirection -from google.auth import default, impersonated_credentials -from google.cloud import storage -from datetime import timedelta - -import os +from api.services import meters as meter_service +from api.auth.dependencies import ScopedUser +from api.enums import MeterSortByField, MeterStatus, SortDirection authenticated_meter_router = APIRouter() public_meter_router = APIRouter() -# Generate random secret at startup -PHOTO_JWT_EXPIRE_SECONDS = 600 # 10 minutes -BUCKET_NAME = os.getenv("GCP_BUCKET_NAME", "") - # Get paginated, sorted list of meters, filtered by a search string if applicable @authenticated_meter_router.get( "/meters", dependencies=[Depends(ScopedUser.Read)], - response_model=LimitOffsetPage[meter_schemas.MeterListDTO], + response_model=LimitOffsetPage[meter.MeterListDTO], tags=["Meters"], ) def get_meters( @@ -111,12 +93,12 @@ def sort_by_field_to_schema_field(name: MeterSortByField): @authenticated_meter_router.post( "/meters", - response_model=meter_schemas.Meter, + response_model=meter.Meter, dependencies=[Depends(ScopedUser.Admin)], tags=["Meters"], ) def create_meter( - new_meter: meter_schemas.SubmitNewMeter, db: Session = Depends(get_db) + new_meter: meter.SubmitNewMeter, db: Session = Depends(get_db) ): """ Create a new meter. This requires a SN and meter type. 
@@ -173,7 +155,7 @@ def create_meter( @authenticated_meter_router.get( "/meters_locations", dependencies=[Depends(ScopedUser.Read)], - response_model=List[meter_schemas.MeterMapDTO], + response_model=List[meter.MeterMapDTO], tags=["Meters"], ) def get_meters_locations( @@ -237,7 +219,7 @@ def get_meters_locations( meter_map_list = [] for row in result: meter_map_list.append( - meter_schemas.MeterMapDTO( + meter.MeterMapDTO( id=row.id, serial_number=row.serial_number, well={ @@ -299,7 +281,7 @@ def get_meter( @authenticated_meter_router.get( "/meter_types", - response_model=List[meter_schemas.MeterTypeLU], + response_model=List[meter.MeterTypeLU], dependencies=[Depends(ScopedUser.Read)], tags=["Meters"], ) @@ -310,7 +292,7 @@ def get_meter_types(db: Session = Depends(get_db)): # A route to return register types from meter_register table @authenticated_meter_router.get( "/meter_registers", - response_model=List[meter_schemas.MeterRegister], + response_model=List[meter.MeterRegister], dependencies=[Depends(ScopedUser.Read)], tags=["Meters"], ) @@ -326,7 +308,7 @@ def get_meter_registers(db: Session = Depends(get_db)): # A route to return status types from the MeterStatusLU table @authenticated_meter_router.get( "/meter_status_types", - response_model=List[meter_schemas.MeterStatusLU], + response_model=List[meter.MeterStatusLU], dependencies=[Depends(ScopedUser.Read)], tags=["Meters"], ) @@ -336,12 +318,12 @@ def get_meter_status(db: Session = Depends(get_db)): @authenticated_meter_router.patch( "/meter_types", - response_model=meter_schemas.MeterTypeLU, + response_model=meter.MeterTypeLU, dependencies=[Depends(ScopedUser.Admin)], tags=["Meters"], ) def update_meter_type( - updated_meter_type: meter_schemas.MeterTypeLU, db: Session = Depends(get_db) + updated_meter_type: meter.MeterTypeLU, db: Session = Depends(get_db) ): _patch(db, MeterTypeLU, updated_meter_type.id, updated_meter_type) @@ -354,12 +336,12 @@ def update_meter_type( @authenticated_meter_router.post( 
"/meter_types", - response_model=meter_schemas.MeterTypeLU, + response_model=meter.MeterTypeLU, dependencies=[Depends(ScopedUser.Admin)], tags=["Meters"], ) def create_meter_type( - new_meter_type: meter_schemas.MeterTypeLU, db: Session = Depends(get_db) + new_meter_type: meter.MeterTypeLU, db: Session = Depends(get_db) ): new_type_model = MeterTypeLU( brand=new_meter_type.brand, @@ -380,7 +362,7 @@ def create_meter_type( @authenticated_meter_router.get( "/land_owners", dependencies=[Depends(ScopedUser.Read)], - response_model=List[well_schemas.LandOwner], + response_model=List[well.LandOwner], tags=["Meters"], ) def get_land_owners( @@ -392,11 +374,11 @@ def get_land_owners( @authenticated_meter_router.patch( "/meter", dependencies=[Depends(ScopedUser.Admin)], - response_model=meter_schemas.Meter, + response_model=meter.Meter, tags=["Meters"], ) def patch_meter( - updated_meter: meter_schemas.SubmitMeterUpdate, db: Session = Depends(get_db) + updated_meter: meter.SubmitMeterUpdate, db: Session = Depends(get_db) ): """ Update the current state of a meter. This is only used by Meter Details on the frontend. @@ -453,127 +435,4 @@ def patch_meter( "/meter_history", dependencies=[Depends(ScopedUser.Read)], tags=["Meters"] ) def get_meter_history(meter_id: int, db: Session = Depends(get_db)): - """ - Get a list of the given meters history. - No defined schema for this at the moment. 
- """ - - class HistoryType(Enum): - Activity = "Activity" - Observation = "Observation" - LocationChange = "LocationChange" - - activities = ( - db.scalars( - select(MeterActivities) - .options( - joinedload(MeterActivities.location), - joinedload(MeterActivities.submitting_user), - joinedload(MeterActivities.activity_type), - joinedload(MeterActivities.parts_used_links) - .joinedload(PartsUsed.part) - .joinedload(Parts.part_type), - joinedload(MeterActivities.notes), - joinedload(MeterActivities.services_performed), - ) - .filter(MeterActivities.meter_id == meter_id) - ) - .unique() - .all() - ) - - observations = db.scalars( - select(MeterObservations) - .options( - joinedload(MeterObservations.submitting_user), - joinedload(MeterObservations.observed_property), - joinedload(MeterObservations.unit), - joinedload(MeterObservations.location), - ) - .filter(MeterObservations.meter_id == meter_id) - ).all() - - # Take all the history object we just got from the database and make them into a object that's easy for the frontend to consume - formattedHistoryItems = [] - itemID = 0 - - for activity in activities: - activity.location.geom = None # FastAPI errors when returning this - - # Find if there is a well associated with the location - activity_well = db.scalars( - select(Wells).where(Wells.location_id == activity.location_id) - ).first() - - photos = [ - { - "id": photo.id, - "file_name": photo.file_name, - "url": create_signed_url(photo.gcs_path), - "uploaded_at": photo.uploaded_at, - } - for photo in activity.photos - ] - - formattedHistoryItems.append( - { - "id": itemID, - "history_type": HistoryType.Activity, - "well": activity_well, - "location": activity.location, - "activity_type": activity.activity_type_id, - "date": activity.timestamp_start, - "history_item": activity, - "photos": photos, - } - ) - itemID += 1 - - for observation in observations: - observation.location.geom = None - - # Find if there is a well associated with the location - 
observation_well = db.scalars( - select(Wells).where(Wells.location_id == observation.location_id) - ).first() - - formattedHistoryItems.append( - { - "id": itemID, - "history_type": HistoryType.Observation, - "well": observation_well, - "location": observation.location, - "date": observation.timestamp, - "history_item": observation, - } - ) - itemID += 1 - - # Add location history also - - formattedHistoryItems.sort(key=lambda x: x["date"], reverse=True) - - return formattedHistoryItems - - -def create_signed_url(blob_path: str) -> str: - """Create a v4 signed URL for a blob in GCS.""" - source_creds, _ = default() - target_sa = "pvacd-meterapp@waterdatainitiative-271000.iam.gserviceaccount.com" - - creds = impersonated_credentials.Credentials( - source_credentials=source_creds, - target_principal=target_sa, - target_scopes=["https://www.googleapis.com/auth/devstorage.read_only"], - lifetime=3600, - ) - - storage_client = storage.Client(credentials=creds) - bucket = storage_client.bucket(BUCKET_NAME) - blob = bucket.blob(blob_path) - url = blob.generate_signed_url( - version="v4", - expiration=timedelta(seconds=PHOTO_JWT_EXPIRE_SECONDS), - method="GET", - ) - return url + return meter_service.get_meter_history(db, meter_id) diff --git a/api/routes/notifications.py b/api/routes/notifications.py index 0efdad02..a5a8baae 100644 --- a/api/routes/notifications.py +++ b/api/routes/notifications.py @@ -6,9 +6,9 @@ from sqlalchemy import func, select from sqlalchemy.orm import Session, joinedload -from api.enums import ScopedUser -from api.models.main_models import Notifications, NotificationTypeLU, Users -from api.schemas.notification_schemas import ( +from api.auth.dependencies import ScopedUser +from api.models.user import Notifications, NotificationTypeLU, Users +from api.schemas.notifications import ( NotificationCreateRequest, NotificationCreateResult, Notification, diff --git a/api/routes/parts.py b/api/routes/parts.py index ca771877..7afd3ba5 100644 --- 
a/api/routes/parts.py +++ b/api/routes/parts.py @@ -1,628 +1,226 @@ -from fastapi import Depends, APIRouter, HTTPException, Query -from sqlalchemy.orm import Session, joinedload, selectinload -from sqlalchemy import select, func, literal, union_all -from typing import List, Union, Optional -from datetime import datetime, date, time -from fastapi.responses import StreamingResponse -from weasyprint import HTML -from io import BytesIO -from api.models.main_models import ( - Parts, - PartsUsed, - PartsAdded, - PartAssociation, - PartTypeLU, - Meters, - MeterTypeLU, - meterRegisters, - MeterActivities, +from datetime import date +from typing import List, Optional, Union + +from fastapi import APIRouter, Depends, HTTPException, Query +from fastapi.responses import StreamingResponse +from sqlalchemy import select +from sqlalchemy.exc import IntegrityError +from sqlalchemy.orm import Session, joinedload + +from api.auth.dependencies import ScopedUser +from api.models.meter import MeterTypeLU, Meters +from api.models.part import PartAssociation, PartTypeLU, Parts +from api.routes.utils import _get +from api.schemas import parts +from api.services import parts as part_service +from api.session import get_db + + +part_router = APIRouter() + + +@part_router.get( + "/parts", + response_model=List[parts.Part], + dependencies=[Depends(ScopedUser.Read)], + tags=["Parts"], ) -from api.schemas import part_schemas -from api.session import get_db -from api.route_util import _get -from api.enums import ScopedUser -from sqlalchemy.exc import IntegrityError -from pathlib import Path -from jinja2 import Environment, FileSystemLoader, select_autoescape - -TEMPLATES_DIR = Path(__file__).resolve().parent.parent / "templates" - -templates = Environment( - loader=FileSystemLoader(TEMPLATES_DIR), - autoescape=select_autoescape(["html", "xml"]), +def get_parts( + db: Session = Depends(get_db), + in_use: Optional[bool] = Query(None, description="Filter by in_use status"), +): + return 
part_service.list_parts(db, in_use) + + +@part_router.get( + "/parts/used", + tags=["Parts"], + dependencies=[Depends(ScopedUser.Read)], ) +def get_parts_used_summary( + from_date: date = Query(..., description="Start date YYYY-MM-DD"), + to_date: date = Query(..., description="End date YYYY-MM-DD"), + parts: List[int] = Query(...), + db: Session = Depends(get_db), +): + return part_service.get_parts_used_summary(db, from_date, to_date, parts) -part_router = APIRouter() +@part_router.get( + "/parts/used/pdf", + tags=["Parts"], + dependencies=[Depends(ScopedUser.Read)], +) +def download_parts_used_pdf( + from_date: date = Query(..., description="Start date YYYY-MM-DD"), + to_date: date = Query(..., description="End date YYYY-MM-DD"), + parts: List[int] = Query(...), + db: Session = Depends(get_db), +): + pdf_io = part_service.build_parts_used_pdf(db, from_date, to_date, parts) -def _build_part_history_response(part_id: int, db: Session) -> part_schemas.PartHistoryResponse: - part = db.scalars(select(Parts).where(Parts.id == part_id)).first() - if not part: - raise HTTPException(status_code=404, detail="Part not found") - - added_q = select( - PartsAdded.id.label("ref_id"), - PartsAdded.part_id.label("part_id"), - PartsAdded.date.label("event_date"), - literal("added").label("event_type"), - PartsAdded.note.label("note"), - PartsAdded.count.label("delta"), - literal(None).label("work_order_id"), - ).where(PartsAdded.part_id == part_id) - - used_q = ( - select( - PartsUsed.id.label("ref_id"), - PartsUsed.part_id.label("part_id"), - MeterActivities.timestamp_start.label("event_date"), - literal("used").label("event_type"), - func.nullif(func.trim(MeterActivities.description), "").label("note"), - (-PartsUsed.count).label("delta"), - MeterActivities.work_order_id.label("work_order_id"), - ) - .join(MeterActivities, MeterActivities.id == PartsUsed.meter_activity_id) - .where(PartsUsed.part_id == part_id) + return StreamingResponse( + pdf_io, + 
media_type="application/pdf", + headers={"Content-Disposition": "attachment; filename=parts_used_report.pdf"}, ) - events = union_all(added_q, used_q).subquery() - - rows = db.execute( - select( - events.c.ref_id, - events.c.part_id, - events.c.event_date, - events.c.event_type, - events.c.note, - events.c.delta, - events.c.work_order_id, - ).order_by(events.c.event_date.asc(), events.c.ref_id.asc()) - ).all() - running = int(part.initial_count) - history: list[part_schemas.PartHistoryRow] = [ - part_schemas.PartHistoryRow( - row_id=f"initial-{part_id}", - part_id=part_id, - event_date=datetime.min, - event_type="initial", - ref_id=None, - note="Initial count", - delta=0, - total_after=running, - work_order_id=None, - ) - ] - - for ref_id, pid, event_date, event_type, note, delta, work_order_id in rows: - if not isinstance(event_date, datetime): - event_date = datetime.combine(event_date, time.min) - - running += int(delta) - history.append( - part_schemas.PartHistoryRow( - row_id=f"{event_type}-{ref_id}", - part_id=pid, - event_date=event_date, - event_type=event_type, - ref_id=ref_id, - note=note, - delta=int(delta), - total_after=running, - work_order_id=work_order_id, - ) - ) +@part_router.get( + "/part_types", + response_model=List[parts.PartTypeLU], + dependencies=[Depends(ScopedUser.Read)], + tags=["Parts"], +) +def get_part_types(db: Session = Depends(get_db)): + return db.scalars(select(PartTypeLU)).all() - return part_schemas.PartHistoryResponse( - part_id=part.id, - part_number=part.part_number, - initial_count=part.initial_count, - current_count=running, - history=history, - ) - - -@part_router.get( - "/parts", - response_model=List[part_schemas.Part], - dependencies=[Depends(ScopedUser.Read)], - tags=["Parts"], -) -def get_parts( - db: Session = Depends(get_db), - in_use: Optional[bool] = Query(None, description="Filter by in_use status"), -): - used_subq = ( - select( - PartsUsed.part_id.label("part_id"), - func.coalesce(func.sum(PartsUsed.count), 
0).label("used_sum"), - ) - .group_by(PartsUsed.part_id) - .subquery() - ) - - added_subq = ( - select( - PartsAdded.part_id.label("part_id"), - func.coalesce(func.sum(PartsAdded.count), 0).label("added_sum"), - ) - .group_by(PartsAdded.part_id) - .subquery() - ) - - current_count = ( - Parts.initial_count - + func.coalesce(added_subq.c.added_sum, 0) - - func.coalesce(used_subq.c.used_sum, 0) - ).label("current_count") - - stmt = ( - select(Parts, current_count) - .outerjoin(used_subq, used_subq.c.part_id == Parts.id) - .outerjoin(added_subq, added_subq.c.part_id == Parts.id) - .options(selectinload(Parts.part_type)) - ) - - if in_use is not None: - stmt = stmt.where(Parts.in_use == in_use) - - rows = db.execute(stmt).all() - - results = [] - for part, curr in rows: - part.current_count = curr - results.append(part) - - return results - - -@part_router.get( - "/parts/used", - tags=["Parts"], - dependencies=[Depends(ScopedUser.Read)], -) -def get_parts_used_summary( - from_date: date = Query(..., description="Start date YYYY-MM-DD"), - to_date: date = Query(..., description="End date YYYY-MM-DD"), - parts: List[int] = Query(...), - db: Session = Depends(get_db), -): - # Convert to datetimes for inclusive range - start_dt = datetime.combine(from_date, datetime.min.time()) - end_dt = datetime.combine(to_date, datetime.max.time()) - - usage_subq = ( - db.query( - PartsUsed.part_id.label("used_part_id"), - func.coalesce(func.sum(PartsUsed.count), 0).label("quantity"), - ) - .join(MeterActivities, MeterActivities.id == PartsUsed.meter_activity_id) - .filter( - MeterActivities.timestamp_start >= start_dt, - MeterActivities.timestamp_start <= end_dt, - PartsUsed.part_id.in_(parts), - ) - .group_by(PartsUsed.part_id) - .subquery() - ) - - query = ( - db.query( - Parts.id.label("id"), - Parts.part_number, - Parts.description, - Parts.price, - func.coalesce(usage_subq.c.quantity, 0).label("quantity"), - ) - .outerjoin(usage_subq, Parts.id == usage_subq.c.used_part_id) - 
.filter(Parts.id.in_(parts)) - .order_by(Parts.part_number) - ) - - results = [] - for row in query.all(): - price = float(row.price or 0) - quantity = int(row.quantity or 0) - total = price * quantity - results.append( - { - "id": row.id, - "part_number": row.part_number, - "description": row.description, - "price": price, - "quantity": quantity, - "total": total, - } - ) - - return results - - -@part_router.get( - "/parts/used/pdf", - tags=["Parts"], - dependencies=[Depends(ScopedUser.Read)], -) -def download_parts_used_pdf( - from_date: date = Query(..., description="Start date YYYY-MM-DD"), - to_date: date = Query(..., description="End date YYYY-MM-DD"), - parts: List[int] = Query(...), - db: Session = Depends(get_db), -): - # Re-use your existing logic - results = get_parts_used_summary( - from_date=from_date, to_date=to_date, parts=parts, db=db - ) - - # Add running total just for PDF - running_total = 0.0 - for r in results: - running_total += r["total"] - r["running_total"] = running_total - - template = templates.get_template("parts_used_report.html") - html_content = template.render( - rows=results, - from_date=from_date, - to_date=to_date, - ) - pdf_io = BytesIO() - HTML(string=html_content).write_pdf(pdf_io) - pdf_io.seek(0) - - return StreamingResponse( - pdf_io, - media_type="application/pdf", - headers={"Content-Disposition": "attachment; filename=parts_used_report.pdf"}, - ) - - -@part_router.get( - "/part_types", - response_model=List[part_schemas.PartTypeLU], - dependencies=[Depends(ScopedUser.Read)], - tags=["Parts"], -) -def get_part_types(db: Session = Depends(get_db)): - return db.scalars(select(PartTypeLU)).all() - - -@part_router.get( - "/part", - response_model=Union[part_schemas.Part, part_schemas.Register], - dependencies=[Depends(ScopedUser.Read)], - tags=["Parts"], -) -def get_part(part_id: int, db: Session = Depends(get_db)): - used_subq = ( - select( - PartsUsed.part_id.label("part_id"), - func.coalesce(func.sum(PartsUsed.count), 
0).label("used_sum"), - ) - .group_by(PartsUsed.part_id) - .subquery() - ) - - added_subq = ( - select( - PartsAdded.part_id.label("part_id"), - func.coalesce(func.sum(PartsAdded.count), 0).label("added_sum"), - ) - .group_by(PartsAdded.part_id) - .subquery() - ) - - current_count = ( - Parts.initial_count - + func.coalesce(added_subq.c.added_sum, 0) - - func.coalesce(used_subq.c.used_sum, 0) - ).label("current_count") - - row = db.execute( - select(Parts, current_count) - .outerjoin(used_subq, used_subq.c.part_id == Parts.id) - .outerjoin(added_subq, added_subq.c.part_id == Parts.id) - .where(Parts.id == part_id) - .options( - selectinload(Parts.part_type), - selectinload(Parts.meter_types), - ) - ).first() - - if not row: - return None - - selected_part, curr = row - selected_part.current_count = curr - - # Create the part_schemas.Part instance - returned_part = part_schemas.Part.model_validate(selected_part) - - # If part_type is a Register, we need to load the register details - if selected_part and selected_part.part_type.name == "Register": - register_details = db.scalars( - select(meterRegisters).where(meterRegisters.part_id == selected_part.id) - ).first() - - register_details_obj = None - if register_details is not None: - register_details_obj = ( - part_schemas.Register.register_details.model_validate(register_details) - ) - - # Update the returned_part to include register details - returned_part = part_schemas.Register( - **returned_part.model_dump(exclude_unset=True), - register_settings=register_details_obj, - ) - - return returned_part - - -@part_router.patch( - "/part", - response_model=part_schemas.Part, - dependencies=[Depends(ScopedUser.Admin)], - tags=["Parts"], -) -def update_part(updated_part: part_schemas.Part, db: Session = Depends(get_db)): - # Update the part (this won't include secondary attributes like associations) - part_db = _get(db, Parts, updated_part.id) - - for k, v in updated_part.model_dump(exclude_unset=True).items(): - if k in 
["part_type", "meter_types", "current_count"]: - continue - try: - setattr(part_db, k, v) - except AttributeError as e: - print(e) - continue - - try: - db.add(part_db) - db.commit() - except IntegrityError: - raise HTTPException(status_code=409, detail="Part SN already exists") - - # Load the updated part to get the relationships - part = db.scalars( - select(Parts) - .where(Parts.id == updated_part.id) - .options(joinedload(Parts.part_type)) - ).first() - - # Update associations, _patch only handles direct attributes - if updated_part.meter_types: - part.meter_types = db.scalars( - select(MeterTypeLU).where( - MeterTypeLU.id.in_(map(lambda type: type.id, updated_part.meter_types)) - ) - ).all() - - db.commit() - db.refresh(part) - - return part - - -@part_router.post( - "/parts", - response_model=part_schemas.Part, - dependencies=[Depends(ScopedUser.Admin)], - tags=["Parts"], -) -def create_part(new_part: part_schemas.Part, db: Session = Depends(get_db)): - new_part_model = Parts( - part_number=new_part.part_number, - part_type_id=new_part.part_type_id, - description=new_part.description, - vendor=new_part.vendor, - initial_count=new_part.initial_count, - note=new_part.note, - in_use=new_part.in_use, - commonly_used=new_part.commonly_used, - price=new_part.price, - ) - - try: - db.add(new_part_model) - db.commit() - except IntegrityError: - raise HTTPException(status_code=409, detail="Part SN already exists") - - # Associate with meter types - if new_part.meter_types: - new_part_model.meter_types = db.scalars( - select(MeterTypeLU).where( - MeterTypeLU.id.in_(map(lambda type: type.id, new_part.meter_types)) - ) - ).all() - - db.commit() - db.refresh(new_part_model) - - # Load part_type relationship - new_part_model.part_type - - return new_part_model - - -@part_router.get( - "/meter_parts", - response_model=List[part_schemas.Part], - dependencies=[Depends(ScopedUser.Read)], - tags=["Parts"], -) -def get_meter_parts(meter_id: int, db: Session = Depends(get_db)): - 
meter_type_id = db.scalars( - select(Meters.meter_type_id).where(Meters.id == meter_id) - ).first() - - part_id_list = db.scalars( - select(PartAssociation.c.part_id).where( - PartAssociation.c.meter_type_id == meter_type_id - ) - ).all() - - meter_parts = db.scalars( - select(Parts) - .where(Parts.id.in_(part_id_list)) - .options(joinedload(Parts.part_type)) - ).all() - - return meter_parts - - -@part_router.post( - "/parts/add", - response_model=part_schemas.Part, - dependencies=[Depends(ScopedUser.Admin)], - tags=["Parts"], -) -def add_parts(payload: part_schemas.PartsAddRequest, db: Session = Depends(get_db)): - # Ensure part exists - part = db.scalars(select(Parts).where(Parts.id == payload.part_id)).first() - if not part: - raise HTTPException(status_code=404, detail="Part not found") - - # Insert PartsAdded row (do NOT mutate Parts.initial_count) - added = PartsAdded( - part_id=payload.part_id, - count=payload.count, - date=payload.date, - note=payload.note, - ) - db.add(added) - db.commit() - - # Return updated part with current_count computed (same formula) - used_subq = ( - select( - PartsUsed.part_id.label("part_id"), - func.coalesce(func.sum(PartsUsed.count), 0).label("used_sum"), - ) - .group_by(PartsUsed.part_id) - .subquery() - ) - - added_subq = ( - select( - PartsAdded.part_id.label("part_id"), - func.coalesce(func.sum(PartsAdded.count), 0).label("added_sum"), - ) - .group_by(PartsAdded.part_id) - .subquery() - ) - - current_count = ( - Parts.initial_count - + func.coalesce(added_subq.c.added_sum, 0) - - func.coalesce(used_subq.c.used_sum, 0) - ).label("current_count") - - row = db.execute( - select(Parts, current_count) - .outerjoin(used_subq, used_subq.c.part_id == Parts.id) - .outerjoin(added_subq, added_subq.c.part_id == Parts.id) - .where(Parts.id == payload.part_id) - .options(selectinload(Parts.part_type), selectinload(Parts.meter_types)) - ).first() - - if not row: - raise HTTPException(status_code=404, detail="Part not found") - - 
part_obj, curr = row - part_obj.current_count = curr - return part_obj - - -@part_router.get( - "/parts/{part_id}/history", - response_model=part_schemas.PartHistoryResponse, - dependencies=[Depends(ScopedUser.Admin)], - tags=["Parts"], -) -def get_part_history(part_id: int, db: Session = Depends(get_db)): - return _build_part_history_response(part_id, db) + +@part_router.get( + "/part", + response_model=Union[parts.Part, parts.Register], + dependencies=[Depends(ScopedUser.Read)], + tags=["Parts"], +) +def get_part(part_id: int, db: Session = Depends(get_db)): + return part_service.get_part(db, part_id) @part_router.patch( - "/parts/{part_id}/history", - response_model=part_schemas.PartHistoryResponse, + "/part", + response_model=parts.Part, dependencies=[Depends(ScopedUser.Admin)], tags=["Parts"], ) -def patch_part_history( - part_id: int, - payload: part_schemas.PartHistoryUpdateRequest, - db: Session = Depends(get_db), -): - part = db.scalars(select(Parts).where(Parts.id == part_id)).first() - if not part: - raise HTTPException(status_code=404, detail="Part not found") - - for row in payload.rows: - normalized_note = row.note.strip() if row.note else None - if normalized_note == "": - normalized_note = None - - if row.event_type == "added": - if row.delta <= 0: - raise HTTPException( - status_code=422, - detail="Added parts rows must have a positive change.", - ) - - added_row = db.scalars( - select(PartsAdded).where( - PartsAdded.id == row.ref_id, - PartsAdded.part_id == part_id, - ) - ).first() - if not added_row: - raise HTTPException(status_code=404, detail="Parts added row not found.") - - added_row.count = row.delta - added_row.date = row.event_date.date() - added_row.note = normalized_note +def update_part(updated_part: parts.Part, db: Session = Depends(get_db)): + part_db = _get(db, Parts, updated_part.id) + + for k, v in updated_part.model_dump(exclude_unset=True).items(): + if k in ["part_type", "meter_types", "current_count"]: + continue + try: + 
setattr(part_db, k, v) + except AttributeError as e: + print(e) continue - if row.delta >= 0: - raise HTTPException( - status_code=422, - detail="Work order rows must have a negative change.", - ) + try: + db.add(part_db) + db.commit() + except IntegrityError: + raise HTTPException(status_code=409, detail="Part SN already exists") - parts_used_row = db.scalars( - select(PartsUsed).where( - PartsUsed.id == row.ref_id, - PartsUsed.part_id == part_id, - ) - ).first() - if not parts_used_row: - raise HTTPException(status_code=404, detail="Parts used row not found.") + part = db.scalars( + select(Parts).where(Parts.id == updated_part.id).options(joinedload(Parts.part_type)) + ).first() - activity = db.scalars( - select(MeterActivities).where( - MeterActivities.id == parts_used_row.meter_activity_id - ) - ).first() - if not activity: - raise HTTPException( - status_code=404, - detail="Meter activity for parts used row not found.", + if updated_part.meter_types: + part.meter_types = db.scalars( + select(MeterTypeLU).where( + MeterTypeLU.id.in_(map(lambda type: type.id, updated_part.meter_types)) ) + ).all() - original_start = activity.timestamp_start - original_end = activity.timestamp_end - duration = original_end - original_start if original_end and original_start else None + db.commit() + db.refresh(part) - parts_used_row.count = abs(row.delta) - activity.timestamp_start = row.event_date - activity.description = normalized_note - if duration is not None: - activity.timestamp_end = row.event_date + duration - else: - activity.timestamp_end = row.event_date + return part + + +@part_router.post( + "/parts", + response_model=parts.Part, + dependencies=[Depends(ScopedUser.Admin)], + tags=["Parts"], +) +def create_part(new_part: parts.Part, db: Session = Depends(get_db)): + new_part_model = Parts( + part_number=new_part.part_number, + part_type_id=new_part.part_type_id, + description=new_part.description, + vendor=new_part.vendor, + initial_count=new_part.initial_count, + 
note=new_part.note, + in_use=new_part.in_use, + commonly_used=new_part.commonly_used, + price=new_part.price, + ) + + try: + db.add(new_part_model) + db.commit() + except IntegrityError: + raise HTTPException(status_code=409, detail="Part SN already exists") + + if new_part.meter_types: + new_part_model.meter_types = db.scalars( + select(MeterTypeLU).where( + MeterTypeLU.id.in_(map(lambda type: type.id, new_part.meter_types)) + ) + ).all() db.commit() - return _build_part_history_response(part_id, db) + db.refresh(new_part_model) + new_part_model.part_type + + return new_part_model + + +@part_router.get( + "/meter_parts", + response_model=List[parts.Part], + dependencies=[Depends(ScopedUser.Read)], + tags=["Parts"], +) +def get_meter_parts(meter_id: int, db: Session = Depends(get_db)): + meter_type_id = db.scalars( + select(Meters.meter_type_id).where(Meters.id == meter_id) + ).first() + + part_id_list = db.scalars( + select(PartAssociation.c.part_id).where( + PartAssociation.c.meter_type_id == meter_type_id + ) + ).all() + + meter_parts = db.scalars( + select(Parts) + .where(Parts.id.in_(part_id_list)) + .options(joinedload(Parts.part_type)) + ).all() + + return meter_parts + + +@part_router.post( + "/parts/add", + response_model=parts.Part, + dependencies=[Depends(ScopedUser.Admin)], + tags=["Parts"], +) +def add_parts(payload: parts.PartsAddRequest, db: Session = Depends(get_db)): + return part_service.add_parts(db, payload) + + +@part_router.get( + "/parts/{part_id}/history", + response_model=parts.PartHistoryResponse, + dependencies=[Depends(ScopedUser.Admin)], + tags=["Parts"], +) +def get_part_history(part_id: int, db: Session = Depends(get_db)): + return part_service.build_part_history_response(part_id, db) + + +@part_router.patch( + "/parts/{part_id}/history", + response_model=parts.PartHistoryResponse, + dependencies=[Depends(ScopedUser.Admin)], + tags=["Parts"], +) +def patch_part_history( + part_id: int, + payload: parts.PartHistoryUpdateRequest, + db: 
Session = Depends(get_db), +): + return part_service.patch_part_history(db, part_id, payload) diff --git a/api/routes/settings.py b/api/routes/settings.py index 90c51e15..4d2ce294 100644 --- a/api/routes/settings.py +++ b/api/routes/settings.py @@ -5,10 +5,10 @@ from PIL import Image, UnidentifiedImageError from sqlalchemy.exc import SQLAlchemyError from sqlalchemy.orm import Session -from api.schemas.base import ORMBase +from api.schemas import settings from api.session import get_db from api.security import get_current_user, get_password_hash, verify_password -from api.models.main_models import Users +from api.models.user import Users settings_router = APIRouter() @@ -36,17 +36,12 @@ def get_redirect_page( return {"redirect_page": db_user.redirect_page} - -class RedirectPageUpdate(ORMBase): - redirect_page: str - - @settings_router.post( "/settings/redirect_page", tags=["settings"], ) def post_redirect_page( - update: RedirectPageUpdate, + update: settings.RedirectPageUpdate, db: Session = Depends(get_db), user: Users = Depends(get_current_user), ): @@ -60,22 +55,12 @@ def post_redirect_page( return {"message": "Redirect page updated", "redirect_page": db_user.redirect_page} - -class DisplayNameUpdate(ORMBase): - display_name: str - - -class PasswordResetRequest(ORMBase): - current_password: str - new_password: str - - @settings_router.post( "/settings/display_name", tags=["settings"], ) def post_redirect_page( - update: DisplayNameUpdate, + update: settings.DisplayNameUpdate, db: Session = Depends(get_db), user: Users = Depends(get_current_user), ): @@ -95,7 +80,7 @@ def post_redirect_page( tags=["settings"], ) def post_password_reset( - update: PasswordResetRequest, + update: settings.PasswordResetRequest, db: Session = Depends(get_db), user: Users = Depends(get_current_user), ): diff --git a/api/routes/user_sessions.py b/api/routes/user_sessions.py index 466c96af..09832923 100644 --- a/api/routes/user_sessions.py +++ b/api/routes/user_sessions.py @@ -1,74 
+1,23 @@ from collections import defaultdict -from datetime import datetime -from typing import Optional from fastapi import APIRouter, Depends, HTTPException from sqlalchemy.orm import Session from starlette import status -from api.models.main_models import UserSessions, Users -from api.schemas.base import ORMBase +from api.models.user import UserSessions, Users +from api.schemas import user_sessions from api.security import get_current_user, get_session_identifier_from_token, oauth2_scheme from api.session import get_db -from api.session_tracking import mark_session_signed_out +from api.auth.session_tracking import mark_session_signed_out user_sessions_router = APIRouter(tags=["Login"]) - -class SessionSignOutRequest(ORMBase): - sign_out_reason_name: str - fingerprint_hash: Optional[str] = None - - -class ExpiredSessionSignOutRequest(SessionSignOutRequest): - session_identifier: str - - -class UserSessionSummary(ORMBase): - session_identifier: str - device_label: str | None = None - device_type: str | None = None - browser: str | None = None - operating_system: str | None = None - ip_address: str | None = None - signed_in_at: datetime - last_seen_at: datetime - signed_out_at: datetime | None = None - is_active: bool - sign_out_reason_name: str | None = None - is_current: bool - - -class KnownDeviceSummary(ORMBase): - device_key: str - device_label: str | None = None - device_type: str | None = None - browser: str | None = None - operating_system: str | None = None - session_count: int - active_session_count: int - signed_in_at_first: datetime - last_seen_at: datetime - is_current_device: bool - - -class UserSessionsResponse(ORMBase): - current_session_identifier: str | None = None - sessions: list[UserSessionSummary] - known_devices: list[KnownDeviceSummary] - - -class CurrentSessionStatusResponse(ORMBase): - session_identifier: str - is_active: bool - - def serialize_session( session: UserSessions, *, current_session_identifier: str | None, -) -> 
UserSessionSummary: - return UserSessionSummary( +) -> user_sessions.UserSessionSummary: + return user_sessions.UserSessionSummary( session_identifier=session.session_identifier, device_label=session.device_label, device_type=session.device_type, @@ -101,7 +50,7 @@ def get_known_device_key(session: UserSessions) -> str: @user_sessions_router.get( "/user-sessions/current/status", - response_model=CurrentSessionStatusResponse, + response_model=user_sessions.CurrentSessionStatusResponse, ) def get_current_session_status( _: Users = Depends(get_current_user), @@ -114,7 +63,7 @@ def get_current_session_status( detail="Session identifier is missing from token", ) - return CurrentSessionStatusResponse( + return user_sessions.CurrentSessionStatusResponse( session_identifier=current_session_identifier, is_active=True, ) @@ -122,7 +71,7 @@ def get_current_session_status( @user_sessions_router.get( "/user-sessions", - response_model=UserSessionsResponse, + response_model=user_sessions.UserSessionsResponse, ) def list_user_sessions( db: Session = Depends(get_db), @@ -149,7 +98,7 @@ def list_user_sessions( for session in sessions: grouped_sessions[get_known_device_key(session)].append(session) - known_devices: list[KnownDeviceSummary] = [] + known_devices: list[user_sessions.KnownDeviceSummary] = [] for device_key, device_sessions in grouped_sessions.items(): ordered_sessions = sorted( device_sessions, @@ -158,7 +107,7 @@ def list_user_sessions( ) newest_session = ordered_sessions[0] known_devices.append( - KnownDeviceSummary( + user_sessions.KnownDeviceSummary( device_key=device_key, device_label=newest_session.device_label, device_type=newest_session.device_type, @@ -184,7 +133,7 @@ def list_user_sessions( reverse=True, ) - return UserSessionsResponse( + return user_sessions.UserSessionsResponse( current_session_identifier=current_session_identifier, sessions=serialized_sessions, known_devices=known_devices, @@ -231,7 +180,7 @@ def revoke_user_session( 
@user_sessions_router.post("/logout") def logout_current_session( - payload: SessionSignOutRequest, + payload: user_sessions.SessionSignOutRequest, db: Session = Depends(get_db), _: Users = Depends(get_current_user), token: str = Depends(oauth2_scheme), @@ -259,7 +208,7 @@ def logout_current_session( @user_sessions_router.post("/logout/expired") def logout_expired_session( - payload: ExpiredSessionSignOutRequest, + payload: user_sessions.ExpiredSessionSignOutRequest, db: Session = Depends(get_db), ): session = mark_session_signed_out( diff --git a/api/route_util.py b/api/routes/utils.py similarity index 100% rename from api/route_util.py rename to api/routes/utils.py diff --git a/api/routes/well_measurements.py b/api/routes/well_measurements.py index 3a7704dc..348ea06d 100644 --- a/api/routes/well_measurements.py +++ b/api/routes/well_measurements.py @@ -1,48 +1,17 @@ -from typing import List, Optional, Any, Dict +from typing import List, Optional from datetime import datetime, date -import re from fastapi import Depends, APIRouter, Query, HTTPException from fastapi.responses import StreamingResponse -from sqlalchemy.orm import Session, joinedload -from sqlalchemy import select, and_, func +from sqlalchemy.orm import Session +from sqlalchemy import select -from weasyprint import HTML -from io import BytesIO -from collections import defaultdict -from matplotlib.pyplot import figure, close -from base64 import b64encode - -from api.schemas import well_schemas -from api.models.main_models import ( - WellMeasurements, - ObservedPropertyTypeLU, - Units, - Wells, -) +from api.schemas import well +from api.models.meter import ObservedPropertyTypeLU, Units +from api.models.well import WellMeasurements from api.session import get_db -from api.enums import ScopedUser -from google.cloud import storage - -from pathlib import Path -from jinja2 import Environment, FileSystemLoader, select_autoescape -from zoneinfo import ZoneInfo - -import zlib -import json -import os -import 
matplotlib - -matplotlib.use("Agg") # Force non-GUI backend - -WOODPECKER_BUCKET_NAME = os.getenv("GCP_WOODPECKER_BUCKET_NAME", "") - -TEMPLATES_DIR = Path(__file__).resolve().parent.parent / "templates" - -templates = Environment( - loader=FileSystemLoader(TEMPLATES_DIR), - autoescape=select_autoescape(["html", "xml"]), -) +from api.auth.dependencies import ScopedUser +from api.services import well_measurements as well_measurement_service authenticated_well_measurement_router = APIRouter() public_well_measurement_router = APIRouter() @@ -51,11 +20,11 @@ @authenticated_well_measurement_router.post( "/waterlevels", dependencies=[Depends(ScopedUser.WellMeasurementWrite)], - response_model=well_schemas.WellMeasurement, + response_model=well.WellMeasurement, tags=["WaterLevels"], ) def add_waterlevel( - waterlevel: well_schemas.NewWaterLevelMeasurement, db: Session = Depends(get_db) + waterlevel: well.NewWaterLevelMeasurement, db: Session = Depends(get_db) ): # Create the well measurement from the form, qualify with units and property type well_measurement = WellMeasurements( @@ -81,111 +50,21 @@ def add_waterlevel( @public_well_measurement_router.get( "/waterlevels/woodpeckers", - response_model=List[well_schemas.WellMeasurementDTO], + response_model=List[well.WellMeasurementDTO], tags=["WaterLevels"], ) def read_woodpecker_waterlevels( well_id: int = Query(..., description="At least one well ID is required"), ): - SP_JOHNSON_WELL_ID = 2599 - - if well_id != SP_JOHNSON_WELL_ID: - raise HTTPException(status_code=400, detail="Invalid well ID") - - DEPTH_TO_WATER_SENSOR_NAME = "Depth to Water" - - results: List[well_schemas.WellMeasurementDTO] = [] - seen_timestamps: set[str] = set() - - client = storage.Client() - bucket = client.bucket(WOODPECKER_BUCKET_NAME) - - for blob in bucket.list_blobs(): - if not blob.name.endswith(".json"): - continue - - content = blob.download_as_text() - payload = json.loads(content) - - device_attributes = payload.get("deviceAttributes") 
or {} - tz_name = device_attributes.get("timeZone") or "UTC" - ra_number = device_attributes.get("wellId") or "" # e.g. "RA-3502" - - sensor_data = payload.get("sensorData") or [] - depth_sensor = next( - ( - s - for s in sensor_data - if (s.get("sensorName") or "").strip() == DEPTH_TO_WATER_SENSOR_NAME - ), - None, - ) - if not depth_sensor: - # No "Depth to Water" in this file; skip - continue - - measurements = depth_sensor.get("measurements") or [] - for m in measurements: - raw_ts = m.get("timestamp") - if not raw_ts: - continue - - ts = _parse_woodpecker_timestamp(raw_ts, tz_name) - - # Deduplicate by exact instant string (timezone-aware isoformat if tz parsed) - ts_key = ts.isoformat() - if ts_key in seen_timestamps: - continue - seen_timestamps.add(ts_key) - - raw_value = m.get("data") - value = abs(raw_value) if raw_value is not None else None - - measurement_id = _make_measurement_id(well_id, ts, value) - - results.append( - well_schemas.WellMeasurementDTO( - id=measurement_id, - timestamp=ts, - value=value, - submitting_user=well_schemas.WellMeasurementDTO.UserDTO( - full_name="Woodpeckers" - ), - well=well_schemas.WellMeasurementDTO.WellDTO(ra_number=ra_number), - ) - ) - - # Sort combined results across all files - results.sort(key=lambda r: r.timestamp) - return results - - -def _parse_woodpecker_timestamp(ts: str, tz_name: str) -> datetime: - """ - Payload timestamp format: "DD/MM/YYYY HH:mm:ss" - Example: "29/12/2025 00:20:40" - """ - dt_naive = datetime.strptime(ts, "%d/%m/%Y %H:%M:%S") try: - tz = ZoneInfo(tz_name) - except Exception: - # Fallback: keep naive if timezone is missing/invalid - return dt_naive - return dt_naive.replace(tzinfo=tz) - - -def _make_measurement_id(well_id: int, ts: datetime, value: Optional[float]) -> int: - """ - Since the incoming format doesn't provide an integer measurement id, - generate a deterministic-ish int id from well_id + timestamp + value. 
- """ - key = f"{well_id}|{ts.isoformat()}|{value if value is not None else 'null'}" - return zlib.crc32(key.encode("utf-8")) + return well_measurement_service.read_woodpecker_waterlevels(well_id) + except ValueError as exc: + raise HTTPException(status_code=400, detail=str(exc)) @public_well_measurement_router.get( "/waterlevels", - response_model=List[well_schemas.WellMeasurementDTO], + response_model=List[well.WellMeasurementDTO], tags=["WaterLevels"], ) def read_waterlevels( @@ -201,139 +80,18 @@ def read_waterlevels( comparisonYear: Optional[str] = Query(None, pattern=r"^$|^\d{4}$"), db: Session = Depends(get_db), ): - """ - Return well measurements, optionally filtered by from_date/to_date, - with optional averaging and historical comparison. - """ - MONITORING_USE_TYPE_ID = 11 - synthetic_id_counter = -1 - - def group_and_average(measurements, group_by_label: str): - from collections import defaultdict - - grouped = defaultdict(list) - for m in measurements: - key = m.timestamp.strftime( - "%Y-%m" if group_by_label == "month" else "%Y-%m-%d" - ) - grouped[key].append(m.value) - - result = [] - for time_str, values in sorted(grouped.items()): - dt = datetime.strptime( - time_str, - "%Y-%m" if group_by_label == "month" else "%Y-%m-%d", - ) - avg_value = sum(values) / len(values) - nonlocal synthetic_id_counter - result.append( - well_schemas.WellMeasurementDTO( - id=synthetic_id_counter, - timestamp=dt, - value=avg_value, - submitting_user={"full_name": "System"}, - well={"ra_number": "Average of wells"}, - ) - ) - synthetic_id_counter -= 1 - return result - - def get_measurements_by_ids(well_ids, start: Optional[date], end: Optional[date]): - filters = [ - ObservedPropertyTypeLU.name == "Depth to water", - WellMeasurements.well_id.in_(well_ids), - ] - if start: - filters.append(WellMeasurements.timestamp >= start) - if end: - # include full day when end is provided - end_dt = datetime.combine(end, datetime.max.time()) - 
filters.append(WellMeasurements.timestamp <= end_dt) - - stmt = ( - select(WellMeasurements) - .options( - joinedload(WellMeasurements.submitting_user), - joinedload(WellMeasurements.well), - ) - .join(ObservedPropertyTypeLU) - .where(and_(*filters)) - .order_by(WellMeasurements.well_id, WellMeasurements.timestamp) + try: + return well_measurement_service.read_waterlevels( + db=db, + well_ids=well_ids, + from_date=from_date, + to_date=to_date, + is_averaging_all_wells=isAveragingAllWells, + is_comparing_to_1970_average=isComparingTo1970Average, + comparison_year=comparisonYear, ) - return db.scalars(stmt).all() - - # Decide grouping granularity only if both dates are given - group_by = None - if from_date and to_date: - group_by = "month" if (to_date - from_date).days >= 365 else "day" - - if not well_ids and not isComparingTo1970Average and not comparisonYear: - return [] - - response_data: List[well_schemas.WellMeasurementDTO] = [] - - # Averaged selection (if requested) - if isAveragingAllWells and well_ids: - current_measurements = get_measurements_by_ids(well_ids, from_date, to_date) - averaged = group_and_average(current_measurements, group_by or "day") - response_data.extend(averaged) - - # Raw per-well (if not averaging) - if not isAveragingAllWells and well_ids: - response_data.extend(get_measurements_by_ids(well_ids, from_date, to_date)) - - # Helper: add a comparison average for any given year - def add_year_average(year: int, label: str): - # pick full year or same-month window depending on user’s range - if from_date and to_date and (to_date - from_date).days >= 365: - start = datetime(year, 1, 1) - end = datetime(year, 12, 31, 23, 59, 59) - else: - # fallback: use provided month(s) if available, otherwise full year - if from_date and to_date: - start = datetime(year, from_date.month, 1) - import calendar - - last_day = calendar.monthrange(year, to_date.month)[1] - end = datetime(year, to_date.month, last_day, 23, 59, 59) - else: - start = 
datetime(year, 1, 1) - end = datetime(year, 12, 31, 23, 59, 59) - - monitoring_ids = [ - row[0] - for row in db.execute( - select(Wells.id).where(Wells.use_type_id == MONITORING_USE_TYPE_ID) - ).all() - ] - year_measurements = get_measurements_by_ids(monitoring_ids, start, end) - averaged = group_and_average(year_measurements, "month") - for dto in averaged: - dto.well.ra_number = label - response_data.extend(averaged) - - if isComparingTo1970Average: - add_year_average(1970, "1970 Average") - - if comparisonYear: - try: - year_int = int(comparisonYear) - except ValueError: - raise HTTPException( - status_code=400, detail="comparisonYear must be a 4-digit year" - ) - - current_year = datetime.now().year - if year_int < 1900 or year_int > current_year: - raise HTTPException( - status_code=400, - detail=f"comparisonYear must be between 1900 and {current_year}", - ) - - if not (isComparingTo1970Average and year_int == 1970): - add_year_average(year_int, f"{year_int} Average") - - return response_data + except ValueError as exc: + raise HTTPException(status_code=400, detail=str(exc)) @public_well_measurement_router.get( @@ -365,7 +123,7 @@ def read_waterlevel_report_averages( status_code=400, detail="from_date and/or to_date is required for reports" ) - return get_waterlevel_report_averages( + return well_measurement_service.get_waterlevel_report_averages( well_ids=well_ids, from_date=from_date, to_date=to_date, @@ -392,131 +150,20 @@ def download_waterlevels_pdf( Reuses the read_waterlevels() endpoint for data. 
""" - # Reuse the endpoint logic - data = read_waterlevels( - well_ids=well_ids, - from_date=from_date, - to_date=to_date, - isAveragingAllWells=isAveragingAllWells, - isComparingTo1970Average=isComparingTo1970Average, - comparisonYear=comparisonYear, - db=db, - ) - - if not data: - raise HTTPException(status_code=404, detail="No water-level data found") - - from_year = from_date.year - shift_years = set() - if isComparingTo1970Average: - shift_years.add(1970) - if comparisonYear: - try: - shift_years.add(int(comparisonYear)) - except ValueError: - pass # already validated above - - def shift_year_safe(dt, new_year: int): - """Shift dt to new_year, handling Feb 29 / month-end safely.""" - import calendar - - try: - return dt.replace(year=new_year) - except ValueError: - last_day = calendar.monthrange(new_year, dt.month)[1] - return dt.replace(year=new_year, day=min(dt.day, last_day)) - - # Prepare rows for the table and points for the chart - rows = [] - data_by_well = defaultdict(list) - - for m in data: - # m is a WellMeasurementDTO from read_waterlevels - ts = m.timestamp - val = m.value - ra = m.well["ra_number"] if isinstance(m.well, dict) else m.well.ra_number - - rows.append( - { - "timestamp": ts.strftime("%Y-%m-%d %H:%M"), - "depth_to_water": val, - "well_ra_number": ra, - } - ) - - chart_ts = ts - if from_year: - m_match = re.match(r"^(\d{4}) Average$", ra) - if m_match: - yr = int(m_match.group(1)) - if yr in shift_years: - chart_ts = shift_year_safe(ts, from_year) - - data_by_well[ra].append((chart_ts, val)) - - def make_line_chart(data: dict, title: str): - if not data: - return "" - fig = figure(figsize=(10, 6)) - ax = fig.add_subplot(111) - for ra_label, measurements in data.items(): - sorted_m = sorted(measurements, key=lambda x: x[0]) - timestamps = [ts for ts, _ in sorted_m] - values = [val for _, val in sorted_m] - ax.plot(timestamps, values, label=ra_label, marker="o") - ax.set_title(title) - ax.set_xlabel("Time") - ax.set_ylabel("Depth to 
Water") - ax.invert_yaxis() - - # Reserve Space on the top right & move legend outside the plot area to that reserved area - fig.subplots_adjust(right=0.78) - ax.legend( - loc="center left", - bbox_to_anchor=(1.02, 0.5), - borderaxespad=0.0, - frameon=True, - ) - - fig.autofmt_xdate() - buf = BytesIO() - fig.savefig(buf, format="png", bbox_inches="tight") - close(fig) - return b64encode(buf.getvalue()).decode("utf-8") - - chart_b64 = make_line_chart(data_by_well, "Depth of Water over Time") - - report_title = "ROSWELL ARTESIAN BASIN" - report_subtext = None - if isAveragingAllWells: - num_wells = len(well_ids) - well_word = "WELL" if num_wells == 1 else "WELLS" - report_subtext = ( - f"MONTHLY AVERAGE WATER LEVEL WITHIN {num_wells} PVACD RECORDER {well_word}\n" - "AVERAGES TAKEN FROM STEEL TAPE MEASUREMENTS MADE\n" - "ON OR NEAR THE 5TH, 15TH AND 25TH OF EACH MONTH" + try: + pdf_io = well_measurement_service.build_waterlevels_pdf( + db=db, + well_ids=well_ids, + from_date=from_date, + to_date=to_date, + is_averaging_all_wells=isAveragingAllWells, + is_comparing_to_1970_average=isComparingTo1970Average, + comparison_year=comparisonYear, ) - - averages = get_waterlevel_report_averages( - well_ids=well_ids, - from_date=from_date, - to_date=to_date, - db=db, - ) - - html = templates.get_template("waterlevels_report.html").render( - from_date=from_date, - to_date=to_date, - observation_chart=chart_b64, - rows=rows, - report_title=report_title, - report_subtext=report_subtext, - averages=averages, - ) - - pdf_io = BytesIO() - HTML(string=html).write_pdf(pdf_io) - pdf_io.seek(0) + except ValueError as exc: + raise HTTPException(status_code=400, detail=str(exc)) + except LookupError as exc: + raise HTTPException(status_code=404, detail=str(exc)) return StreamingResponse( pdf_io, @@ -528,11 +175,11 @@ def make_line_chart(data: dict, title: str): @authenticated_well_measurement_router.patch( "/waterlevels", dependencies=[Depends(ScopedUser.Admin)], - 
response_model=well_schemas.WellMeasurement, + response_model=well.WellMeasurement, tags=["WaterLevels"], ) def patch_waterlevel( - waterlevel_patch: well_schemas.PatchWaterLevel, db: Session = Depends(get_db) + waterlevel_patch: well.PatchWaterLevel, db: Session = Depends(get_db) ): # Find the measurement well_measurement = db.scalars( @@ -566,104 +213,3 @@ def delete_waterlevel(waterlevel_id: int, db: Session = Depends(get_db)): db.commit() return True - - -def get_waterlevel_report_averages( - *, - well_ids: List[int], - from_date: Optional[date], - to_date: Optional[date], - db: Session, -) -> Dict[str, Any]: - """ - Shared logic used by both JSON endpoint and PDF endpoint. - Returns: - { - "bucket": "month" | "year", - "per_well": [ { well_id, ra_number, period_start, avg_value }, ...], - "all_wells": [ { period_start, avg_value }, ...], - } - """ - DEPTH_TO_WATER_NAME = "Depth to water" - - if not well_ids: - return {"bucket": None, "per_well": [], "all_wells": []} - - if from_date is None and to_date is None: - # Let callers decide whether to raise; for PDF we always have both. 
- return {"bucket": None, "per_well": [], "all_wells": []} - - start_dt = datetime.combine(from_date, datetime.min.time()) if from_date else None - end_dt = datetime.combine(to_date, datetime.max.time()) if to_date else None - - if from_date and to_date: - delta_days = (to_date - from_date).days - bucket_unit = "year" if delta_days >= 365 else "month" - else: - bucket_unit = "month" - - bucket = func.date_trunc(bucket_unit, WellMeasurements.timestamp).label( - "period_start" - ) - - base_filters = [ - ObservedPropertyTypeLU.name == DEPTH_TO_WATER_NAME, - WellMeasurements.well_id.in_(well_ids), - ] - if start_dt: - base_filters.append(WellMeasurements.timestamp >= start_dt) - if end_dt: - base_filters.append(WellMeasurements.timestamp <= end_dt) - - per_well_stmt = ( - select( - WellMeasurements.well_id.label("well_id"), - Wells.ra_number.label("ra_number"), - bucket, - func.avg(WellMeasurements.value).label("avg_value"), - ) - .join(Wells, Wells.id == WellMeasurements.well_id) - .join( - ObservedPropertyTypeLU, - ObservedPropertyTypeLU.id == WellMeasurements.observed_property_id, - ) - .where(and_(*base_filters)) - .group_by(WellMeasurements.well_id, Wells.ra_number, bucket) - .order_by(Wells.ra_number, bucket) - ) - per_well_rows = db.execute(per_well_stmt).all() - - all_wells_stmt = ( - select( - bucket, - func.avg(WellMeasurements.value).label("avg_value"), - ) - .join( - ObservedPropertyTypeLU, - ObservedPropertyTypeLU.id == WellMeasurements.observed_property_id, - ) - .where(and_(*base_filters)) - .group_by(bucket) - .order_by(bucket) - ) - all_wells_rows = db.execute(all_wells_stmt).all() - - return { - "bucket": bucket_unit, - "per_well": [ - { - "well_id": r.well_id, - "ra_number": r.ra_number, - "period_start": r.period_start, - "avg_value": float(r.avg_value) if r.avg_value is not None else None, - } - for r in per_well_rows - ], - "all_wells": [ - { - "period_start": r.period_start, - "avg_value": float(r.avg_value) if r.avg_value is not None else None, 
- } - for r in all_wells_rows - ], - } diff --git a/api/routes/wells.py b/api/routes/wells.py index 47613e56..9039642a 100644 --- a/api/routes/wells.py +++ b/api/routes/wells.py @@ -7,11 +7,13 @@ from fastapi_pagination.ext.sqlalchemy import paginate from fastapi_pagination import LimitOffsetPage -from api.schemas import well_schemas -from api.models.main_models import Locations, WaterSources, WellStatus, WellUseLU, Wells -from api.route_util import _patch, _get +from api.schemas import well +from api.models.location import Locations +from api.models.well import WaterSources, WellStatus, WellUseLU, Wells +from api.routes.utils import _patch, _get from api.session import get_db -from api.enums import ScopedUser, WellSortByField, SortDirection +from api.auth.dependencies import ScopedUser +from api.enums import WellSortByField, SortDirection public_well_router = APIRouter() authenticated_well_router = APIRouter() @@ -20,7 +22,7 @@ @authenticated_well_router.get( "/use_types", dependencies=[Depends(ScopedUser.Read)], - response_model=List[well_schemas.WellUseLU], + response_model=List[well.WellUseLU], tags=["Wells"], ) def get_use_types( @@ -33,7 +35,7 @@ def get_use_types( @authenticated_well_router.get( "/water_sources", dependencies=[Depends(ScopedUser.Read)], - response_model=List[well_schemas.WaterSources], + response_model=List[well.WaterSources], tags=["Wells"], ) def get_water_sources( @@ -46,7 +48,7 @@ def get_water_sources( @authenticated_well_router.get( "/well_status_types", dependencies=[Depends(ScopedUser.Read)], - response_model=List[well_schemas.WellStatus], + response_model=List[well.WellStatus], tags=["Wells"], ) def get_well_status_types( @@ -57,7 +59,7 @@ def get_well_status_types( @public_well_router.get( "/wells/{well_id}", - response_model=well_schemas.WellResponse, + response_model=well.WellResponse, tags=["Wells"], ) def get_well_by_id(well_id: int, db: Session = Depends(get_db)): @@ -79,7 +81,7 @@ def get_well_by_id(well_id: int, db: Session 
= Depends(get_db)): @public_well_router.get( "/wells", - response_model=LimitOffsetPage[well_schemas.WellResponse], + response_model=LimitOffsetPage[well.WellResponse], tags=["Wells"], ) def get_wells( @@ -155,10 +157,10 @@ def sort_by_field_to_schema_field(name: WellSortByField): @authenticated_well_router.patch( "/wells", dependencies=[Depends(ScopedUser.WellWrite)], - response_model=well_schemas.WellResponse, + response_model=well.WellResponse, tags=["Wells"], ) -def update_well(updated_well: well_schemas.WellUpdate, db: Session = Depends(get_db)): +def update_well(updated_well: well.WellUpdate, db: Session = Depends(get_db)): # If present, update location and remove from model if updated_well.location: _patch(db, Locations, updated_well.location.id, updated_well.location) @@ -216,7 +218,7 @@ def update_well(updated_well: well_schemas.WellUpdate, db: Session = Depends(get dependencies=[Depends(ScopedUser.Admin)], tags=["Wells"], ) -def create_well(new_well: well_schemas.SubmitWellCreate, db: Session = Depends(get_db)): +def create_well(new_well: well.SubmitWellCreate, db: Session = Depends(get_db)): # First, commit the new location that was added with the new well new_location_model = Locations( name=new_well.location.name, @@ -267,7 +269,7 @@ def create_well(new_well: well_schemas.SubmitWellCreate, db: Session = Depends(g @authenticated_well_router.get( "/well_locations", dependencies=[Depends(ScopedUser.Read)], - response_model=List[well_schemas.WellResponse], + response_model=List[well.WellResponse], tags=["Wells"], ) def get_wells_locations( @@ -305,7 +307,7 @@ def get_wells_locations( @authenticated_well_router.get( "/well", dependencies=[Depends(ScopedUser.Read)], - response_model=well_schemas.Well, + response_model=well.Well, tags=["Wells"], ) def get_well(well_id: int, db: Session = Depends(get_db)): @@ -323,7 +325,7 @@ def get_well(well_id: int, db: Session = Depends(get_db)): dependencies=[Depends(ScopedUser.Admin)], tags=["Wells"], ) -def 
merge_well(well: well_schemas.SubmitWellMerge, db: Session = Depends(get_db)): +def merge_well(well: well.SubmitWellMerge, db: Session = Depends(get_db)): """ Transfers the history of merge well to target well then deletes the merge well """ diff --git a/api/routes/work_orders.py b/api/routes/work_orders.py new file mode 100644 index 00000000..9ca7ff44 --- /dev/null +++ b/api/routes/work_orders.py @@ -0,0 +1,77 @@ +from datetime import datetime +from typing import Annotated + +from fastapi import APIRouter, Depends, Query +from sqlalchemy.orm import Session + +from api.auth.dependencies import ScopedUser +from api.enums import WorkOrderStatus +from api.schemas import meter +from api.security import get_current_user +from api.services import work_orders as work_order_service +from api.models.user import Users +from api.session import get_db + + +work_orders_router = APIRouter() + + +@work_orders_router.get( + "/work_orders", + dependencies=[Depends(ScopedUser.Read)], + tags=["Work Orders"], +) +def get_work_orders( + filter_by_status: Annotated[list[WorkOrderStatus], Query()] = [ + WorkOrderStatus.Open + ], + start_date: datetime = Query(datetime.strptime("2024-06-01", "%Y-%m-%d")), + work_order_id: Annotated[list[int] | None, Query()] = None, + assigned_user_id: int | None = None, + q: str | None = None, + db: Session = Depends(get_db), +): + return work_order_service.list_work_orders( + db=db, + filter_by_status=[status.value for status in filter_by_status], + start_date=start_date, + work_order_id=work_order_id, + assigned_user_id=assigned_user_id, + q=q, + ) + + +@work_orders_router.post( + "/work_orders", + dependencies=[Depends(ScopedUser.Admin)], + response_model=meter.WorkOrder, + tags=["Work Orders"], +) +def create_work_order( + new_work_order: meter.CreateWorkOrder, db: Session = Depends(get_db) +): + return work_order_service.create_work_order(db=db, new_work_order=new_work_order) + + +@work_orders_router.patch( + "/work_orders", + 
response_model=meter.WorkOrder, + tags=["Work Orders"], +) +def patch_work_order( + patch_work_order_form: meter.PatchWorkOrder, + user: Users = Depends(get_current_user), + db: Session = Depends(get_db), +): + return work_order_service.update_work_order( + db=db, patch_work_order_form=patch_work_order_form, user=user + ) + + +@work_orders_router.delete( + "/work_orders", + dependencies=[Depends(ScopedUser.Admin)], + tags=["Work Orders"], +) +def delete_work_order(work_order_id: int, db: Session = Depends(get_db)): + return work_order_service.delete_work_order(db=db, work_order_id=work_order_id) diff --git a/api/schemas/admin_schemas.py b/api/schemas/admin.py similarity index 100% rename from api/schemas/admin_schemas.py rename to api/schemas/admin.py diff --git a/api/schemas/chlorides.py b/api/schemas/chlorides.py new file mode 100644 index 00000000..123fdbaa --- /dev/null +++ b/api/schemas/chlorides.py @@ -0,0 +1,18 @@ +from typing import Optional + +from pydantic import BaseModel + + +class MinMaxAvgMedCount(BaseModel): + min: Optional[float] = None + max: Optional[float] = None + avg: Optional[float] = None + median: Optional[float] = None + count: int = 0 + + +class ChlorideReportNums(BaseModel): + north: MinMaxAvgMedCount + south: MinMaxAvgMedCount + east: MinMaxAvgMedCount + west: MinMaxAvgMedCount diff --git a/api/schemas/maintenance.py b/api/schemas/maintenance.py new file mode 100644 index 00000000..72b338b3 --- /dev/null +++ b/api/schemas/maintenance.py @@ -0,0 +1,31 @@ +from datetime import datetime +from typing import List + +from pydantic import BaseModel + + +class MeterSummary(BaseModel): + meter: str + count: int + + +class MaintenanceRow(BaseModel): + date_time: datetime + technician: str + meter: str + trss: str + number_of_repairs: int + number_of_pms: int + + +class MaintenanceSummaryResponse(BaseModel): + repairs_by_meter: List[MeterSummary] + pms_by_meter: List[MeterSummary] + table_rows: List[MaintenanceRow] + + +class 
HomeSummaryResponse(BaseModel): + completed_work_orders: int + repairs_processed: int + reinstallations_processed: int + preventative_maintenance_processed: int diff --git a/api/schemas/meter_schemas.py b/api/schemas/meter.py similarity index 98% rename from api/schemas/meter_schemas.py rename to api/schemas/meter.py index 3fabc9e9..1015f7ab 100644 --- a/api/schemas/meter_schemas.py +++ b/api/schemas/meter.py @@ -1,7 +1,7 @@ from datetime import datetime from api.schemas.base import ORMBase -from api.schemas.well_schemas import Well, Location -from api.schemas.security_schemas import User +from api.schemas.well import Well, Location +from api.schemas.security import User from pydantic import BaseModel from decimal import Decimal diff --git a/api/schemas/notification_schemas.py b/api/schemas/notifications.py similarity index 94% rename from api/schemas/notification_schemas.py rename to api/schemas/notifications.py index c5a35ef0..614e7ebe 100644 --- a/api/schemas/notification_schemas.py +++ b/api/schemas/notifications.py @@ -1,7 +1,7 @@ from datetime import datetime from api.schemas.base import ORMBase -from api.schemas.security_schemas import User +from api.schemas.security import User class NotificationType(ORMBase): diff --git a/api/schemas/ose.py b/api/schemas/ose.py new file mode 100644 index 00000000..c84a897f --- /dev/null +++ b/api/schemas/ose.py @@ -0,0 +1,50 @@ +from datetime import date, datetime, time + +from pydantic import BaseModel, Field + + +class MeterActivityPhotoDTO(BaseModel): + name: str + url: str + + +class ObservationDTO(BaseModel): + observation_time: time + observation_type: str + measurement: float + units: str + + +class ActivityDTO(BaseModel): + activity_id: int + ose_request_id: int | None = None + activity_start: datetime + activity_end: datetime + activity_type: str + well_ra_number: str | None + well_ose_tag: str | None + description: str + services: list[str] = Field(default_factory=list) + notes: list[str] = 
Field(default_factory=list) + parts_used: list[str] = Field(default_factory=list) + observations: list[ObservationDTO] = Field(default_factory=list) + meter_activity_photos: list[MeterActivityPhotoDTO] = Field(default_factory=list) + + +class MeterHistoryDTO(BaseModel): + serial_number: str + activities: list[ActivityDTO] = Field(default_factory=list) + + +class DateHistoryDTO(BaseModel): + date: date + meters: list[MeterHistoryDTO] = Field(default_factory=list) + + +class DisapprovalStatus(BaseModel): + ose_request_id: int + status: str + notes: str | None = None + disapproval_activity: ActivityDTO | None = None + new_activities: list[ActivityDTO] | None = None + diff --git a/api/schemas/part_schemas.py b/api/schemas/parts.py similarity index 97% rename from api/schemas/part_schemas.py rename to api/schemas/parts.py index deca5789..767819ad 100644 --- a/api/schemas/part_schemas.py +++ b/api/schemas/parts.py @@ -1,7 +1,8 @@ -from typing import List, Literal, Optional from datetime import date, datetime +from typing import List, Literal, Optional + from api.schemas.base import ORMBase -from api.schemas.meter_schemas import MeterTypeLU +from api.schemas.meter import MeterTypeLU class PartTypeLU(ORMBase): diff --git a/api/schemas/security_schemas.py b/api/schemas/security.py similarity index 100% rename from api/schemas/security_schemas.py rename to api/schemas/security.py diff --git a/api/schemas/settings.py b/api/schemas/settings.py new file mode 100644 index 00000000..1968f9b2 --- /dev/null +++ b/api/schemas/settings.py @@ -0,0 +1,14 @@ +from api.schemas.base import ORMBase + + +class RedirectPageUpdate(ORMBase): + redirect_page: str + + +class DisplayNameUpdate(ORMBase): + display_name: str + + +class PasswordResetRequest(ORMBase): + current_password: str + new_password: str diff --git a/api/schemas/user_sessions.py b/api/schemas/user_sessions.py new file mode 100644 index 00000000..2381bef0 --- /dev/null +++ b/api/schemas/user_sessions.py @@ -0,0 +1,52 @@ +from 
datetime import datetime +from typing import Optional + +from api.schemas.base import ORMBase + + +class SessionSignOutRequest(ORMBase): + sign_out_reason_name: str + fingerprint_hash: Optional[str] = None + + +class ExpiredSessionSignOutRequest(SessionSignOutRequest): + session_identifier: str + + +class UserSessionSummary(ORMBase): + session_identifier: str + device_label: str | None = None + device_type: str | None = None + browser: str | None = None + operating_system: str | None = None + ip_address: str | None = None + signed_in_at: datetime + last_seen_at: datetime + signed_out_at: datetime | None = None + is_active: bool + sign_out_reason_name: str | None = None + is_current: bool + + +class KnownDeviceSummary(ORMBase): + device_key: str + device_label: str | None = None + device_type: str | None = None + browser: str | None = None + operating_system: str | None = None + session_count: int + active_session_count: int + signed_in_at_first: datetime + last_seen_at: datetime + is_current_device: bool + + +class UserSessionsResponse(ORMBase): + current_session_identifier: str | None = None + sessions: list[UserSessionSummary] + known_devices: list[KnownDeviceSummary] + + +class CurrentSessionStatusResponse(ORMBase): + session_identifier: str + is_active: bool diff --git a/api/schemas/well_schemas.py b/api/schemas/well.py similarity index 98% rename from api/schemas/well_schemas.py rename to api/schemas/well.py index e19cccd6..d77b3ff3 100644 --- a/api/schemas/well_schemas.py +++ b/api/schemas/well.py @@ -1,7 +1,7 @@ from datetime import datetime from pydantic import BaseModel from typing import List -from api.schemas.security_schemas import User +from api.schemas.security import User from api.schemas.base import ORMBase diff --git a/api/security.py b/api/security.py index 1728b8a2..819e1651 100644 --- a/api/security.py +++ b/api/security.py @@ -10,16 +10,20 @@ from sqlalchemy.orm import joinedload, undefer, Session from sqlalchemy.sql import select -from 
api.models.main_models import Users, UserRoles, SecurityScopes, UserSessions -from api.schemas import security_schemas +from api.models.user import Users, UserRoles, SecurityScopes, UserSessions +from api.schemas import security as security_schema +from api.config import settings from api.session import get_db oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token") pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto") -SECRET_KEY = "09d25e194fbb6ca2556c818166b7a9563b93f7099f6f0f4caa6cf63b88e8d3e7" -ALGORITHM = "HS256" -ACCESS_TOKEN_EXPIRE_HOURS = 8 +SECRET_KEY = settings.JWT_SECRET_KEY +ALGORITHM = settings.JWT_ALGORITHM +ACCESS_TOKEN_EXPIRE_HOURS = settings.ACCESS_TOKEN_EXPIRE_HOURS + +if not SECRET_KEY: + raise RuntimeError("JWT_SECRET_KEY environment variable must be set.") invalid_credentials_exception = HTTPException( status_code=status.HTTP_401_UNAUTHORIZED, @@ -197,10 +201,10 @@ def get_user(current_user: Users = Security(get_current_user)): @authenticated_router.get( - "/users/me", response_model=security_schemas.User, tags=["Login"] + "/users/me", response_model=security_schema.User, tags=["Login"] ) def read_users_me( - current_user: security_schemas.User = Depends(get_current_user), + current_user: security_schema.User = Depends(get_current_user), ): return current_user diff --git a/api/services/__init__.py b/api/services/__init__.py new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/api/services/__init__.py @@ -0,0 +1 @@ + diff --git a/api/services/activities.py b/api/services/activities.py new file mode 100644 index 00000000..8c754bec --- /dev/null +++ b/api/services/activities.py @@ -0,0 +1,360 @@ +from datetime import datetime + +from fastapi import HTTPException +from sqlalchemy import select, text +from sqlalchemy.exc import IntegrityError +from sqlalchemy.orm import Session, joinedload, undefer + +from api.models.location import Locations +from api.models.meter import ( + ActivityTypeLU, + MeterActivities, + 
MeterObservations, + MeterStatusLU, + Meters, + NoteTypeLU, + ObservedPropertyTypeLU, + ServiceTypeLU, + Units, +) +from api.models.part import Parts +from api.models.user import Users +from api.models.well import Wells +from api.schemas import meter +from api.services import storage as storage_service + + +def _get_hq_location(db: Session): + return db.scalars(select(Locations).where(Locations.type_id == 1)).first() + + +async def create_activity( + db: Session, + activity_form: meter.ActivityForm, + user: Users, + photos, + max_photos_per_meter: int, +): + update_meter_state = True + user_level = user.user_role.name + + last_activity = db.scalars( + select(MeterActivities) + .where(MeterActivities.meter_id == activity_form.activity_details.meter_id) + .order_by(MeterActivities.timestamp_end.desc()) + .limit(1) + ).first() + + activity_date = activity_form.activity_details.date.date() + starttime = activity_form.activity_details.start_time.time().replace(second=0) + endtime = activity_form.activity_details.end_time.time().replace(second=0) + start_datetime = datetime.combine(activity_date, starttime) + end_datetime = datetime.combine(activity_date, endtime) + + if last_activity and last_activity.timestamp_end > end_datetime: + update_meter_state = False + if user_level != "Admin": + raise HTTPException( + status_code=409, + detail="Submitted activity is older than the last activity.", + ) + + activity_meter = db.scalars( + select(Meters).where(activity_form.activity_details.meter_id == Meters.id) + ).first() + activity_type = db.scalars( + select(ActivityTypeLU).where( + activity_form.activity_details.activity_type_id == ActivityTypeLU.id + ) + ).first() + hq_location = _get_hq_location(db) + + activity_well = None + if activity_form.current_installation.well_id: + activity_well = db.scalars( + select(Wells).where(activity_form.current_installation.well_id == Wells.id) + ).first() + activity_location = activity_well.location.id + else: + activity_location = 
hq_location.id + + meter_activity = MeterActivities( + timestamp_start=start_datetime, + timestamp_end=end_datetime, + description=activity_form.maintenance_repair.description, + submitting_user_id=activity_form.activity_details.user_id, + meter_id=activity_form.activity_details.meter_id, + activity_type_id=activity_form.activity_details.activity_type_id, + location_id=activity_location, + ose_share=activity_form.activity_details.share_ose, + water_users=activity_form.current_installation.water_users, + ) + if activity_form.activity_details.work_order_id: + meter_activity.work_order_id = activity_form.activity_details.work_order_id + + try: + db.add(meter_activity) + db.commit() + db.refresh(meter_activity) + except IntegrityError: + raise HTTPException( + status_code=409, detail="Activity overlaps with existing activity." + ) + + db.flush() + + share_ose_observation = bool(activity_form.activity_details.share_ose) + for observation_form in activity_form.observations: + observation_datetime = datetime.combine( + activity_date, observation_form.time.time() + ) + db.add( + MeterObservations( + timestamp=observation_datetime, + value=observation_form.reading, + observed_property_type_id=observation_form.property_type_id, + unit_id=observation_form.unit_id, + submitting_user_id=activity_form.activity_details.user_id, + meter_id=activity_form.activity_details.meter_id, + location_id=activity_location, + ose_share=share_ose_observation, + ) + ) + + notes = db.scalars( + select(NoteTypeLU).where(NoteTypeLU.id.in_(activity_form.notes.selected_note_ids)) + ).all() + meter_activity.notes = notes + + status_note_type = db.scalars( + select(NoteTypeLU).where( + NoteTypeLU.slug == activity_form.notes.working_on_arrival_slug + ) + ).first() + meter_activity.notes.append(status_note_type) + + used_parts = db.scalars( + select(Parts).where(Parts.id.in_(activity_form.part_used_ids)) + ).all() + meter_activity.parts_used = used_parts + for used_part in used_parts: + used_part.count 
-= 1 + + services = db.scalars( + select(ServiceTypeLU).where( + ServiceTypeLU.id.in_(activity_form.maintenance_repair.service_type_ids) + ) + ).all() + meter_activity.services_performed = services + + db.commit() + + meter_statuses = { + status.status_name: status.id for status in db.scalars(select(MeterStatusLU)).all() + } + if update_meter_state: + if activity_type.name in ["Uninstall", "Uninstall and Hold"]: + activity_meter.location_id = hq_location.id + activity_meter.well_id = None + activity_meter.water_users = None + activity_meter.status_id = ( + meter_statuses["On Hold"] + if activity_type.name == "Uninstall and Hold" + else meter_statuses["Warehouse"] + ) + if activity_type.name == "Install": + activity_meter.well_id = activity_well.id + activity_meter.location_id = activity_location + activity_meter.status_id = meter_statuses["Installed"] + activity_meter.water_users = activity_form.current_installation.water_users + if activity_type.name == "Scrap": + activity_meter.well_id = None + activity_meter.location_id = None + activity_meter.status_id = meter_statuses["Scrapped"] + activity_meter.water_users = None + activity_meter.meter_owner = None + if activity_type.name == "Sell": + activity_meter.well_id = None + activity_meter.location_id = None + activity_meter.status_id = meter_statuses["Sold"] + activity_meter.water_users = None + activity_meter.meter_owner = activity_form.current_installation.meter_owner + if activity_type.name == "Change Water Users": + activity_meter.water_users = activity_form.current_installation.water_users + + if activity_type.name != "Uninstall": + activity_meter.contact_name = activity_form.current_installation.contact_name + activity_meter.contact_phone = activity_form.current_installation.contact_phone + activity_meter.notes = activity_form.current_installation.notes + + db.commit() + + if photos: + await storage_service.save_activity_photos( + db=db, + meter_activity=meter_activity, + photos=photos, + 
max_photos_per_meter=max_photos_per_meter, + ) + + return meter_activity + + +def patch_activity(db: Session, patch_activity_form: meter.PatchActivity): + activity = db.scalars( + select(MeterActivities).where( + MeterActivities.id == patch_activity_form.activity_id + ) + ).first() + + activity.timestamp_start = patch_activity_form.timestamp_start + activity.timestamp_end = patch_activity_form.timestamp_end + activity.description = patch_activity_form.description + activity.ose_share = patch_activity_form.ose_share + activity.water_users = patch_activity_form.water_users + activity.location_id = ( + _get_hq_location(db).id + if patch_activity_form.location_id is None + else patch_activity_form.location_id + ) + + delete_sql = text('DELETE FROM "Notes" WHERE meter_activity_id = :activity_id') + db.execute(delete_sql, {"activity_id": patch_activity_form.activity_id}) + if patch_activity_form.note_ids: + insert_sql = text( + 'INSERT INTO "Notes" (meter_activity_id, note_type_id) VALUES (:activity_id, :note_id)' + ) + for note_id in patch_activity_form.note_ids: + db.execute( + insert_sql, + {"activity_id": patch_activity_form.activity_id, "note_id": note_id}, + ) + + delete_sql = text('DELETE FROM "PartsUsed" WHERE meter_activity_id = :activity_id') + db.execute(delete_sql, {"activity_id": patch_activity_form.activity_id}) + if patch_activity_form.part_ids: + insert_sql = text( + 'INSERT INTO "PartsUsed" (meter_activity_id, part_id) VALUES (:activity_id, :part_id)' + ) + for part_id in patch_activity_form.part_ids: + db.execute( + insert_sql, + {"activity_id": patch_activity_form.activity_id, "part_id": part_id}, + ) + + delete_sql = text( + 'DELETE FROM "ServicesPerformed" WHERE meter_activity_id = :activity_id' + ) + db.execute(delete_sql, {"activity_id": patch_activity_form.activity_id}) + if patch_activity_form.service_ids: + insert_sql = text( + 'INSERT INTO "ServicesPerformed" (meter_activity_id, service_type_id) VALUES (:activity_id, :service_id)' + ) + for 
service_id in patch_activity_form.service_ids: + db.execute( + insert_sql, + { + "activity_id": patch_activity_form.activity_id, + "service_id": service_id, + }, + ) + + db.commit() + return {"status": "success"} + + +def delete_activity(db: Session, activity_id: int): + activity = db.scalars( + select(MeterActivities).where(MeterActivities.id == activity_id) + ).first() + if not activity: + raise HTTPException(status_code=404, detail="Activity not found.") + + storage_service.delete_activity_photos(db, activity_id) + for table_name in ["Notes", "ServicesPerformed", "PartsUsed"]: + sql = text(f'DELETE FROM "{table_name}" WHERE meter_activity_id = :activity_id') + db.execute(sql, {"activity_id": activity_id}) + + db.delete(activity) + db.commit() + return {"status": "success"} + + +def patch_observation( + db: Session, patch_observation_form: meter.PatchObservation +): + observation = db.scalars( + select(MeterObservations).where( + MeterObservations.id == patch_observation_form.observation_id + ) + ).first() + + observation.timestamp = patch_observation_form.timestamp + observation.value = patch_observation_form.value + observation.notes = patch_observation_form.notes + observation.observed_property_type_id = ( + patch_observation_form.observed_property_type_id + ) + observation.unit_id = patch_observation_form.unit_id + observation.meter_id = patch_observation_form.meter_id + observation.submitting_user_id = patch_observation_form.submitting_user_id + observation.ose_share = patch_observation_form.ose_share + observation.location_id = ( + _get_hq_location(db).id + if patch_observation_form.location_id is None + else patch_observation_form.location_id + ) + + db.commit() + return {"status": "success"} + + +def delete_observation(db: Session, observation_id: int): + observation = db.scalars( + select(MeterObservations).where(MeterObservations.id == observation_id) + ).first() + if not observation: + raise HTTPException(status_code=404, detail="Observation not 
found.") + db.delete(observation) + db.commit() + return {"status": "success"} + + +def get_activity_types(db: Session, user: Users): + if user.user_role.name not in ["Admin", "Technician"]: + return [] + + activities = db.scalars(select(ActivityTypeLU)).all() + if user.user_role.name != "Admin": + return [ + activity for activity in activities if activity.name not in ["Sell", "Scrap"] + ] + return activities + + +def get_users(db: Session): + return db.scalars( + select(Users).options(undefer(Users.user_role_id)).where(Users.disabled == False) + ).all() + + +def get_units(db: Session): + return db.scalars(select(Units)).all() + + +def get_observed_property_types(db: Session): + return ( + db.scalars( + select(ObservedPropertyTypeLU).options(joinedload(ObservedPropertyTypeLU.units)) + ) + .unique() + .all() + ) + + +def get_service_types(db: Session): + return db.scalars(select(ServiceTypeLU)).all() + + +def get_note_types(db: Session): + return db.scalars(select(NoteTypeLU)).all() diff --git a/api/services/meters.py b/api/services/meters.py new file mode 100644 index 00000000..61f22ce4 --- /dev/null +++ b/api/services/meters.py @@ -0,0 +1,98 @@ +from enum import Enum + +from sqlalchemy import select +from sqlalchemy.orm import Session, joinedload + +from api.models.meter import MeterActivities, MeterObservations +from api.models.part import Parts, PartsUsed +from api.models.well import Wells +from api.services.storage import create_signed_url + + +class HistoryType(Enum): + Activity = "Activity" + Observation = "Observation" + LocationChange = "LocationChange" + + +def get_meter_history(db: Session, meter_id: int): + activities = ( + db.scalars( + select(MeterActivities) + .options( + joinedload(MeterActivities.location), + joinedload(MeterActivities.submitting_user), + joinedload(MeterActivities.activity_type), + joinedload(MeterActivities.parts_used_links) + .joinedload(PartsUsed.part) + .joinedload(Parts.part_type), + joinedload(MeterActivities.notes), + 
joinedload(MeterActivities.services_performed), + ) + .filter(MeterActivities.meter_id == meter_id) + ) + .unique() + .all() + ) + + observations = db.scalars( + select(MeterObservations) + .options( + joinedload(MeterObservations.submitting_user), + joinedload(MeterObservations.observed_property), + joinedload(MeterObservations.unit), + joinedload(MeterObservations.location), + ) + .filter(MeterObservations.meter_id == meter_id) + ).all() + + formatted_history_items = [] + item_id = 0 + + for activity in activities: + activity.location.geom = None + activity_well = db.scalars( + select(Wells).where(Wells.location_id == activity.location_id) + ).first() + photos = [ + { + "id": photo.id, + "file_name": photo.file_name, + "url": create_signed_url(photo.gcs_path), + "uploaded_at": photo.uploaded_at, + } + for photo in activity.photos + ] + formatted_history_items.append( + { + "id": item_id, + "history_type": HistoryType.Activity, + "well": activity_well, + "location": activity.location, + "activity_type": activity.activity_type_id, + "date": activity.timestamp_start, + "history_item": activity, + "photos": photos, + } + ) + item_id += 1 + + for observation in observations: + observation.location.geom = None + observation_well = db.scalars( + select(Wells).where(Wells.location_id == observation.location_id) + ).first() + formatted_history_items.append( + { + "id": item_id, + "history_type": HistoryType.Observation, + "well": observation_well, + "location": observation.location, + "date": observation.timestamp, + "history_item": observation, + } + ) + item_id += 1 + + formatted_history_items.sort(key=lambda item: item["date"], reverse=True) + return formatted_history_items diff --git a/api/services/ose.py b/api/services/ose.py new file mode 100644 index 00000000..1bfff605 --- /dev/null +++ b/api/services/ose.py @@ -0,0 +1,380 @@ +from datetime import datetime +import os + +from fastapi import HTTPException +from sqlalchemy import and_, select +from sqlalchemy.orm 
import Session, joinedload, selectinload + +from api.models.meter import ( + ActivityTypeLU, + MeterActivities, + MeterObservations, + MeterStatusLU, + Meters, + NoteTypeLU, + ObservedPropertyTypeLU, + ServiceTypeLU, + meterRegisters, +) +from api.models.well import Wells +from api.models.work_order import workOrders +from api.schemas import meter, ose + + +API_BASE_URL = os.getenv("API_BASE_URL", "") + + +def build_activity_photo_url(activity_id: int, photo_name: str) -> str: + return f"{API_BASE_URL}/activities/{activity_id}/photos/{photo_name}" + + +def _get_observations_for_activity( + activity_start: datetime, + activity_end: datetime, + meter_id: int, + observations: list[MeterObservations], +) -> list[ose.ObservationDTO]: + observation_list = [] + for observation in observations: + if ( + observation.timestamp >= activity_start + and observation.timestamp <= activity_end + and observation.meter_id == meter_id + ): + observation_list.append( + ose.ObservationDTO( + observation_time=observation.timestamp.time(), + observation_type=observation.observed_property.name, + measurement=observation.value, + units=observation.unit.name_short, + ) + ) + return observation_list + + +def _serialize_activity( + activity: MeterActivities, observations: list[MeterObservations] +) -> ose.ActivityDTO: + notes_strings = [note.note for note in activity.notes] + parts_used_strings = [ + f"{part.part_type.name} ({part.part_number})" for part in activity.parts_used + ] + services_performed_strings = [ + service.service_name for service in activity.services_performed + ] + activity_observations = _get_observations_for_activity( + activity.timestamp_start, + activity.timestamp_end, + activity.meter_id, + observations, + ) + well_ra_number = activity.well.ra_number if activity.well else None + well_ose_tag = activity.well.osetag if activity.well else None + meter_activity_photos = [ + ose.MeterActivityPhotoDTO( + name=photo.file_name, + url=build_activity_photo_url(activity.id, 
photo.file_name), + ) + for photo in (activity.photos or []) + ] + + return ose.ActivityDTO( + activity_id=activity.id, + ose_request_id=activity.work_order.ose_request_id if activity.work_order else None, + activity_type=activity.activity_type.name, + activity_start=activity.timestamp_start, + activity_end=activity.timestamp_end, + well_ra_number=well_ra_number, + well_ose_tag=well_ose_tag, + description=activity.description, + services=services_performed_strings, + notes=notes_strings, + parts_used=parts_used_strings, + observations=activity_observations, + meter_activity_photos=meter_activity_photos, + ) + + +def reorganize_history( + activities: list[MeterActivities], observations: list[MeterObservations] +) -> list[ose.DateHistoryDTO]: + history: dict[str, dict[str, list[MeterActivities]]] = {} + for activity in activities: + activity_date = activity.timestamp_start.strftime("%Y-%m-%d") + meter_serial = activity.meter.serial_number + history.setdefault(activity_date, {}).setdefault(meter_serial, []).append(activity) + + history_list: list[ose.DateHistoryDTO] = [] + for activity_date, meters in history.items(): + meter_history_list = [] + for meter_serial, meter_activities in meters.items(): + meter_history_list.append( + ose.MeterHistoryDTO( + serial_number=meter_serial, + activities=[ + _serialize_activity(activity, observations) + for activity in meter_activities + ], + ) + ) + history_list.append( + ose.DateHistoryDTO(date=activity_date, meters=meter_history_list) + ) + + return history_list + + +def get_shared_history( + db: Session, start_datetime: datetime, end_datetime: datetime +) -> list[ose.DateHistoryDTO]: + activities = ( + db.scalars( + select(MeterActivities) + .options( + joinedload(MeterActivities.activity_type), + joinedload(MeterActivities.parts_used), + joinedload(MeterActivities.meter), + joinedload(MeterActivities.work_order), + joinedload(MeterActivities.well), + selectinload(MeterActivities.photos), + ) + .filter( + and_( + 
MeterActivities.timestamp_end >= start_datetime, + MeterActivities.timestamp_end <= end_datetime, + MeterActivities.ose_share == True, + ) + ) + ) + .unique() + .all() + ) + + observations = ( + db.scalars( + select(MeterObservations) + .options( + joinedload(MeterObservations.observed_property), + joinedload(MeterObservations.unit), + joinedload(MeterObservations.meter), + ) + .filter( + and_( + MeterObservations.timestamp >= start_datetime, + MeterObservations.timestamp <= end_datetime, + MeterObservations.ose_share == True, + ) + ) + ) + .unique() + .all() + ) + + return reorganize_history(list(activities), list(observations)) + + +def get_maintenance_by_request_ids( + db: Session, ose_request_ids: list[int] | None +) -> list[ose.DateHistoryDTO]: + activities = ( + db.scalars( + select(MeterActivities) + .options( + joinedload(MeterActivities.activity_type), + joinedload(MeterActivities.parts_used), + joinedload(MeterActivities.meter).joinedload(Meters.well), + joinedload(MeterActivities.work_order), + selectinload(MeterActivities.photos), + ) + .join(workOrders) + .where( + and_( + workOrders.ose_request_id.in_(ose_request_ids), + MeterActivities.ose_share == True, + ) + ) + ) + .unique() + .all() + ) + + activities_list = list(activities) + if not activities_list: + return [] + + activities_start_date = min(activity.timestamp_start for activity in activities_list) + activities_end_date = max(activity.timestamp_end for activity in activities_list) + observations = ( + db.scalars( + select(MeterObservations) + .options( + joinedload(MeterObservations.observed_property), + joinedload(MeterObservations.unit), + joinedload(MeterObservations.meter), + ) + .filter( + and_( + MeterObservations.timestamp >= activities_start_date, + MeterObservations.timestamp <= activities_end_date, + MeterObservations.ose_share == True, + ) + ) + ) + .unique() + .all() + ) + + return reorganize_history(activities_list, list(observations)) + + +def get_meter_information(db: Session, 
serial_number: str) -> meter.PublicMeter:
+    query = select(Meters).options(
+        joinedload(Meters.meter_type),
+        joinedload(Meters.well).joinedload(Wells.location),
+        joinedload(Meters.status),
+        joinedload(Meters.meter_register).joinedload(meterRegisters.dial_units),
+        joinedload(Meters.meter_register).joinedload(meterRegisters.totalizer_units),
+    )
+    meter_record = db.scalars(query.filter(Meters.serial_number == serial_number)).first()
+
+    if not meter_record:
+        raise HTTPException(status_code=404, detail="Meter not found")
+
+    return meter.PublicMeter(
+        serial_number=meter_record.serial_number,
+        status=meter_record.status.status_name,
+        well=meter.PublicMeter.PublicWell(
+            ra_number=meter_record.well.ra_number,
+            osetag=meter_record.well.osetag,
+            trss=meter_record.well.location.trss,
+            longitude=meter_record.well.location.longitude,
+            latitude=meter_record.well.location.latitude,
+        )
+        if meter_record.well
+        else None,
+        notes=meter_record.notes,
+        meter_type=meter.PublicMeter.MeterType(
+            brand=meter_record.meter_type.brand,
+            model=meter_record.meter_type.model,
+            size=meter_record.meter_type.size,
+        ),
+        meter_register=meter.PublicMeter.MeterRegister(
+            ratio=meter_record.meter_register.ratio,
+            number_of_digits=meter_record.meter_register.number_of_digits,
+            decimal_digits=meter_record.meter_register.decimal_digits,
+            dial_units=meter_record.meter_register.dial_units.name,
+            totalizer_units=meter_record.meter_register.totalizer_units.name,
+            multiplier=meter_record.meter_register.multiplier,
+        )
+        if meter_record.meter_register
+        else None,
+    )
+
+
+def get_disapproval_response(
+    db: Session, ose_request_id: int
+) -> ose.DisapprovalStatus:
+    work_order = db.scalars(
+        select(workOrders)
+        .options(joinedload(workOrders.status))
+        .where(workOrders.ose_request_id == ose_request_id)
+    ).first()
+
+    if not work_order or not work_order.title.startswith("OSE Data Issue"):
+        raise HTTPException(status_code=404, detail="Work order not found")
+
+    disapproval_activity = ose.ActivityDTO(
+        activity_id=99999,
+        activity_type="Disapproval",
+        activity_start=datetime.now(),
+        activity_end=datetime.now(),
+        
well_ra_number=None, + well_ose_tag=None, + description="Not yet implemented, need activity ID in disapproval", + services=[], + notes=[], + parts_used=[], + observations=[], + ) + + new_activities = ( + db.scalars( + select(MeterActivities) + .options( + joinedload(MeterActivities.activity_type), + joinedload(MeterActivities.parts_used), + joinedload(MeterActivities.meter).joinedload(Meters.well), + joinedload(MeterActivities.work_order), + selectinload(MeterActivities.photos), + ) + .where(MeterActivities.work_order_id == work_order.id) + ) + .unique() + .all() + ) + + new_activities_dto = [] + for activity in new_activities: + observations = ( + db.scalars( + select(MeterObservations) + .options( + joinedload(MeterObservations.observed_property), + joinedload(MeterObservations.unit), + ) + .filter( + and_( + MeterObservations.timestamp >= activity.timestamp_start, + MeterObservations.timestamp <= activity.timestamp_end, + MeterObservations.meter_id == activity.meter_id, + MeterObservations.ose_share == True, + ) + ) + ) + .unique() + .all() + ) + new_activities_dto.append(_serialize_activity(activity, list(observations))) + + return ose.DisapprovalStatus( + ose_request_id=work_order.ose_request_id, + status=work_order.status.name, + notes=work_order.notes, + disapproval_activity=disapproval_activity, + new_activities=new_activities_dto, + ) + + +def get_db_types(db: Session) -> meter.DBTypesForOSE: + return meter.DBTypesForOSE( + activity_types=[ + meter.DBTypesForOSE.GeneralTypeInfo( + name=item.name, description=item.description + ) + for item in db.scalars(select(ActivityTypeLU)).all() + ], + observed_property_types=[ + meter.DBTypesForOSE.GeneralTypeInfo( + name=item.name, description=item.description + ) + for item in db.scalars(select(ObservedPropertyTypeLU)).all() + ], + service_types=[ + meter.DBTypesForOSE.GeneralTypeInfo( + name=item.service_name, description=item.description + ) + for item in db.scalars(select(ServiceTypeLU)).all() + ], + note_types=[ 
+ meter.DBTypesForOSE.GeneralTypeInfo( + name=item.note, description=item.details + ) + for item in db.scalars(select(NoteTypeLU)).all() + ], + meter_status_types=[ + meter.DBTypesForOSE.GeneralTypeInfo( + name=item.status_name, description=item.description + ) + for item in db.scalars(select(MeterStatusLU)).all() + ], + ) diff --git a/api/services/parts.py b/api/services/parts.py new file mode 100644 index 00000000..fddccfc4 --- /dev/null +++ b/api/services/parts.py @@ -0,0 +1,346 @@ +from datetime import date, datetime, time +from io import BytesIO +from pathlib import Path +from typing import Optional + +from fastapi import HTTPException +from jinja2 import Environment, FileSystemLoader, select_autoescape +from sqlalchemy import func, literal, select, union_all +from sqlalchemy.orm import Session, selectinload +from weasyprint import HTML + +from api.models.meter import MeterActivities, meterRegisters +from api.models.part import Parts, PartsAdded, PartsUsed +from api.schemas import parts + + +TEMPLATES_DIR = Path(__file__).resolve().parent.parent / "templates" +templates = Environment( + loader=FileSystemLoader(TEMPLATES_DIR), + autoescape=select_autoescape(["html", "xml"]), +) + + +def _part_count_subqueries(): + used_subq = ( + select( + PartsUsed.part_id.label("part_id"), + func.coalesce(func.sum(PartsUsed.count), 0).label("used_sum"), + ) + .group_by(PartsUsed.part_id) + .subquery() + ) + added_subq = ( + select( + PartsAdded.part_id.label("part_id"), + func.coalesce(func.sum(PartsAdded.count), 0).label("added_sum"), + ) + .group_by(PartsAdded.part_id) + .subquery() + ) + current_count = ( + Parts.initial_count + + func.coalesce(added_subq.c.added_sum, 0) + - func.coalesce(used_subq.c.used_sum, 0) + ).label("current_count") + return used_subq, added_subq, current_count + + +def build_part_history_response( + part_id: int, db: Session +) -> parts.PartHistoryResponse: + part = db.scalars(select(Parts).where(Parts.id == part_id)).first() + if not part: + raise 
HTTPException(status_code=404, detail="Part not found") + + added_q = select( + PartsAdded.id.label("ref_id"), + PartsAdded.part_id.label("part_id"), + PartsAdded.date.label("event_date"), + literal("added").label("event_type"), + PartsAdded.note.label("note"), + PartsAdded.count.label("delta"), + literal(None).label("work_order_id"), + ).where(PartsAdded.part_id == part_id) + + used_q = ( + select( + PartsUsed.id.label("ref_id"), + PartsUsed.part_id.label("part_id"), + MeterActivities.timestamp_start.label("event_date"), + literal("used").label("event_type"), + func.nullif(func.trim(MeterActivities.description), "").label("note"), + (-PartsUsed.count).label("delta"), + MeterActivities.work_order_id.label("work_order_id"), + ) + .join(MeterActivities, MeterActivities.id == PartsUsed.meter_activity_id) + .where(PartsUsed.part_id == part_id) + ) + + events = union_all(added_q, used_q).subquery() + rows = db.execute( + select( + events.c.ref_id, + events.c.part_id, + events.c.event_date, + events.c.event_type, + events.c.note, + events.c.delta, + events.c.work_order_id, + ).order_by(events.c.event_date.asc(), events.c.ref_id.asc()) + ).all() + + running = int(part.initial_count) + history: list[parts.PartHistoryRow] = [ + parts.PartHistoryRow( + row_id=f"initial-{part_id}", + part_id=part_id, + event_date=datetime.min, + event_type="initial", + ref_id=None, + note="Initial count", + delta=0, + total_after=running, + work_order_id=None, + ) + ] + + for ref_id, pid, event_date, event_type, note, delta, work_order_id in rows: + if not isinstance(event_date, datetime): + event_date = datetime.combine(event_date, time.min) + running += int(delta) + history.append( + parts.PartHistoryRow( + row_id=f"{event_type}-{ref_id}", + part_id=pid, + event_date=event_date, + event_type=event_type, + ref_id=ref_id, + note=note, + delta=int(delta), + total_after=running, + work_order_id=work_order_id, + ) + ) + + return parts.PartHistoryResponse( + part_id=part.id, + 
part_number=part.part_number, + initial_count=part.initial_count, + current_count=running, + history=history, + ) + + +def list_parts(db: Session, in_use: Optional[bool] = None): + used_subq, added_subq, current_count = _part_count_subqueries() + stmt = ( + select(Parts, current_count) + .outerjoin(used_subq, used_subq.c.part_id == Parts.id) + .outerjoin(added_subq, added_subq.c.part_id == Parts.id) + .options(selectinload(Parts.part_type)) + ) + if in_use is not None: + stmt = stmt.where(Parts.in_use == in_use) + rows = db.execute(stmt).all() + results = [] + for part, curr in rows: + part.current_count = curr + results.append(part) + return results + + +def get_parts_used_summary(db: Session, from_date: date, to_date: date, parts: list[int]): + start_dt = datetime.combine(from_date, datetime.min.time()) + end_dt = datetime.combine(to_date, datetime.max.time()) + usage_subq = ( + db.query( + PartsUsed.part_id.label("used_part_id"), + func.coalesce(func.sum(PartsUsed.count), 0).label("quantity"), + ) + .join(MeterActivities, MeterActivities.id == PartsUsed.meter_activity_id) + .filter( + MeterActivities.timestamp_start >= start_dt, + MeterActivities.timestamp_start <= end_dt, + PartsUsed.part_id.in_(parts), + ) + .group_by(PartsUsed.part_id) + .subquery() + ) + query = ( + db.query( + Parts.id.label("id"), + Parts.part_number, + Parts.description, + Parts.price, + func.coalesce(usage_subq.c.quantity, 0).label("quantity"), + ) + .outerjoin(usage_subq, Parts.id == usage_subq.c.used_part_id) + .filter(Parts.id.in_(parts)) + .order_by(Parts.part_number) + ) + results = [] + for row in query.all(): + price = float(row.price or 0) + quantity = int(row.quantity or 0) + results.append( + { + "id": row.id, + "part_number": row.part_number, + "description": row.description, + "price": price, + "quantity": quantity, + "total": price * quantity, + } + ) + return results + + +def build_parts_used_pdf(db: Session, from_date: date, to_date: date, parts: list[int]): + results = 
get_parts_used_summary(db, from_date, to_date, parts) + running_total = 0.0 + for row in results: + running_total += row["total"] + row["running_total"] = running_total + + html_content = templates.get_template("parts_used_report.html").render( + rows=results, + from_date=from_date, + to_date=to_date, + ) + pdf_io = BytesIO() + HTML(string=html_content).write_pdf(pdf_io) + pdf_io.seek(0) + return pdf_io + + +def get_part(db: Session, part_id: int): + used_subq, added_subq, current_count = _part_count_subqueries() + row = db.execute( + select(Parts, current_count) + .outerjoin(used_subq, used_subq.c.part_id == Parts.id) + .outerjoin(added_subq, added_subq.c.part_id == Parts.id) + .where(Parts.id == part_id) + .options(selectinload(Parts.part_type), selectinload(Parts.meter_types)) + ).first() + if not row: + return None + + selected_part, curr = row + selected_part.current_count = curr + returned_part = parts.Part.model_validate(selected_part) + + if selected_part.part_type.name == "Register": + register_details = db.scalars( + select(meterRegisters).where(meterRegisters.part_id == selected_part.id) + ).first() + register_details_obj = None + if register_details is not None: + register_details_obj = ( + parts.Register.register_details.model_validate(register_details) + ) + returned_part = parts.Register( + **returned_part.model_dump(exclude_unset=True), + register_settings=register_details_obj, + ) + return returned_part + + +def add_parts(db: Session, payload: parts.PartsAddRequest): + part = db.scalars(select(Parts).where(Parts.id == payload.part_id)).first() + if not part: + raise HTTPException(status_code=404, detail="Part not found") + + db.add( + PartsAdded( + part_id=payload.part_id, + count=payload.count, + date=payload.date, + note=payload.note, + ) + ) + db.commit() + + used_subq, added_subq, current_count = _part_count_subqueries() + row = db.execute( + select(Parts, current_count) + .outerjoin(used_subq, used_subq.c.part_id == Parts.id) + 
.outerjoin(added_subq, added_subq.c.part_id == Parts.id) + .where(Parts.id == payload.part_id) + .options(selectinload(Parts.part_type), selectinload(Parts.meter_types)) + ).first() + if not row: + raise HTTPException(status_code=404, detail="Part not found") + part_obj, curr = row + part_obj.current_count = curr + return part_obj + + +def patch_part_history( + db: Session, part_id: int, payload: parts.PartHistoryUpdateRequest +): + part = db.scalars(select(Parts).where(Parts.id == part_id)).first() + if not part: + raise HTTPException(status_code=404, detail="Part not found") + + for row in payload.rows: + normalized_note = row.note.strip() if row.note else None + if normalized_note == "": + normalized_note = None + + if row.event_type == "added": + if row.delta <= 0: + raise HTTPException( + status_code=422, + detail="Added parts rows must have a positive change.", + ) + added_row = db.scalars( + select(PartsAdded).where( + PartsAdded.id == row.ref_id, + PartsAdded.part_id == part_id, + ) + ).first() + if not added_row: + raise HTTPException(status_code=404, detail="Parts added row not found.") + added_row.count = row.delta + added_row.date = row.event_date.date() + added_row.note = normalized_note + continue + + if row.delta >= 0: + raise HTTPException( + status_code=422, + detail="Work order rows must have a negative change.", + ) + parts_used_row = db.scalars( + select(PartsUsed).where( + PartsUsed.id == row.ref_id, + PartsUsed.part_id == part_id, + ) + ).first() + if not parts_used_row: + raise HTTPException(status_code=404, detail="Parts used row not found.") + activity = db.scalars( + select(MeterActivities).where( + MeterActivities.id == parts_used_row.meter_activity_id + ) + ).first() + if not activity: + raise HTTPException( + status_code=404, + detail="Meter activity for parts used row not found.", + ) + duration = ( + activity.timestamp_end - activity.timestamp_start + if activity.timestamp_end and activity.timestamp_start + else None + ) + 
parts_used_row.count = abs(row.delta) + activity.timestamp_start = row.event_date + activity.description = normalized_note + activity.timestamp_end = ( + row.event_date + duration if duration is not None else row.event_date + ) + + db.commit() + return build_part_history_response(part_id, db) diff --git a/api/services/storage.py b/api/services/storage.py new file mode 100644 index 00000000..3cb2f9ff --- /dev/null +++ b/api/services/storage.py @@ -0,0 +1,154 @@ +import os +import uuid +from datetime import timedelta +from pathlib import Path + +from fastapi import HTTPException, UploadFile +from google.auth import default, impersonated_credentials +from google.cloud import storage +from sqlalchemy import select +from sqlalchemy.orm import Session + +from api.models.meter import MeterActivities, MeterActivityPhotos + + +BUCKET_NAME = os.getenv("GCP_BUCKET_NAME", "") +PHOTO_PREFIX = os.getenv("GCP_PHOTO_PREFIX", "") +PHOTO_JWT_EXPIRE_SECONDS = 600 +TARGET_SERVICE_ACCOUNT = ( + "pvacd-meterapp@waterdatainitiative-271000.iam.gserviceaccount.com" +) + + +def get_activity_photo_record( + db: Session, activity_id: int, photo_file_name: str +) -> MeterActivityPhotos: + photo = ( + db.query(MeterActivityPhotos) + .filter( + MeterActivityPhotos.meter_activity_id == activity_id, + MeterActivityPhotos.file_name == photo_file_name, + ) + .first() + ) + + if not photo: + raise HTTPException(status_code=404, detail="Photo not found for this activity") + + return photo + + +def open_activity_photo(photo: MeterActivityPhotos): + try: + client = storage.Client() + bucket = client.bucket(BUCKET_NAME) + blob = bucket.blob(photo.gcs_path) + + if not blob.exists(client=client): + raise HTTPException(status_code=404, detail="Photo file missing from storage") + + blob.reload(client=client) + content_type = blob.content_type or "application/octet-stream" + headers = {"Content-Disposition": f'inline; filename="{photo.file_name}"'} + return blob.open("rb"), content_type, headers + except 
HTTPException: + raise + except Exception: + raise HTTPException(status_code=500, detail="Failed to retrieve photo") + + +async def save_activity_photos( + db: Session, + meter_activity: MeterActivities, + photos: list[UploadFile], + max_photos_per_meter: int, +): + if not photos: + return + + bucket = storage.Client().bucket(BUCKET_NAME) + + for file in photos: + ext = Path(file.filename).suffix or ".jpg" + unique_name = f"{uuid.uuid4()}{ext}" + blob_path = f"{PHOTO_PREFIX}/{meter_activity.id}/{unique_name}" + blob = bucket.blob(blob_path) + contents = await file.read() + blob.upload_from_string(contents, content_type=file.content_type) + + db.add( + MeterActivityPhotos( + meter_activity_id=meter_activity.id, + file_name=unique_name, + gcs_path=blob_path, + ) + ) + + db.commit() + db.refresh(meter_activity) + enforce_activity_photo_retention( + db=db, + meter_id=meter_activity.meter_id, + max_photos_per_meter=max_photos_per_meter, + bucket=bucket, + ) + + +def enforce_activity_photo_retention( + db: Session, + meter_id: int, + max_photos_per_meter: int, + bucket=None, +): + all_photos = ( + db.query(MeterActivityPhotos) + .join(MeterActivities) + .filter(MeterActivities.meter_id == meter_id) + .order_by(MeterActivityPhotos.uploaded_at.desc()) + .all() + ) + + if len(all_photos) <= max_photos_per_meter: + return + + bucket = bucket or storage.Client().bucket(BUCKET_NAME) + for photo in all_photos[max_photos_per_meter:]: + try: + bucket.blob(photo.gcs_path).delete() + except Exception as exc: + print(f"Warning: failed to delete {photo.gcs_path} from GCS: {exc}") + db.delete(photo) + + db.commit() + + +def delete_activity_photos(db: Session, activity_id: int): + photos = db.scalars( + select(MeterActivityPhotos).where( + MeterActivityPhotos.meter_activity_id == activity_id + ) + ).all() + + bucket = storage.Client().bucket(BUCKET_NAME) + for photo in photos: + try: + bucket.blob(photo.gcs_path).delete() + except Exception as exc: + print(f"Failed to delete 
{photo.gcs_path} from bucket: {exc}") + + +def create_signed_url(blob_path: str) -> str: + source_creds, _ = default() + creds = impersonated_credentials.Credentials( + source_credentials=source_creds, + target_principal=TARGET_SERVICE_ACCOUNT, + target_scopes=["https://www.googleapis.com/auth/devstorage.read_only"], + lifetime=3600, + ) + storage_client = storage.Client(credentials=creds) + blob = storage_client.bucket(BUCKET_NAME).blob(blob_path) + return blob.generate_signed_url( + version="v4", + expiration=timedelta(seconds=PHOTO_JWT_EXPIRE_SECONDS), + method="GET", + ) diff --git a/api/services/well_measurements.py b/api/services/well_measurements.py new file mode 100644 index 00000000..a170706c --- /dev/null +++ b/api/services/well_measurements.py @@ -0,0 +1,467 @@ +from base64 import b64encode +from collections import defaultdict +from datetime import date, datetime +from io import BytesIO +from pathlib import Path +from typing import Any, Dict, Optional +from zoneinfo import ZoneInfo +import calendar +import json +import os +import re +import zlib + +import matplotlib +from google.cloud import storage +from jinja2 import Environment, FileSystemLoader, select_autoescape +from matplotlib.pyplot import close, figure +from sqlalchemy import and_, func, select +from sqlalchemy.orm import Session, joinedload +from weasyprint import HTML + +from api.models.meter import ObservedPropertyTypeLU +from api.models.well import WellMeasurements, Wells +from api.schemas import well + + +matplotlib.use("Agg") + +WOODPECKER_BUCKET_NAME = os.getenv("GCP_WOODPECKER_BUCKET_NAME", "") +TEMPLATES_DIR = Path(__file__).resolve().parent.parent / "templates" + +templates = Environment( + loader=FileSystemLoader(TEMPLATES_DIR), + autoescape=select_autoescape(["html", "xml"]), +) + +SP_JOHNSON_WELL_ID = 2599 +DEPTH_TO_WATER_SENSOR_NAME = "Depth to Water" +MONITORING_USE_TYPE_ID = 11 + + +def read_woodpecker_waterlevels( + well_id: int, +) -> list[well.WellMeasurementDTO]: + if well_id 
!= SP_JOHNSON_WELL_ID: + raise ValueError("Invalid well ID") + + results: list[well.WellMeasurementDTO] = [] + seen_timestamps: set[str] = set() + + client = storage.Client() + bucket = client.bucket(WOODPECKER_BUCKET_NAME) + + for blob in bucket.list_blobs(): + if not blob.name.endswith(".json"): + continue + + payload = json.loads(blob.download_as_text()) + device_attributes = payload.get("deviceAttributes") or {} + tz_name = device_attributes.get("timeZone") or "UTC" + ra_number = device_attributes.get("wellId") or "" + + sensor_data = payload.get("sensorData") or [] + depth_sensor = next( + ( + sensor + for sensor in sensor_data + if (sensor.get("sensorName") or "").strip() == DEPTH_TO_WATER_SENSOR_NAME + ), + None, + ) + if not depth_sensor: + continue + + for measurement in depth_sensor.get("measurements") or []: + raw_ts = measurement.get("timestamp") + if not raw_ts: + continue + + ts = _parse_woodpecker_timestamp(raw_ts, tz_name) + ts_key = ts.isoformat() + if ts_key in seen_timestamps: + continue + seen_timestamps.add(ts_key) + + raw_value = measurement.get("data") + value = abs(raw_value) if raw_value is not None else None + + results.append( + well.WellMeasurementDTO( + id=_make_measurement_id(well_id, ts, value), + timestamp=ts, + value=value, + submitting_user=well.WellMeasurementDTO.UserDTO( + full_name="Woodpeckers" + ), + well=well.WellMeasurementDTO.WellDTO(ra_number=ra_number), + ) + ) + + results.sort(key=lambda item: item.timestamp) + return results + + +def _parse_woodpecker_timestamp(ts: str, tz_name: str) -> datetime: + dt_naive = datetime.strptime(ts, "%d/%m/%Y %H:%M:%S") + try: + tz = ZoneInfo(tz_name) + except Exception: + return dt_naive + return dt_naive.replace(tzinfo=tz) + + +def _make_measurement_id(well_id: int, ts: datetime, value: Optional[float]) -> int: + key = f"{well_id}|{ts.isoformat()}|{value if value is not None else 'null'}" + return zlib.crc32(key.encode("utf-8")) + + +def _group_and_average( + measurements: 
list[well.WellMeasurementDTO], + group_by_label: str, + ra_number: str, + synthetic_id_counter: int, +) -> tuple[list[well.WellMeasurementDTO], int]: + grouped: dict[str, list[float]] = defaultdict(list) + for measurement in measurements: + key = measurement.timestamp.strftime( + "%Y-%m" if group_by_label == "month" else "%Y-%m-%d" + ) + grouped[key].append(measurement.value) + + results: list[well.WellMeasurementDTO] = [] + for time_str, values in sorted(grouped.items()): + dt = datetime.strptime( + time_str, + "%Y-%m" if group_by_label == "month" else "%Y-%m-%d", + ) + results.append( + well.WellMeasurementDTO( + id=synthetic_id_counter, + timestamp=dt, + value=sum(values) / len(values), + submitting_user={"full_name": "System"}, + well={"ra_number": ra_number}, + ) + ) + synthetic_id_counter -= 1 + + return results, synthetic_id_counter + + +def _get_measurements_by_ids( + db: Session, + well_ids: list[int], + start: Optional[date], + end: Optional[date], +): + filters = [ + ObservedPropertyTypeLU.name == "Depth to water", + WellMeasurements.well_id.in_(well_ids), + ] + if start: + filters.append(WellMeasurements.timestamp >= start) + if end: + filters.append(WellMeasurements.timestamp <= datetime.combine(end, datetime.max.time())) + + stmt = ( + select(WellMeasurements) + .options( + joinedload(WellMeasurements.submitting_user), + joinedload(WellMeasurements.well), + ) + .join(ObservedPropertyTypeLU) + .where(and_(*filters)) + .order_by(WellMeasurements.well_id, WellMeasurements.timestamp) + ) + return db.scalars(stmt).all() + + +def read_waterlevels( + *, + db: Session, + well_ids: list[int], + from_date: Optional[date], + to_date: Optional[date], + is_averaging_all_wells: bool, + is_comparing_to_1970_average: bool, + comparison_year: Optional[str], +) -> list[well.WellMeasurementDTO]: + synthetic_id_counter = -1 + + group_by = None + if from_date and to_date: + group_by = "month" if (to_date - from_date).days >= 365 else "day" + + if not well_ids and not 
is_comparing_to_1970_average and not comparison_year: + return [] + + response_data: list[well.WellMeasurementDTO] = [] + + if is_averaging_all_wells and well_ids: + current_measurements = _get_measurements_by_ids(db, well_ids, from_date, to_date) + averaged, synthetic_id_counter = _group_and_average( + current_measurements, + group_by or "day", + "Average of wells", + synthetic_id_counter, + ) + response_data.extend(averaged) + + if not is_averaging_all_wells and well_ids: + response_data.extend(_get_measurements_by_ids(db, well_ids, from_date, to_date)) + + def add_year_average(year: int, label: str): + nonlocal synthetic_id_counter + if from_date and to_date and (to_date - from_date).days >= 365: + start = date(year, 1, 1) + end = date(year, 12, 31) + elif from_date and to_date: + start = date(year, from_date.month, 1) + end = date(year, to_date.month, calendar.monthrange(year, to_date.month)[1]) + else: + start = date(year, 1, 1) + end = date(year, 12, 31) + + monitoring_ids = [ + row[0] + for row in db.execute( + select(Wells.id).where(Wells.use_type_id == MONITORING_USE_TYPE_ID) + ).all() + ] + year_measurements = _get_measurements_by_ids(db, monitoring_ids, start, end) + averaged, synthetic_id_counter = _group_and_average( + year_measurements, + "month", + label, + synthetic_id_counter, + ) + response_data.extend(averaged) + + if is_comparing_to_1970_average: + add_year_average(1970, "1970 Average") + + if comparison_year: + try: + year_int = int(comparison_year) + except ValueError: + raise ValueError("comparisonYear must be a 4-digit year") + + current_year = datetime.now().year + if year_int < 1900 or year_int > current_year: + raise ValueError(f"comparisonYear must be between 1900 and {current_year}") + + if not (is_comparing_to_1970_average and year_int == 1970): + add_year_average(year_int, f"{year_int} Average") + + return response_data + + +def get_waterlevel_report_averages( + *, + well_ids: list[int], + from_date: Optional[date], + to_date: 
Optional[date], + db: Session, +) -> Dict[str, Any]: + if not well_ids: + return {"bucket": None, "per_well": [], "all_wells": []} + + if from_date is None and to_date is None: + return {"bucket": None, "per_well": [], "all_wells": []} + + start_dt = datetime.combine(from_date, datetime.min.time()) if from_date else None + end_dt = datetime.combine(to_date, datetime.max.time()) if to_date else None + + if from_date and to_date: + bucket_unit = "year" if (to_date - from_date).days >= 365 else "month" + else: + bucket_unit = "month" + + bucket = func.date_trunc(bucket_unit, WellMeasurements.timestamp).label("period_start") + base_filters = [ + ObservedPropertyTypeLU.name == "Depth to water", + WellMeasurements.well_id.in_(well_ids), + ] + if start_dt: + base_filters.append(WellMeasurements.timestamp >= start_dt) + if end_dt: + base_filters.append(WellMeasurements.timestamp <= end_dt) + + per_well_stmt = ( + select( + WellMeasurements.well_id.label("well_id"), + Wells.ra_number.label("ra_number"), + bucket, + func.avg(WellMeasurements.value).label("avg_value"), + ) + .join(Wells, Wells.id == WellMeasurements.well_id) + .join( + ObservedPropertyTypeLU, + ObservedPropertyTypeLU.id == WellMeasurements.observed_property_id, + ) + .where(and_(*base_filters)) + .group_by(WellMeasurements.well_id, Wells.ra_number, bucket) + .order_by(Wells.ra_number, bucket) + ) + + all_wells_stmt = ( + select(bucket, func.avg(WellMeasurements.value).label("avg_value")) + .join( + ObservedPropertyTypeLU, + ObservedPropertyTypeLU.id == WellMeasurements.observed_property_id, + ) + .where(and_(*base_filters)) + .group_by(bucket) + .order_by(bucket) + ) + + return { + "bucket": bucket_unit, + "per_well": [ + { + "well_id": row.well_id, + "ra_number": row.ra_number, + "period_start": row.period_start, + "avg_value": float(row.avg_value) if row.avg_value is not None else None, + } + for row in db.execute(per_well_stmt).all() + ], + "all_wells": [ + { + "period_start": row.period_start, + 
"avg_value": float(row.avg_value) if row.avg_value is not None else None, + } + for row in db.execute(all_wells_stmt).all() + ], + } + + +def build_waterlevels_pdf( + *, + db: Session, + well_ids: list[int], + from_date: date, + to_date: date, + is_averaging_all_wells: bool, + is_comparing_to_1970_average: bool, + comparison_year: Optional[str], +) -> BytesIO: + data = read_waterlevels( + db=db, + well_ids=well_ids, + from_date=from_date, + to_date=to_date, + is_averaging_all_wells=is_averaging_all_wells, + is_comparing_to_1970_average=is_comparing_to_1970_average, + comparison_year=comparison_year, + ) + if not data: + raise LookupError("No water-level data found") + + from_year = from_date.year + shift_years = set() + if is_comparing_to_1970_average: + shift_years.add(1970) + if comparison_year: + try: + shift_years.add(int(comparison_year)) + except ValueError: + pass + + rows = [] + data_by_well = defaultdict(list) + for measurement in data: + ts = measurement.timestamp + value = measurement.value + ra_number = ( + measurement.well["ra_number"] + if isinstance(measurement.well, dict) + else measurement.well.ra_number + ) + + rows.append( + { + "timestamp": ts.strftime("%Y-%m-%d %H:%M"), + "depth_to_water": value, + "well_ra_number": ra_number, + } + ) + + chart_ts = ts + if from_year: + match = re.match(r"^(\d{4}) Average$", ra_number) + if match: + year = int(match.group(1)) + if year in shift_years: + chart_ts = _shift_year_safe(ts, from_year) + + data_by_well[ra_number].append((chart_ts, value)) + + chart_b64 = _make_line_chart(data_by_well, "Depth of Water over Time") + report_subtext = None + if is_averaging_all_wells: + num_wells = len(well_ids) + well_word = "WELL" if num_wells == 1 else "WELLS" + report_subtext = ( + f"MONTHLY AVERAGE WATER LEVEL WITHIN {num_wells} PVACD RECORDER {well_word}\n" + "AVERAGES TAKEN FROM STEEL TAPE MEASUREMENTS MADE\n" + "ON OR NEAR THE 5TH, 15TH AND 25TH OF EACH MONTH" + ) + + averages = get_waterlevel_report_averages( + 
well_ids=well_ids, + from_date=from_date, + to_date=to_date, + db=db, + ) + html = templates.get_template("waterlevels_report.html").render( + from_date=from_date, + to_date=to_date, + observation_chart=chart_b64, + rows=rows, + report_title="ROSWELL ARTESIAN BASIN", + report_subtext=report_subtext, + averages=averages, + ) + + pdf_io = BytesIO() + HTML(string=html).write_pdf(pdf_io) + pdf_io.seek(0) + return pdf_io + + +def _shift_year_safe(dt: datetime, new_year: int): + try: + return dt.replace(year=new_year) + except ValueError: + last_day = calendar.monthrange(new_year, dt.month)[1] + return dt.replace(year=new_year, day=min(dt.day, last_day)) + + +def _make_line_chart(data: dict, title: str): + if not data: + return "" + fig = figure(figsize=(10, 6)) + ax = fig.add_subplot(111) + for ra_label, measurements in data.items(): + sorted_measurements = sorted(measurements, key=lambda item: item[0]) + timestamps = [ts for ts, _ in sorted_measurements] + values = [val for _, val in sorted_measurements] + ax.plot(timestamps, values, label=ra_label, marker="o") + ax.set_title(title) + ax.set_xlabel("Time") + ax.set_ylabel("Depth to Water") + ax.invert_yaxis() + fig.subplots_adjust(right=0.78) + ax.legend( + loc="center left", + bbox_to_anchor=(1.02, 0.5), + borderaxespad=0.0, + frameon=True, + ) + fig.autofmt_xdate() + buf = BytesIO() + fig.savefig(buf, format="png", bbox_inches="tight") + close(fig) + return b64encode(buf.getvalue()).decode("utf-8") diff --git a/api/services/work_orders.py b/api/services/work_orders.py new file mode 100644 index 00000000..37bdb32f --- /dev/null +++ b/api/services/work_orders.py @@ -0,0 +1,254 @@ +from datetime import datetime + +from fastapi import HTTPException +from sqlalchemy import or_, select +from sqlalchemy.exc import IntegrityError +from sqlalchemy.orm import Session, joinedload + +from api.models.meter import Meters, MeterActivities +from api.models.user import Users +from api.models.work_order import workOrders, 
workOrderStatusLU +from api.schemas import meter + + +def _work_order_query(): + return ( + select(workOrders) + .options( + joinedload(workOrders.status), + joinedload(workOrders.meter), + joinedload(workOrders.assigned_user), + ) + ) + + +def _load_associated_activities(db: Session, work_order_ids: list[int]): + if not work_order_ids: + return {} + + relevant_activities = db.scalars( + select(MeterActivities) + .options(joinedload(MeterActivities.location)) + .where(MeterActivities.work_order_id.in_(work_order_ids)) + ).all() + + activities_by_work_order = {} + for activity in relevant_activities: + activities_by_work_order.setdefault(activity.work_order_id, []).append( + { + "id": activity.id, + "timestamp_start": activity.timestamp_start, + "timestamp_end": activity.timestamp_end, + "description": activity.description, + "submitting_user_id": activity.submitting_user_id, + "meter_id": activity.meter_id, + "activity_type_id": activity.activity_type_id, + "location_id": activity.location_id, + "location_name": activity.location.name if activity.location else None, + "ose_share": activity.ose_share, + "water_users": activity.water_users, + } + ) + + return activities_by_work_order + + +def _serialize_work_order( + work_order: workOrders, + associated_activities: list[dict] | list[MeterActivities] | None = None, +) -> meter.WorkOrder: + return meter.WorkOrder( + work_order_id=work_order.id, + ose_request_id=work_order.ose_request_id, + date_created=work_order.date_created, + creator=work_order.creator, + meter_id=work_order.meter.id, + meter_serial=work_order.meter.serial_number, + title=work_order.title, + description=work_order.description, + status=work_order.status.name, + notes=work_order.notes, + assigned_user_id=work_order.assigned_user_id, + assigned_user=work_order.assigned_user.username + if work_order.assigned_user + else None, + associated_activities=associated_activities, + ) + + +def list_work_orders( + db: Session, + filter_by_status: list[str], + 
start_date: datetime, + work_order_id: list[int] | None = None, + assigned_user_id: int | None = None, + q: str | None = None, +): + stmt = ( + _work_order_query() + .join(workOrderStatusLU) + .where(workOrderStatusLU.name.in_(filter_by_status)) + .where(workOrders.date_created >= start_date) + ) + + if work_order_id: + stmt = stmt.where(workOrders.id.in_(work_order_id)) + + if assigned_user_id: + stmt = stmt.where(workOrders.assigned_user_id == assigned_user_id) + + if q: + q_like = f"%{q.strip()}%" + stmt = stmt.where( + or_( + workOrders.title.ilike(q_like), + workOrders.description.ilike(q_like), + workOrders.creator.ilike(q_like), + workOrders.notes.ilike(q_like), + workOrders.meter.has(Meters.serial_number.ilike(q_like)), + ) + ) + + work_order_rows = db.scalars(stmt).all() + activities_by_work_order = _load_associated_activities( + db, [work_order.id for work_order in work_order_rows] + ) + + return [ + { + "work_order_id": work_order.id, + "ose_request_id": work_order.ose_request_id, + "date_created": work_order.date_created, + "creator": work_order.creator, + "meter_id": work_order.meter.id, + "meter_serial": work_order.meter.serial_number, + "title": work_order.title, + "description": work_order.description, + "status": work_order.status.name, + "notes": work_order.notes, + "assigned_user_id": work_order.assigned_user_id, + "assigned_user": work_order.assigned_user.username + if work_order.assigned_user + else None, + "associated_activities": activities_by_work_order.get(work_order.id, []), + } + for work_order in work_order_rows + ] + + +def create_work_order( + db: Session, new_work_order: meter.CreateWorkOrder +) -> meter.WorkOrder: + open_status = db.scalars( + select(workOrderStatusLU).where(workOrderStatusLU.name == "Open") + ).first() + + work_order = workOrders( + date_created=new_work_order.date_created, + meter_id=new_work_order.meter_id, + title=new_work_order.title, + status_id=open_status.id, + ) + + if new_work_order.description: + 
work_order.description = new_work_order.description + if new_work_order.notes: + work_order.notes = new_work_order.notes + if new_work_order.assigned_user_id: + work_order.assigned_user_id = new_work_order.assigned_user_id + if new_work_order.creator: + work_order.creator = new_work_order.creator + if new_work_order.ose_request_id: + work_order.ose_request_id = new_work_order.ose_request_id + + try: + db.add(work_order) + db.commit() + except IntegrityError: + raise HTTPException( + status_code=409, detail="Title empty or already exists for this meter." + ) + + work_order = db.scalars(_work_order_query().where(workOrders.id == work_order.id)).first() + return _serialize_work_order(work_order) + + +def update_work_order( + db: Session, + patch_work_order_form: meter.PatchWorkOrder, + user: Users, +) -> meter.WorkOrder: + comparison_work_order = meter.PatchWorkOrder( + work_order_id=patch_work_order_form.work_order_id, + status=patch_work_order_form.status, + notes=patch_work_order_form.notes, + ) + + update_scope = "Technician" if comparison_work_order == patch_work_order_form else "Admin" + + if user.user_role.name not in [update_scope, "Admin"]: + raise HTTPException( + status_code=403, + detail="User does not have permission to update this work order.", + ) + + work_order = db.scalars( + _work_order_query().where(workOrders.id == patch_work_order_form.work_order_id) + ).first() + + if user.user_role.name == "Technician" and work_order.assigned_user_id != user.id: + raise HTTPException( + status_code=403, + detail="User does not have permission to update this work order.", + ) + + if patch_work_order_form.title == "": + raise HTTPException(status_code=422, detail="Title cannot be empty.") + + if patch_work_order_form.title: + work_order.title = patch_work_order_form.title + if patch_work_order_form.description: + work_order.description = patch_work_order_form.description + if patch_work_order_form.status: + new_status = db.scalars( + 
select(workOrderStatusLU).where( + workOrderStatusLU.name == patch_work_order_form.status + ) + ).first() + work_order.status_id = new_status.id + if patch_work_order_form.notes: + work_order.notes = patch_work_order_form.notes + if patch_work_order_form.creator: + work_order.creator = patch_work_order_form.creator + if patch_work_order_form.assigned_user_id: + work_order.assigned_user_id = patch_work_order_form.assigned_user_id + + try: + db.commit() + except IntegrityError: + raise HTTPException(status_code=409, detail="Title already exists for this meter.") + + work_order = db.scalars( + _work_order_query() + .join(workOrderStatusLU) + .where(workOrders.id == patch_work_order_form.work_order_id) + ).first() + associated_activities = db.scalars( + select(MeterActivities).where(MeterActivities.work_order_id == work_order.id) + ).all() + + return _serialize_work_order(work_order, associated_activities=list(associated_activities)) + + +def delete_work_order(db: Session, work_order_id: int): + work_order = db.scalars( + select(workOrders).where(workOrders.id == work_order_id) + ).first() + + if not work_order: + raise HTTPException(status_code=404, detail="Work order not found.") + + db.delete(work_order) + db.commit() + + return {"status": "success"} From 049357c46acb7c5cf560438c3dc6f93fda6c443e Mon Sep 17 00:00:00 2001 From: Tyler Adam Martinez Date: Sat, 21 Mar 2026 22:49:56 -0500 Subject: [PATCH 10/22] feat(public): Add loading display logic --- .../display/FieldLoadingSkeleton.tsx | 14 + .../src/components/display/QueryErrorBox.tsx | 49 +++ frontend/src/components/index.ts | 2 + frontend/src/routes/chlorides.tsx | 133 ++++---- frontend/src/routes/monitoringwells.tsx | 290 +++++++++--------- .../src/views/Chlorides/ChloridesPlot.tsx | 21 ++ .../src/views/Chlorides/ChloridesSections.tsx | 98 ++++++ frontend/src/views/Chlorides/index.ts | 1 + .../MonitoringWells/MonitoringWellsPlot.tsx | 163 +++++----- .../MonitoringWellsSections.tsx | 131 ++++++++ 
frontend/src/views/MonitoringWells/index.ts | 1 + 11 files changed, 610 insertions(+), 293 deletions(-) create mode 100644 frontend/src/components/display/FieldLoadingSkeleton.tsx create mode 100644 frontend/src/components/display/QueryErrorBox.tsx create mode 100644 frontend/src/views/Chlorides/ChloridesSections.tsx create mode 100644 frontend/src/views/MonitoringWells/MonitoringWellsSections.tsx diff --git a/frontend/src/components/display/FieldLoadingSkeleton.tsx b/frontend/src/components/display/FieldLoadingSkeleton.tsx new file mode 100644 index 00000000..49341062 --- /dev/null +++ b/frontend/src/components/display/FieldLoadingSkeleton.tsx @@ -0,0 +1,14 @@ +import { Box, Skeleton } from "@mui/material"; + +export const FieldLoadingSkeleton = () => { + return ( + + + + ); +}; diff --git a/frontend/src/components/display/QueryErrorBox.tsx b/frontend/src/components/display/QueryErrorBox.tsx new file mode 100644 index 00000000..c10d61ed --- /dev/null +++ b/frontend/src/components/display/QueryErrorBox.tsx @@ -0,0 +1,49 @@ +import { Alert, AlertTitle, Box, Button, SxProps, Theme } from "@mui/material"; + +type QueryErrorBoxProps = { + title?: string; + message: string; + onRetry?: () => void; + minHeight?: number | string; + sx?: SxProps; +}; + +export const QueryErrorBox = ({ + title = "Unable to Load Data", + message, + onRetry, + minHeight, + sx, +}: QueryErrorBoxProps) => { + return ( + + + Retry + + ) : undefined + } + > + {title} + {message} + + + ); +}; diff --git a/frontend/src/components/index.ts b/frontend/src/components/index.ts index 81f7025b..001b1205 100644 --- a/frontend/src/components/index.ts +++ b/frontend/src/components/index.ts @@ -21,7 +21,9 @@ export * from "./forms/WorkOrderSelect"; export * from "./forms/controlled"; export * from "./display/DirectionCard"; export * from "./display/EventTypeChip"; +export * from "./display/FieldLoadingSkeleton"; export * from "./display/IsTrueChip"; +export * from "./display/QueryErrorBox"; export * from 
"./display/RoleChip"; export * from "./display/StatCell"; export * from "./display/StyledToggleButton"; diff --git a/frontend/src/routes/chlorides.tsx b/frontend/src/routes/chlorides.tsx index 22955d28..e50fa2db 100644 --- a/frontend/src/routes/chlorides.tsx +++ b/frontend/src/routes/chlorides.tsx @@ -7,9 +7,7 @@ import { InputLabel, Card, CardContent, - Alert, - Button, - AlertTitle, + Box, } from "@mui/material"; import { Science } from "@mui/icons-material"; import { useMutation, useQuery } from "react-query"; @@ -29,6 +27,8 @@ import { useFetchWithAuth } from "@/hooks"; import { BackgroundBox, CustomCardHeader, + FieldLoadingSkeleton, + QueryErrorBox, ResizableSplitPanels, } from "@/components"; import { @@ -37,7 +37,7 @@ import { pageParam, routeSearchHydrator, } from "@/utils"; -import { Table, Plot } from "@/views/Chlorides"; +import { ChloridesPlotSection, ChloridesTableSection } from "@/views/Chlorides"; const searchSchema = z.object({ regionId: optionalPositiveInt.catch(undefined).default(undefined), @@ -224,8 +224,6 @@ function Chlorides() { }, }); - const error = regionsQuery.isError || manualQuery.isError; - const handleSubmitNewMeasurement = (data: Partial) => { if (regionId) { data.region_id = regionId; @@ -279,64 +277,56 @@ function Chlorides() { - {error && ( - regionsQuery.refetch()} + + {regionsQuery.isLoading ? ( + + ) : regionsQuery.isError ? ( + regionsQuery.refetch()} + /> + ) : ( + + Region + { - const next = Number(e.target.value); - navigate({ - to: "/chlorides", - search: (prev) => ({ - ...(prev as any), - regionId: next, - }), - replace: true, - }); - }} - > - {regionsQuery.isLoading && ( - Loading... - )} - {regionsQuery.isLoading && ( - Error loading Regions - )} - {regionsQuery?.data?.map((region) => ( - - Region {region.id} - {region.names.length > 0 ? ":" : null}{" "} - {region.names.slice(0, 3).join(", ")} - {region.names.length > 3 ? "..." : ""} - - ))} - - + {regionsQuery.isLoading && ( + Loading... 
+ )} + {regionsQuery?.data?.map((region) => ( + + Region {region.id} + {region.names.length > 0 ? ":" : null}{" "} + {region.names.slice(0, 3).join(", ")} + {region.names.length > 3 ? "..." : ""} + + ))} + + + )} + { @@ -355,21 +345,20 @@ function Chlorides() { }); }} left={ - m.timestamp) ?? []} - manual_vals={ - manualQuery?.data?.map((m) => ({ - value: m.value, - well: m.well.ra_number, - })) ?? [] - } + isError={manualQuery.isError} + isRegionSelected={!!regionId} + rows={manualQuery.data ?? []} + onRetry={() => manualQuery.refetch()} /> } right={ - manualQuery.refetch()} onOpenModal={() => setIsNewModalOpen(true)} onMeasurementSelect={handleMeasurementSelect} /> diff --git a/frontend/src/routes/monitoringwells.tsx b/frontend/src/routes/monitoringwells.tsx index 53c5b4e3..5b812d05 100644 --- a/frontend/src/routes/monitoringwells.tsx +++ b/frontend/src/routes/monitoringwells.tsx @@ -8,9 +8,7 @@ import { CardContent, ListSubheader, useTheme, - Alert, - Button, - AlertTitle, + Box, } from "@mui/material"; import { useQuery, useQueryClient } from "react-query"; import { useAuthUser } from "react-auth-kit"; @@ -39,9 +37,14 @@ import { CreateModal, UpdateModal } from "@/components/Modals/MonitoredWell"; import { CustomCardHeader, BackgroundBox, + FieldLoadingSkeleton, + QueryErrorBox, ResizableSplitPanels, } from "@/components"; -import { Table, Plot } from "@/views/MonitoringWells"; +import { + MonitoringWellsPlotSection, + MonitoringWellsTableSection, +} from "@/views/MonitoringWells"; import { optionalPositiveInt, pageParam, routeSearchHydrator } from "@/utils"; const searchSchema = z.object({ @@ -194,12 +197,6 @@ function MonitoringWells() { const updateMeasurement = useUpdateWaterLevel(() => refetchManual()); const deleteMeasurement = useDeleteWaterLevel(); - const error = - monitoredWellsQuery.isError || - errorManual || - errorSt2 || - errorJohnsonSensorData; - const handleSubmitNewMeasurement = (data: Partial) => { if (wellId) { data.well_id = wellId; @@ 
-277,126 +274,121 @@ function MonitoringWells() { const [outsideRecorderWells, regularWells] = separateAndSortMonitoredWells( monitoredWellsQuery?.data, ); + const plotLoadingSources = [ + isLoadingManual ? "Manual" : null, + isLoadingSt2 ? "Continuous" : null, + isLoadingJohnsonSensorData ? "Woodpecker" : null, + ].filter((source): source is string => source !== null); return ( - {error && ( - monitoredWellsQuery.refetch()} - > - Retry - - } - > - Error Loading Data - We couldn’t load monitoring wells. Please check your connection or - try again. - - )} - - Site - { + const next = Number(e.target.value); + navigate({ + to: "/monitoringwells", + search: (prev) => ({ + ...(prev as any), + wellId: next, + }), + replace: true, + }); }} > - {well.name?.trim() ? well.name : "Unnamed Well"} - - ))} - - + {monitoredWellsQuery.isFetching && ( + Loading... + )} + {(monitoredWellsQuery?.data?.length ?? 0 > 0) ? ( + + Wells + + ) : null} + {regularWells.map((well) => ( + + {well.name?.trim() ? well.name : "Unnamed Well"} + + ))} + {outsideRecorderWells.length > 0 ? ( + + Outside Recorder Wells + + ) : null} + {outsideRecorderWells.map((well) => ( + + {well.name?.trim() ? well.name : "Unnamed Well"} + + ))} + + + )} + { @@ -415,48 +407,42 @@ function MonitoringWells() { }); }} left={ - m.timestamp)} - manual_vals={(Array.isArray(manualMeasurements) - ? manualMeasurements - : [] - ).map((m) => m.value)} - logger_dates={ - Array.isArray(st2Measurements) - ? (st2Measurements ?? []).map((m) => m.resultTime) - : [] - } - logger_vals={ - Array.isArray(st2Measurements) - ? st2Measurements.map((m) => m.result) - : [] - } - sensor_dates={ - Array.isArray(johnsonSensorDataMeasurements) - ? johnsonSensorDataMeasurements?.map((m) => m.timestamp) - : undefined - } - sensor_vals={ - Array.isArray(johnsonSensorDataMeasurements) - ? 
johnsonSensorDataMeasurements?.map((m) => m.value) - : undefined + loadingSources={plotLoadingSources} + isError={ + !!errorManual || !!errorSt2 || !!errorJohnsonSensorData } + onRetry={() => { + refetchManual(); + if (dataStreamId) { + queryClient.invalidateQueries({ + queryKey: ["st2Measurements", dataStreamId], + }); + } + if (wellId === 2599) { + queryClient.invalidateQueries({ + queryKey: ["woodpeckers", wellId], + }); + } + }} /> } right={ -
well.id == wellId, )} isWellSelected={!!wellId} + isError={!!errorManual} + onRetry={() => refetchManual()} onOpenModal={() => setIsNewModalOpen(true)} onMeasurementSelect={handleMeasurementSelect} /> diff --git a/frontend/src/views/Chlorides/ChloridesPlot.tsx b/frontend/src/views/Chlorides/ChloridesPlot.tsx index 85389b81..243da2f0 100644 --- a/frontend/src/views/Chlorides/ChloridesPlot.tsx +++ b/frontend/src/views/Chlorides/ChloridesPlot.tsx @@ -8,10 +8,12 @@ export const Plot = ({ manual_dates, manual_vals, isLoading, + emptyMessage, }: { manual_dates: Date[]; manual_vals: { value: number; well: string }[]; isLoading: boolean; + emptyMessage?: string; }) => { const plotContainerRef = useRef(null); const plotRef = useRef(null); @@ -109,6 +111,25 @@ export const Plot = ({ Loading plot data... + ) : !hasData && emptyMessage ? ( + + + {emptyMessage} + + ) : ( void; +}; + +export const ChloridesPlotSection = ({ + isLoading, + isError, + isRegionSelected, + rows, + onRetry, +}: ChloridesPlotSectionProps) => { + if (isError) { + return ( + + ); + } + + return ( + m.timestamp)} + manual_vals={rows.map((m) => ({ + value: m.value, + well: m.well.ra_number, + }))} + /> + ); +}; + +type ChloridesTableSectionProps = { + rows: RegionMeasurementDTO[]; + isRegionSelected: boolean; + isError: boolean; + onRetry: () => void; + onOpenModal: () => void; + onMeasurementSelect: (data: { + row: { + id: number; + timestamp: Dayjs; + value: number; + submitting_user: { + id: number; + }; + well: { + id: number; + ra_number: string; + }; + }; + }) => void; +}; + +export const ChloridesTableSection = ({ + rows, + isRegionSelected, + isError, + onRetry, + onOpenModal, + onMeasurementSelect, +}: ChloridesTableSectionProps) => { + if (isError) { + return ( + + ); + } + + return ( +
+ ); +}; diff --git a/frontend/src/views/Chlorides/index.ts b/frontend/src/views/Chlorides/index.ts index 1f43d6af..0c0bf9b5 100644 --- a/frontend/src/views/Chlorides/index.ts +++ b/frontend/src/views/Chlorides/index.ts @@ -1,2 +1,3 @@ export * from "./ChloridesPlot"; +export * from "./ChloridesSections"; export * from "./ChloridesTable"; diff --git a/frontend/src/views/MonitoringWells/MonitoringWellsPlot.tsx b/frontend/src/views/MonitoringWells/MonitoringWellsPlot.tsx index 5a3288a1..deba842a 100644 --- a/frontend/src/views/MonitoringWells/MonitoringWellsPlot.tsx +++ b/frontend/src/views/MonitoringWells/MonitoringWellsPlot.tsx @@ -12,7 +12,8 @@ export const Plot = ({ sensor_dates, sensor_vals, isLoading, - isContinuousLoading = false, + emptyMessage, + loadingSources = [], }: { manual_dates: Date[]; manual_vals: number[]; @@ -21,7 +22,8 @@ export const Plot = ({ sensor_dates?: Date[]; sensor_vals?: number[]; isLoading: boolean; - isContinuousLoading?: boolean; + emptyMessage?: string; + loadingSources?: string[]; }) => { const plotContainerRef = useRef(null); const plotRef = useRef(null); @@ -42,61 +44,59 @@ export const Plot = ({ } }; - const data: Partial[] = useMemo( - () => { - const traces: Partial[] = []; + const data: Partial[] = useMemo(() => { + const traces: Partial[] = []; - if (manual_dates.length > 0) { - traces.push({ - x: manual_dates, - y: manual_vals, - type: "scattergl", - mode: "markers", - marker: { color: "red" }, - name: "Manual", - hovertemplate: - "Date: %{x|%B %-d, %Y}
Value: %{y} ft%{fullData.name}", - }); - } + if (manual_dates.length > 0) { + traces.push({ + x: manual_dates, + y: manual_vals, + type: "scattergl", + mode: "markers", + marker: { color: "red" }, + name: "Manual", + hovertemplate: + "Date: %{x|%B %-d, %Y}
Value: %{y} ft%{fullData.name}", + }); + } - if (logger_dates.length > 0) { - traces.push({ - x: logger_dates, - y: logger_vals, - type: "scattergl", - marker: { color: "blue" }, - name: "Continuous", - hovertemplate: - "Date: %{x|%B %-d, %Y}
Value: %{y} ft%{fullData.name}", - }); - } + if (logger_dates.length > 0) { + traces.push({ + x: logger_dates, + y: logger_vals, + type: "scattergl", + marker: { color: "blue" }, + name: "Continuous", + hovertemplate: + "Date: %{x|%B %-d, %Y}
Value: %{y} ft%{fullData.name}", + }); + } - if (sensor_dates && sensor_dates.length > 0) { - traces.push({ - x: sensor_dates, - y: sensor_vals, - type: "scattergl", - mode: "markers", - marker: { color: "purple" }, - name: "Woodpecker Sensor", - hovertemplate: - "Date: %{x|%B %-d, %Y}
Value: %{y} ft%{fullData.name}", - }); - } + if (sensor_dates && sensor_dates.length > 0) { + traces.push({ + x: sensor_dates, + y: sensor_vals, + type: "scattergl", + mode: "markers", + marker: { color: "purple" }, + name: "Woodpecker Sensor", + hovertemplate: + "Date: %{x|%B %-d, %Y}
Value: %{y} ft%{fullData.name}", + }); + } - return traces; - }, - [ - manual_dates, - manual_vals, - logger_dates, - logger_vals, - sensor_dates, - sensor_vals, - ], - ); + return traces; + }, [ + manual_dates, + manual_vals, + logger_dates, + logger_vals, + sensor_dates, + sensor_vals, + ]); const hasData = data.length > 0; + const hasLoadingOverlay = hasData && loadingSources.length > 0; useEffect(() => { const container = plotContainerRef.current; @@ -150,8 +150,27 @@ export const Plot = ({ Loading plot data... + ) : !hasData && emptyMessage ? ( + + + {emptyMessage} + + ) : ( - + (prev === "pan" ? "zoom" : "pan")) } > - + - {isContinuousLoading && ( + {hasLoadingOverlay && ( - - Continuous data is still loading. More points will appear - automatically. - + + + Loading additional data + + + {loadingSources.join(", ")} still querying. + + )} diff --git a/frontend/src/views/MonitoringWells/MonitoringWellsSections.tsx b/frontend/src/views/MonitoringWells/MonitoringWellsSections.tsx new file mode 100644 index 00000000..5190b99b --- /dev/null +++ b/frontend/src/views/MonitoringWells/MonitoringWellsSections.tsx @@ -0,0 +1,131 @@ +import { QueryErrorBox } from "@/components"; +import { + MonitoredWell, + ST2Measurement, + WellMeasurementDTO, +} from "@/interfaces"; +import { Plot, Table } from "@/views/MonitoringWells"; +import { Dayjs } from "dayjs"; + +type MonitoringWellsPlotSectionProps = { + manualMeasurements?: WellMeasurementDTO[]; + st2Measurements?: ST2Measurement[]; + johnsonSensorDataMeasurements?: WellMeasurementDTO[]; + isWellSelected: boolean; + isLoading: boolean; + loadingSources?: string[]; + isError: boolean; + onRetry: () => void; +}; + +export const MonitoringWellsPlotSection = ({ + manualMeasurements, + st2Measurements, + johnsonSensorDataMeasurements, + isWellSelected, + isLoading, + loadingSources, + isError, + onRetry, +}: MonitoringWellsPlotSectionProps) => { + if (isError) { + return ( + + ); + } + + return ( + m.timestamp)} + 
manual_vals={(Array.isArray(manualMeasurements) + ? manualMeasurements + : [] + ).map((m) => m.value)} + logger_dates={ + Array.isArray(st2Measurements) + ? st2Measurements.map((m) => m.resultTime) + : [] + } + logger_vals={ + Array.isArray(st2Measurements) + ? st2Measurements.map((m) => m.result) + : [] + } + sensor_dates={ + Array.isArray(johnsonSensorDataMeasurements) + ? johnsonSensorDataMeasurements.map((m) => m.timestamp) + : undefined + } + sensor_vals={ + Array.isArray(johnsonSensorDataMeasurements) + ? johnsonSensorDataMeasurements.map((m) => m.value) + : undefined + } + /> + ); +}; + +type MonitoringWellsTableSectionProps = { + rows: WellMeasurementDTO[]; + selectedWell?: MonitoredWell; + isWellSelected: boolean; + isError: boolean; + onRetry: () => void; + onOpenModal: () => void; + onMeasurementSelect: (data: { + row: { + id: number; + timestamp: Dayjs; + value: number; + submitting_user: { + id: number; + }; + }; + }) => void; +}; + +export const MonitoringWellsTableSection = ({ + rows, + selectedWell, + isWellSelected, + isError, + onRetry, + onOpenModal, + onMeasurementSelect, +}: MonitoringWellsTableSectionProps) => { + if (isError) { + return ( + + ); + } + + return ( +
+ ); +}; diff --git a/frontend/src/views/MonitoringWells/index.ts b/frontend/src/views/MonitoringWells/index.ts index b2e83932..03e2ae4b 100644 --- a/frontend/src/views/MonitoringWells/index.ts +++ b/frontend/src/views/MonitoringWells/index.ts @@ -1,2 +1,3 @@ export * from "./MonitoringWellsPlot"; +export * from "./MonitoringWellsSections"; export * from "./MonitoringWellsTable"; From 795337cd10460d15e7489409e1d3fa11cc470567 Mon Sep 17 00:00:00 2001 From: Tyler Adam Martinez Date: Wed, 25 Mar 2026 11:27:03 -0500 Subject: [PATCH 11/22] fix(OSE): Patch service to use orm name for parts used --- api/services/ose.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/api/services/ose.py b/api/services/ose.py index 1bfff605..6cdc3750 100644 --- a/api/services/ose.py +++ b/api/services/ose.py @@ -57,7 +57,7 @@ def _serialize_activity( ) -> ose.ActivityDTO: notes_strings = [note.note for note in activity.notes] parts_used_strings = [ - f"{part.part_type.name} ({part.part_number})" for part in activity.parts_used + f"{part.part_type.name} ({part.part_number})" for part in activity.parts_used_links ] services_performed_strings = [ service.service_name for service in activity.services_performed @@ -132,7 +132,7 @@ def get_shared_history( select(MeterActivities) .options( joinedload(MeterActivities.activity_type), - joinedload(MeterActivities.parts_used), + joinedload(MeterActivities.parts_used_links), joinedload(MeterActivities.meter), joinedload(MeterActivities.work_order), joinedload(MeterActivities.well), From e04fb31d87c0098756fe3522a9031ca06f93789f Mon Sep 17 00:00:00 2001 From: Tyler Adam Martinez Date: Wed, 25 Mar 2026 11:43:45 -0500 Subject: [PATCH 12/22] feat(Chlorides|MonitoringWells[Table]): Add loading state to table --- frontend/src/routes/chlorides.tsx | 1 + frontend/src/routes/monitoringwells.tsx | 1 + .../src/views/Chlorides/ChloridesSections.tsx | 3 ++ .../src/views/Chlorides/ChloridesTable.tsx | 32 ++++++++++++++- 
.../MonitoringWellsSections.tsx | 3 ++ .../MonitoringWells/MonitoringWellsTable.tsx | 39 ++++++++++++++++++- 6 files changed, 76 insertions(+), 3 deletions(-) diff --git a/frontend/src/routes/chlorides.tsx b/frontend/src/routes/chlorides.tsx index e50fa2db..91487601 100644 --- a/frontend/src/routes/chlorides.tsx +++ b/frontend/src/routes/chlorides.tsx @@ -357,6 +357,7 @@ function Chlorides() { manualQuery.refetch()} onOpenModal={() => setIsNewModalOpen(true)} diff --git a/frontend/src/routes/monitoringwells.tsx b/frontend/src/routes/monitoringwells.tsx index 5b812d05..c3b167a9 100644 --- a/frontend/src/routes/monitoringwells.tsx +++ b/frontend/src/routes/monitoringwells.tsx @@ -441,6 +441,7 @@ function MonitoringWells() { (well) => well.id == wellId, )} isWellSelected={!!wellId} + isLoading={isLoadingManual} isError={!!errorManual} onRetry={() => refetchManual()} onOpenModal={() => setIsNewModalOpen(true)} diff --git a/frontend/src/views/Chlorides/ChloridesSections.tsx b/frontend/src/views/Chlorides/ChloridesSections.tsx index e2824004..34314274 100644 --- a/frontend/src/views/Chlorides/ChloridesSections.tsx +++ b/frontend/src/views/Chlorides/ChloridesSections.tsx @@ -49,6 +49,7 @@ export const ChloridesPlotSection = ({ type ChloridesTableSectionProps = { rows: RegionMeasurementDTO[]; isRegionSelected: boolean; + isLoading: boolean; isError: boolean; onRetry: () => void; onOpenModal: () => void; @@ -71,6 +72,7 @@ type ChloridesTableSectionProps = { export const ChloridesTableSection = ({ rows, isRegionSelected, + isLoading, isError, onRetry, onOpenModal, @@ -91,6 +93,7 @@ export const ChloridesTableSection = ({
diff --git a/frontend/src/views/Chlorides/ChloridesTable.tsx b/frontend/src/views/Chlorides/ChloridesTable.tsx index e4948862..54f41467 100644 --- a/frontend/src/views/Chlorides/ChloridesTable.tsx +++ b/frontend/src/views/Chlorides/ChloridesTable.tsx @@ -1,5 +1,5 @@ import { useMemo } from "react"; -import { Box, Button } from "@mui/material"; +import { Box, Button, CircularProgress, Typography } from "@mui/material"; import { DataGrid, GridPagination, GridColDef } from "@mui/x-data-grid"; import { Add } from "@mui/icons-material"; import dayjs, { Dayjs } from "dayjs"; @@ -20,17 +20,20 @@ declare module "@mui/x-data-grid" { interface FooterExtraProps { onOpenModal: () => void; isRegionSelected: boolean; + isLoading: boolean; } export const Table = ({ rows, onOpenModal, isRegionSelected, + isLoading, onMeasurementSelect, }: { rows: RegionMeasurementDTO[]; onOpenModal: () => void; isRegionSelected: boolean; + isLoading: boolean; onMeasurementSelect: (data: { row: { id: number; @@ -94,6 +97,7 @@ export const Table = ({ void; isRegionSelected?: boolean; + isLoading?: boolean; }) => { const isAuthenticated = useIsAuthenticated(); return ( @@ -143,6 +151,7 @@ const Footer = ({ variant="contained" size="small" onClick={onOpenModal} + disabled={isLoading} sx={{ flexShrink: 0, width: { xs: "100%", sm: "auto" }, ml: 1.5 }} startIcon={} > @@ -154,3 +163,24 @@ const Footer = ({ ); }; + +const LoadingOverlay = () => ( + + + + Loading table data... 
+ + +); diff --git a/frontend/src/views/MonitoringWells/MonitoringWellsSections.tsx b/frontend/src/views/MonitoringWells/MonitoringWellsSections.tsx index 5190b99b..4fad83a1 100644 --- a/frontend/src/views/MonitoringWells/MonitoringWellsSections.tsx +++ b/frontend/src/views/MonitoringWells/MonitoringWellsSections.tsx @@ -84,6 +84,7 @@ type MonitoringWellsTableSectionProps = { rows: WellMeasurementDTO[]; selectedWell?: MonitoredWell; isWellSelected: boolean; + isLoading: boolean; isError: boolean; onRetry: () => void; onOpenModal: () => void; @@ -103,6 +104,7 @@ export const MonitoringWellsTableSection = ({ rows, selectedWell, isWellSelected, + isLoading, isError, onRetry, onOpenModal, @@ -124,6 +126,7 @@ export const MonitoringWellsTableSection = ({ rows={rows} selectedWell={selectedWell} isWellSelected={isWellSelected} + isLoading={isLoading} onOpenModal={onOpenModal} onMeasurementSelect={onMeasurementSelect} /> diff --git a/frontend/src/views/MonitoringWells/MonitoringWellsTable.tsx b/frontend/src/views/MonitoringWells/MonitoringWellsTable.tsx index 85e6944e..46d92ff5 100644 --- a/frontend/src/views/MonitoringWells/MonitoringWellsTable.tsx +++ b/frontend/src/views/MonitoringWells/MonitoringWellsTable.tsx @@ -1,5 +1,11 @@ import { useMemo } from "react"; -import { Box, Button, Tooltip } from "@mui/material"; +import { + Box, + Button, + CircularProgress, + Tooltip, + Typography, +} from "@mui/material"; import { DataGrid, GridPagination, GridColDef } from "@mui/x-data-grid"; import { Add } from "@mui/icons-material"; import dayjs, { Dayjs } from "dayjs"; @@ -18,6 +24,7 @@ declare module "@mui/x-data-grid" { onOpenModal: () => void; isWellSelected: boolean; selectedWell?: MonitoredWell; + isLoading: boolean; } } @@ -26,12 +33,14 @@ export const Table = ({ onOpenModal, isWellSelected, selectedWell, + isLoading, onMeasurementSelect, }: { rows: WellMeasurementDTO[]; onOpenModal: () => void; isWellSelected: boolean; selectedWell?: MonitoredWell; + isLoading: boolean; 
onMeasurementSelect: (data: { row: { id: number; @@ -84,6 +93,7 @@ export const Table = ({ void; isWellSelected: boolean; selectedWell?: MonitoredWell; + isLoading: boolean; }) => { const isAuthenticated = useIsAuthenticated(); const isPlugged = selectedWell?.well_status.status === "plugged"; @@ -148,7 +162,7 @@ const Footer = ({ variant="contained" size="small" onClick={onOpenModal} - disabled={isPlugged} + disabled={isPlugged || isLoading} sx={{ flexShrink: 0, width: { xs: "100%", sm: "auto" }, @@ -166,3 +180,24 @@ const Footer = ({ ); }; + +const LoadingOverlay = () => ( + + + + Loading table data... + + +); From 44f5c7de9e4abf094afbe8d679ec9bd5b1a84c48 Mon Sep 17 00:00:00 2001 From: Tyler Adam Martinez Date: Wed, 25 Mar 2026 11:56:12 -0500 Subject: [PATCH 13/22] fix(ProfileSection): Update UI to fix better on tablet devices --- frontend/src/views/Settings/components/ProfileSection.tsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/frontend/src/views/Settings/components/ProfileSection.tsx b/frontend/src/views/Settings/components/ProfileSection.tsx index aae63df1..01360e21 100644 --- a/frontend/src/views/Settings/components/ProfileSection.tsx +++ b/frontend/src/views/Settings/components/ProfileSection.tsx @@ -171,14 +171,14 @@ export function ProfileSection({ description="Upload or replace your account image." 
> - + - + Date: Wed, 25 Mar 2026 12:10:00 -0500 Subject: [PATCH 14/22] fix(SessionShared): Patch parsing of UTC so it will display the correct time based on the user's timezone --- .../Settings/components/SessionShared.tsx | 20 +++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/frontend/src/views/Settings/components/SessionShared.tsx b/frontend/src/views/Settings/components/SessionShared.tsx index bdf70bf5..cb67366b 100644 --- a/frontend/src/views/Settings/components/SessionShared.tsx +++ b/frontend/src/views/Settings/components/SessionShared.tsx @@ -13,19 +13,31 @@ import { } from "@mui/icons-material"; import type { ComponentType } from "react"; +function parseUtcDate(value?: string | null) { + if (!value) return null; + + const normalizedValue = + /[zZ]|[+-]\d{2}:\d{2}$/.test(value) ? value : `${value}Z`; + const parsedDate = new Date(normalizedValue); + + return Number.isNaN(parsedDate.getTime()) ? null : parsedDate; +} + export function formatDateTime(value?: string | null) { - if (!value) return "Not available"; + const parsedDate = parseUtcDate(value); + if (!parsedDate) return "Not available"; return new Intl.DateTimeFormat("en-US", { dateStyle: "medium", timeStyle: "short", - }).format(new Date(value)); + }).format(parsedDate); } export function formatRelativeTime(value?: string | null) { - if (!value) return "Unknown"; + const parsedDate = parseUtcDate(value); + if (!parsedDate) return "Unknown"; - const timestamp = new Date(value).getTime(); + const timestamp = parsedDate.getTime(); const diffMs = timestamp - Date.now(); const absMinutes = Math.round(Math.abs(diffMs) / (1000 * 60)); From 60c04fcab26d17d1068e0b1c444a1c36e287be1c Mon Sep 17 00:00:00 2001 From: Tyler Adam Martinez Date: Wed, 25 Mar 2026 12:48:46 -0500 Subject: [PATCH 15/22] fix(SelectedActivityDetails): Patch fetching & posting of the parts used --- frontend/src/components/forms/ChipSelect.tsx | 19 +++++++------ .../forms/controlled/NotesChipSelect.tsx | 22 
+++++++++------ .../forms/controlled/PartsChipSelect.tsx | 28 +++++++++++++------ .../forms/controlled/ServicesChipSelect.tsx | 22 +++++++++------ .../Meters/MeterHistory/MeterHistory.tsx | 14 +++++++++- .../MeterHistory/SelectedActivityDetails.tsx | 2 +- 6 files changed, 72 insertions(+), 35 deletions(-) diff --git a/frontend/src/components/forms/ChipSelect.tsx b/frontend/src/components/forms/ChipSelect.tsx index 39b656e9..8a26b180 100644 --- a/frontend/src/components/forms/ChipSelect.tsx +++ b/frontend/src/components/forms/ChipSelect.tsx @@ -3,6 +3,7 @@ import { Chip, FormControl, InputLabel, + ListItemText, MenuItem, OutlinedInput, Select, @@ -15,16 +16,18 @@ interface chipselectitem { } export default function ChipSelect({ + selected_ids, selected_values, options, label, - onSelect, + onChange, onDelete, }: { + selected_ids?: number[]; selected_values?: chipselectitem[]; options?: chipselectitem[]; label: string; - onSelect: (selected_id: number) => void; + onChange: (selected_ids: number[]) => void; onDelete: (delete_id: number) => void; }) { return ( @@ -32,14 +35,12 @@ export default function ChipSelect({ {label} diff --git a/frontend/src/components/forms/controlled/NotesChipSelect.tsx b/frontend/src/components/forms/controlled/NotesChipSelect.tsx index 883967e5..5cff1d25 100644 --- a/frontend/src/components/forms/controlled/NotesChipSelect.tsx +++ b/frontend/src/components/forms/controlled/NotesChipSelect.tsx @@ -15,6 +15,7 @@ export const NotesChipSelect = ({ name, control }: any) => { render={({ field }) => { return ( note.id) ?? []} selected_values={ field.value?.map((note: NoteTypeLU) => ({ id: note.id, @@ -28,17 +29,22 @@ export const NotesChipSelect = ({ name, control }: any) => { })) ?? 
[] } label="Notes" - onSelect={(selected_id) => { - field.onChange([ - ...field.value, - notesList.data?.find( - (note: NoteTypeLU) => note.id === selected_id, - ), - ]); + onChange={(selected_ids) => { + field.onChange( + selected_ids + .map((selected_id) => + notesList.data?.find( + (note: NoteTypeLU) => note.id === selected_id, + ), + ) + .filter(Boolean), + ); }} onDelete={(delete_id) => { field.onChange( - field.value.filter((note: NoteTypeLU) => note.id !== delete_id), + (field.value ?? []).filter( + (note: NoteTypeLU) => note.id !== delete_id, + ), ); }} /> diff --git a/frontend/src/components/forms/controlled/PartsChipSelect.tsx b/frontend/src/components/forms/controlled/PartsChipSelect.tsx index 9a9751f6..6112f00d 100644 --- a/frontend/src/components/forms/controlled/PartsChipSelect.tsx +++ b/frontend/src/components/forms/controlled/PartsChipSelect.tsx @@ -15,28 +15,40 @@ export const PartsChipSelect = ({ name, control, meterid }: any) => { render={({ field }) => { return ( part.id) ?? []} selected_values={ field.value?.map((part: Part) => ({ id: part.id, - name: part.part_type?.name + " " + part.part_number, + name: [part.part_type?.name, part.part_number] + .filter(Boolean) + .join(" "), })) ?? [] } options={ partsList.data?.map((part: Part) => ({ id: part.id, - name: part.part_type?.name + " " + part.part_number, + name: [part.part_type?.name, part.part_number] + .filter(Boolean) + .join(" "), })) ?? [] } label="Parts Used" - onSelect={(selected_id) => { - field.onChange([ - ...field.value, - partsList.data?.find((part: Part) => part.id === selected_id), - ]); + onChange={(selected_ids) => { + field.onChange( + selected_ids + .map((selected_id) => + partsList.data?.find( + (part: Part) => part.id === selected_id, + ), + ) + .filter(Boolean), + ); }} onDelete={(delete_id) => { field.onChange( - field.value.filter((part: Part) => part.id !== delete_id), + (field.value ?? 
[]).filter( + (part: Part) => part.id !== delete_id, + ), ); }} /> diff --git a/frontend/src/components/forms/controlled/ServicesChipSelect.tsx b/frontend/src/components/forms/controlled/ServicesChipSelect.tsx index c3010108..6dcbc600 100644 --- a/frontend/src/components/forms/controlled/ServicesChipSelect.tsx +++ b/frontend/src/components/forms/controlled/ServicesChipSelect.tsx @@ -15,6 +15,9 @@ export const ServicesChipSelect = ({ name, control }: any) => { render={({ field }) => { return ( service.id) ?? [] + } selected_values={ field.value?.map((service: ServiceTypeLU) => ({ id: service.id, @@ -28,17 +31,20 @@ export const ServicesChipSelect = ({ name, control }: any) => { })) ?? [] } label="Services" - onSelect={(selected_id) => { - field.onChange([ - ...field.value, - servicesList.data?.find( - (service: ServiceTypeLU) => service.id === selected_id, - ), - ]); + onChange={(selected_ids) => { + field.onChange( + selected_ids + .map((selected_id) => + servicesList.data?.find( + (service: ServiceTypeLU) => service.id === selected_id, + ), + ) + .filter(Boolean), + ); }} onDelete={(delete_id) => { field.onChange( - field.value.filter( + (field.value ?? 
[]).filter( (service: ServiceTypeLU) => service.id !== delete_id, ), ); diff --git a/frontend/src/views/Meters/MeterHistory/MeterHistory.tsx b/frontend/src/views/Meters/MeterHistory/MeterHistory.tsx index 2133ace5..11302f54 100644 --- a/frontend/src/views/Meters/MeterHistory/MeterHistory.tsx +++ b/frontend/src/views/Meters/MeterHistory/MeterHistory.tsx @@ -121,6 +121,18 @@ export const MeterHistory = ({ }; // Function to convert MeterHistoryDTO to PatchMeterActivity + function getHistoryItemParts(historyItem: MeterHistoryDTO) { + const directParts = historyItem.history_item.parts_used; + if (Array.isArray(directParts)) return directParts; + + const partLinks = historyItem.history_item.parts_used_links; + if (!Array.isArray(partLinks)) return []; + + return partLinks + .map((link: any) => link.part) + .filter((part: any) => part != null); + } + function convertHistoryActivity( historyItem: MeterHistoryDTO, ): PatchActivityForm { @@ -150,7 +162,7 @@ export const MeterHistory = ({ notes: historyItem.history_item.notes, services: historyItem.history_item.services_performed, - parts_used: historyItem.history_item.parts_used, + parts_used: getHistoryItemParts(historyItem), ose_share: historyItem.history_item.ose_share, }; diff --git a/frontend/src/views/Meters/MeterHistory/SelectedActivityDetails.tsx b/frontend/src/views/Meters/MeterHistory/SelectedActivityDetails.tsx index 200b7bf7..ebaeb9cd 100644 --- a/frontend/src/views/Meters/MeterHistory/SelectedActivityDetails.tsx +++ b/frontend/src/views/Meters/MeterHistory/SelectedActivityDetails.tsx @@ -87,7 +87,7 @@ export const SelectedActivityDetails = ({ note_ids: data.notes.map((note: any) => note.id), service_ids: data.services.map((service: any) => service.id), - part_ids: data.parts_used.map((part: any) => part.id), + part_ids: (data.parts_used ?? 
[]).map((part: any) => part.id), }; updateActivity.mutate(activity_data); }; From 3bc722ae7d091815b0152881eb2a320f0d4bd433 Mon Sep 17 00:00:00 2001 From: Tyler Adam Martinez Date: Wed, 25 Mar 2026 13:04:02 -0500 Subject: [PATCH 16/22] feat(HOme): update home page ui --- frontend/src/views/Home.tsx | 24 +++++++++++++----------- 1 file changed, 13 insertions(+), 11 deletions(-) diff --git a/frontend/src/views/Home.tsx b/frontend/src/views/Home.tsx index 63e745a6..2e92f053 100644 --- a/frontend/src/views/Home.tsx +++ b/frontend/src/views/Home.tsx @@ -10,7 +10,6 @@ import { Typography, } from "@mui/material"; import { Link } from "@tanstack/react-router"; -import HomeIcon from "@mui/icons-material/Home"; import ArrowOutwardIcon from "@mui/icons-material/ArrowOutward"; import AssignmentTurnedInOutlinedIcon from "@mui/icons-material/AssignmentTurnedInOutlined"; import AutorenewOutlinedIcon from "@mui/icons-material/AutorenewOutlined"; @@ -18,7 +17,7 @@ import BuildCircleOutlinedIcon from "@mui/icons-material/BuildCircleOutlined"; import FactCheckOutlinedIcon from "@mui/icons-material/FactCheckOutlined"; import MonitorHeartIcon from "@mui/icons-material/MonitorHeart"; import ScienceIcon from "@mui/icons-material/Science"; -import { BackgroundBox, CustomCardHeader } from "@/components"; +import { BackgroundBox } from "@/components"; import pvacd_logo from "@/img/pvacd_logo.png"; import meter_field from "@/img/meter_field.jpg"; import meter_storage from "@/img/meter_storage.jpg"; @@ -88,10 +87,9 @@ export const Home = () => { "linear-gradient(135deg, #f8fbff 0%, #eef5ff 55%, #f6f9ff 100%)", }} > - - + { - + { image={meter_field} alt="Field Meter" sx={{ - maxWidth: { xs: "100%", md: 200, xl: 220 }, - width: "100%", - height: { xs: 220, md: "auto" }, + flex: 1, + minWidth: 0, + maxWidth: { xs: "100%", sm: "calc(50% - 8px)" }, + width: { xs: "100%", sm: "auto" }, + height: { xs: 220 }, objectFit: "cover", borderRadius: 4, boxShadow: "0 18px 40px rgba(0,0,0,0.14)", @@ -218,9 
+218,11 @@ export const Home = () => { image={meter_storage} alt="Storage Meter" sx={{ - maxWidth: { xs: "100%", md: 200, xl: 220 }, - width: "100%", - height: { xs: 220, md: "auto" }, + flex: 1, + minWidth: 0, + maxWidth: { xs: "100%", sm: "calc(50% - 8px)" }, + width: { xs: "100%", sm: "auto" }, + height: { xs: 220 }, objectFit: "cover", borderRadius: 4, boxShadow: "0 18px 40px rgba(0,0,0,0.14)", From eaca4447bd3848f0ba7382050d5416685e8eae64 Mon Sep 17 00:00:00 2001 From: Tyler Adam Martinez Date: Wed, 25 Mar 2026 15:11:22 -0500 Subject: [PATCH 17/22] feat(Map): Update selected icon to stand out in the map --- api/routes/meters.py | 27 +++- .../MeterSelection/MeterSelectionMap.tsx | 54 +++++++- .../views/WellManagement/WellSelectionMap.tsx | 121 +++++++++++++++--- 3 files changed, 181 insertions(+), 21 deletions(-) diff --git a/api/routes/meters.py b/api/routes/meters.py index 6bb8c6c9..77eca4f7 100644 --- a/api/routes/meters.py +++ b/api/routes/meters.py @@ -8,7 +8,13 @@ from api.schemas import meter from api.schemas import well from api.models.location import LandOwners, Locations -from api.models.meter import Meters, MeterStatusLU, MeterTypeLU, meterRegisters +from api.models.meter import ( + ActivityTypeLU, + Meters, + MeterStatusLU, + MeterTypeLU, + meterRegisters, +) from api.models.well import Wells from api.routes.utils import _patch, _get from api.session import get_db @@ -202,17 +208,32 @@ def get_meters_locations( if not meter_ids: return [] # Short-circuit if nothing matched + pm_activity_type_id = db.scalars( + select(ActivityTypeLU.id).where( + ActivityTypeLU.name == "Preventative Maintenance" + ) + ).first() + + if not pm_activity_type_id: + raise HTTPException( + status_code=500, + detail="Preventative Maintenance activity type is not configured.", + ) + # Query latest PMs for those meters pm_query = text( """ SELECT MAX(timestamp_start) AS last_pm, meter_id FROM "MeterActivities" - WHERE activity_type_id = 4 + WHERE activity_type_id = 
:pm_activity_type_id AND meter_id = ANY(:mids) GROUP BY meter_id """ ) - pm_years = db.execute(pm_query, {"mids": meter_ids}).fetchall() + pm_years = db.execute( + pm_query, + {"mids": meter_ids, "pm_activity_type_id": pm_activity_type_id}, + ).fetchall() pm_dict = {row.meter_id: row.last_pm for row in pm_years} # Map to DTOs manually for added performance diff --git a/frontend/src/views/Meters/MeterSelection/MeterSelectionMap.tsx b/frontend/src/views/Meters/MeterSelection/MeterSelectionMap.tsx index 1522a538..1436dcd2 100644 --- a/frontend/src/views/Meters/MeterSelection/MeterSelectionMap.tsx +++ b/frontend/src/views/Meters/MeterSelection/MeterSelectionMap.tsx @@ -98,6 +98,28 @@ export default function MeterSelectionMap({ replace: true, }); }; + const selectedMeterId = search.meter_id; + const selectedMeter = + meterMarkers.data?.find( + (meter: MeterMapDTO) => meter.id === selectedMeterId, + ) ?? null; + + const renderMarkerHtml = (color: string, isSelected: boolean) => { + const size = isSelected ? 32 : 12; + const borderWidth = isSelected ? 3 : 2; + const boxShadow = isSelected + ? "0 0 0 4px rgba(255, 255, 255, 0.95), 0 0 0 5px rgba(0, 0, 0, 0.95)" + : "none"; + + return `
`; + }; return ( <> @@ -170,6 +192,10 @@ export default function MeterSelectionMap({ > {meterMarkers.isSuccess && meterMarkers.data.map((meter: MeterMapDTO) => { + if (meter.id === selectedMeterId) { + return null; + } + const color = meter.last_pm ? getMeterMarkerColor(meter.last_pm) : "black"; @@ -186,7 +212,8 @@ export default function MeterSelectionMap({ }} icon={L.divIcon({ className: "", - html: `
`, + html: renderMarkerHtml(color, false), + iconSize: [12, 12], })} > {meter.serial_number} @@ -195,6 +222,31 @@ export default function MeterSelectionMap({ })} + {selectedMeter && ( + onMeterSelection(selectedMeter.id), + }} + icon={L.divIcon({ + className: "", + html: renderMarkerHtml( + selectedMeter.last_pm + ? getMeterMarkerColor(selectedMeter.last_pm) + : "black", + true, + ), + iconSize: [22, 22], + })} + zIndexOffset={1000} + > + {selectedMeter.serial_number} + + )} {/* Section GeoJSON */} well.id === selectedWellId) ?? null; const handleSelectWell = (well: Well) => { navigate({ @@ -172,25 +175,51 @@ export default function WellSelectionMap({ }} > {wellQuery.isSuccess && - wellMarkers.map((well: Well) => ( - handleSelectWell(well), - }} - icon={getWellIcon(well)} - > - - {well.name || well.ra_number || well.id} - - - ))} + wellMarkers.map((well: Well) => { + if (well.id === selectedWellId) { + return null; + } + + return ( + handleSelectWell(well), + }} + icon={getWellIcon(well)} + > + + {well.name || well.ra_number || well.id} + + + ); + })} + {selectedWell && ( + handleSelectWell(selectedWell), + }} + icon={getSelectedWellIcon(selectedWell)} + zIndexOffset={1000} + > + + {selectedWell.name || + selectedWell.ra_number || + selectedWell.id} + + + )} @@ -272,3 +301,61 @@ const getWellIcon = (well: Well) => { } return BlueMapIcon; }; + +const createSelectedWellIcon = (icon: L.Icon) => + L.divIcon({ + className: "", + html: ` +
+ + + +
+ `, + iconSize: [42, 68], + iconAnchor: [21, 68], + popupAnchor: [1, -46], + }); + +const SelectedBlueMapIcon = createSelectedWellIcon(BlueMapIcon); +const SelectedRedMapIcon = createSelectedWellIcon(RedMapIcon); +const SelectedBlackMapIcon = createSelectedWellIcon(BlackMapIcon); + +const getSelectedWellIcon = (well: Well) => { + if (well.well_status_id === WellStatus.PLUGGED) { + return SelectedBlackMapIcon; + } + if (well.chloride_group_id != null) { + return SelectedRedMapIcon; + } + return SelectedBlueMapIcon; +}; From 9a0d444df1611eba2282babcce7fe06d77c2665d Mon Sep 17 00:00:00 2001 From: Tyler Adam Martinez Date: Wed, 25 Mar 2026 15:24:44 -0500 Subject: [PATCH 18/22] feat(chlorides_report): Update the logic to caludate north west, north east, south west, and south east --- api/routes/chlorides.py | 40 ++++++----- api/schemas/chlorides.py | 8 +-- api/templates/chlorides_report.html | 50 ++++++------- .../maps/layers/SoutheastGuideLayer.tsx | 40 +++++++++-- .../src/views/Reports/Chlorides/index.tsx | 70 +++++++------------ 5 files changed, 108 insertions(+), 100 deletions(-) diff --git a/api/routes/chlorides.py b/api/routes/chlorides.py index 6b727312..b21f3987 100644 --- a/api/routes/chlorides.py +++ b/api/routes/chlorides.py @@ -107,7 +107,8 @@ def get_chlorides_report( db: Session = Depends(get_db), ): """ - Returns min/max/avg for north/south/east/west halves **within the SE quadrant of New Mexico**, + Returns min/max/avg for north-west/north-east/south-west/south-east quadrants + within the SE quadrant of New Mexico, over the specified [from_month, to_month] inclusive range, for chloride wells in the given group. 
""" @@ -156,32 +157,33 @@ def get_chlorides_report( ) ] - north_vals: List[float] = [] - south_vals: List[float] = [] - east_vals: List[float] = [] - west_vals: List[float] = [] + north_west_vals: List[float] = [] + north_east_vals: List[float] = [] + south_west_vals: List[float] = [] + south_east_vals: List[float] = [] for val, lat, lon in se_rows: if val is None: continue # skip null chloride values - # North vs South halves within the SE quadrant - if float(lat) >= SE_MID_LAT: - north_vals.append(float(val)) - else: - south_vals.append(float(val)) + lat_value = float(lat) + lon_value = float(lon) + chloride_value = float(val) - # East vs West halves within the SE quadrant - if float(lon) >= SE_MID_LON: - east_vals.append(float(val)) + if lat_value >= SE_MID_LAT and lon_value < SE_MID_LON: + north_west_vals.append(chloride_value) + elif lat_value >= SE_MID_LAT and lon_value >= SE_MID_LON: + north_east_vals.append(chloride_value) + elif lat_value < SE_MID_LAT and lon_value < SE_MID_LON: + south_west_vals.append(chloride_value) else: - west_vals.append(float(val)) + south_east_vals.append(chloride_value) return chlorides.ChlorideReportNums( - north=_stats(north_vals), - south=_stats(south_vals), - east=_stats(east_vals), - west=_stats(west_vals), + north_west=_stats(north_west_vals), + north_east=_stats(north_east_vals), + south_west=_stats(south_west_vals), + south_east=_stats(south_east_vals), ) @@ -196,7 +198,7 @@ def download_chlorides_report_pdf( db: Session = Depends(get_db), ): """ - Generate a PDF chloride report (north/south/east/west stats) + Generate a PDF chloride report (north-west/north-east/south-west/south-east stats) for the SE quadrant of New Mexico. 
""" # Re-use existing logic diff --git a/api/schemas/chlorides.py b/api/schemas/chlorides.py index 123fdbaa..dfe7ac06 100644 --- a/api/schemas/chlorides.py +++ b/api/schemas/chlorides.py @@ -12,7 +12,7 @@ class MinMaxAvgMedCount(BaseModel): class ChlorideReportNums(BaseModel): - north: MinMaxAvgMedCount - south: MinMaxAvgMedCount - east: MinMaxAvgMedCount - west: MinMaxAvgMedCount + north_west: MinMaxAvgMedCount + north_east: MinMaxAvgMedCount + south_west: MinMaxAvgMedCount + south_east: MinMaxAvgMedCount diff --git a/api/templates/chlorides_report.html b/api/templates/chlorides_report.html index 748c9e01..3e8472ce 100644 --- a/api/templates/chlorides_report.html +++ b/api/templates/chlorides_report.html @@ -46,39 +46,39 @@

Chloride Report

- - - - - - + + + + + + - - - - - - + + + + + + - - - - - - + + + + + + - - - - - - + + + + + +
North{{ report.north.min }}{{ report.north.max }}{{ "%.2f"|format(report.north.avg or 0) }}{{ report.north.median }}{{ report.north.count }}North West{{ report.north_west.min }}{{ report.north_west.max }}{{ "%.2f"|format(report.north_west.avg or 0) }}{{ report.north_west.median }}{{ report.north_west.count }}
South{{ report.south.min }}{{ report.south.max }}{{ "%.2f"|format(report.south.avg or 0) }}{{ report.south.median }}{{ report.south.count }}North East{{ report.north_east.min }}{{ report.north_east.max }}{{ "%.2f"|format(report.north_east.avg or 0) }}{{ report.north_east.median }}{{ report.north_east.count }}
East{{ report.east.min }}{{ report.east.max }}{{ "%.2f"|format(report.east.avg or 0) }}{{ report.east.median }}{{ report.east.count }}South West{{ report.south_west.min }}{{ report.south_west.max }}{{ "%.2f"|format(report.south_west.avg or 0) }}{{ report.south_west.median }}{{ report.south_west.count }}
West{{ report.west.min }}{{ report.west.max }}{{ "%.2f"|format(report.west.avg or 0) }}{{ report.west.median }}{{ report.west.count }}South East{{ report.south_east.min }}{{ report.south_east.max }}{{ "%.2f"|format(report.south_east.avg or 0) }}{{ report.south_east.median }}{{ report.south_east.count }}
- \ No newline at end of file + diff --git a/frontend/src/components/maps/layers/SoutheastGuideLayer.tsx b/frontend/src/components/maps/layers/SoutheastGuideLayer.tsx index e921bab3..e3aec2af 100644 --- a/frontend/src/components/maps/layers/SoutheastGuideLayer.tsx +++ b/frontend/src/components/maps/layers/SoutheastGuideLayer.tsx @@ -37,8 +37,11 @@ const vertLine = [ const labelIcon = (text: string) => L.divIcon({ - className: "", + className: "chlorides-report-label", + iconSize: [0, 0], + iconAnchor: [0, 0], html: `
${text}
`, }); @@ -83,11 +87,35 @@ export const SoutheastGuideLayer = ({ pathOptions={{ color: "#1976d2", weight: 2, dashArray: "6 6" }} /> - {/* Labels (placed toward the center of each half) */} - - - - + {/* Labels placed near the center of each quadrant */} + + + + {/* Optional: center dot where lines cross */} {/* ' })} /> */} diff --git a/frontend/src/views/Reports/Chlorides/index.tsx b/frontend/src/views/Reports/Chlorides/index.tsx index 720caa7c..44db5723 100644 --- a/frontend/src/views/Reports/Chlorides/index.tsx +++ b/frontend/src/views/Reports/Chlorides/index.tsx @@ -89,10 +89,10 @@ interface iMinMaxAvgMedCount { } interface iChlorideReportNums { - north: iMinMaxAvgMedCount; - south: iMinMaxAvgMedCount; - east: iMinMaxAvgMedCount; - west: iMinMaxAvgMedCount; + north_west: iMinMaxAvgMedCount; + north_east: iMinMaxAvgMedCount; + south_west: iMinMaxAvgMedCount; + south_east: iMinMaxAvgMedCount; } const isoToDayjs = (s?: string, fallback?: Dayjs) => @@ -107,6 +107,12 @@ const OVERLAY_NAMES = [ ] as const; const DEFAULT_BASE_LAYER = "OpenStreetMap"; const DEFAULT_OVERLAYS = ["Clorides Report Region Guide", "Wells"]; +const REPORT_SECTIONS = [ + { title: "North West", key: "north_west" }, + { title: "North East", key: "north_east" }, + { title: "South West", key: "south_west" }, + { title: "South East", key: "south_east" }, +] as const; export const ChloridesReportView = () => { const navigate = useNavigate(); @@ -318,8 +324,8 @@ export const ChloridesReportView = () => { {chloridesQuery.isLoading && ( - {[0, 1, 2, 3].map((i) => ( - + {REPORT_SECTIONS.map(({ key }) => ( + { )} {!chloridesQuery.isLoading && !chloridesQuery.isError && ( - - - - - - - - - - - - + {REPORT_SECTIONS.map(({ title, key }) => ( + + + + ))} )} From 04a418949941cc9c608a33c885b7a0ab70c71528 Mon Sep 17 00:00:00 2001 From: Tyler Adam Martinez Date: Wed, 25 Mar 2026 16:26:58 -0500 Subject: [PATCH 19/22] fix(MeterMap): Update logic to fallback to lastest Location Only if no PM associated --- 
api/routes/meters.py | 49 ++++++++++++++++--- api/schemas/meter.py | 3 +- frontend/src/interfaces/MeterMapDTO.ts | 3 +- .../MeterSelection/MeterSelectionMap.tsx | 15 +++--- 4 files changed, 55 insertions(+), 15 deletions(-) diff --git a/api/routes/meters.py b/api/routes/meters.py index 77eca4f7..def625b9 100644 --- a/api/routes/meters.py +++ b/api/routes/meters.py @@ -213,28 +213,62 @@ def get_meters_locations( ActivityTypeLU.name == "Preventative Maintenance" ) ).first() + location_only_activity_type_id = db.scalars( + select(ActivityTypeLU.id).where(ActivityTypeLU.name == "Location Only") + ).first() if not pm_activity_type_id: raise HTTPException( status_code=500, detail="Preventative Maintenance activity type is not configured.", ) + if not location_only_activity_type_id: + raise HTTPException( + status_code=500, + detail="Location Only activity type is not configured.", + ) - # Query latest PMs for those meters - pm_query = text( + # Query latest PMs tied directly to the meter + meter_pm_query = text( """ - SELECT MAX(timestamp_start) AS last_pm, meter_id + SELECT MAX(timestamp_start) AS last_pm_meter_activity, meter_id FROM "MeterActivities" WHERE activity_type_id = :pm_activity_type_id AND meter_id = ANY(:mids) GROUP BY meter_id """ ) - pm_years = db.execute( - pm_query, + meter_pm_rows = db.execute( + meter_pm_query, {"mids": meter_ids, "pm_activity_type_id": pm_activity_type_id}, ).fetchall() - pm_dict = {row.meter_id: row.last_pm for row in pm_years} + meter_pm_dict = { + row.meter_id: row.last_pm_meter_activity for row in meter_pm_rows + } + + location_only_dict = {} + + if meter_ids: + location_only_query = text( + """ + SELECT MAX(timestamp_start) AS last_location_only_meter_activity, meter_id + FROM "MeterActivities" + WHERE activity_type_id = :location_only_activity_type_id + AND meter_id = ANY(:mids) + GROUP BY meter_id + """ + ) + location_only_rows = db.execute( + location_only_query, + { + "mids": meter_ids, + "location_only_activity_type_id": 
location_only_activity_type_id, + }, + ).fetchall() + location_only_dict = { + row.meter_id: row.last_location_only_meter_activity + for row in location_only_rows + } # Map to DTOs manually for added performance meter_map_list = [] @@ -254,7 +288,8 @@ def get_meters_locations( "longitude": row.longitude, "trss": row.trss, }, - last_pm=pm_dict.get(row.id), + last_pm_meter_activity=meter_pm_dict.get(row.id), + last_location_only_meter_activity=location_only_dict.get(row.id), ) ) diff --git a/api/schemas/meter.py b/api/schemas/meter.py index 1015f7ab..4186fd48 100644 --- a/api/schemas/meter.py +++ b/api/schemas/meter.py @@ -74,7 +74,8 @@ class LocationDTO(ORMBase): serial_number: str well: WellDTO | None = None location: LocationDTO | None = None - last_pm: datetime | None = None + last_pm_meter_activity: datetime | None = None + last_location_only_meter_activity: datetime | None = None class MeterStatusLU(ORMBase): diff --git a/frontend/src/interfaces/MeterMapDTO.ts b/frontend/src/interfaces/MeterMapDTO.ts index 55b97326..82f5b7dc 100644 --- a/frontend/src/interfaces/MeterMapDTO.ts +++ b/frontend/src/interfaces/MeterMapDTO.ts @@ -9,5 +9,6 @@ export interface MeterMapDTO { longitude: number; latitude: number; }; - last_pm: string; + last_pm_meter_activity: string | null; + last_location_only_meter_activity: string | null; } diff --git a/frontend/src/views/Meters/MeterSelection/MeterSelectionMap.tsx b/frontend/src/views/Meters/MeterSelection/MeterSelectionMap.tsx index 1436dcd2..23e1a55b 100644 --- a/frontend/src/views/Meters/MeterSelection/MeterSelectionMap.tsx +++ b/frontend/src/views/Meters/MeterSelection/MeterSelectionMap.tsx @@ -121,6 +121,13 @@ export default function MeterSelectionMap({ ">`; }; + const getMarkerColor = (meter: MeterMapDTO) => { + const markerDate = + meter.last_pm_meter_activity ?? meter.last_location_only_meter_activity; + + return markerDate ? 
getMeterMarkerColor(markerDate) : "black"; + }; + return ( <> Date: Wed, 25 Mar 2026 16:31:00 -0500 Subject: [PATCH 20/22] fix(Topbar): Patch menu to display user's display name not full name --- frontend/src/components/layout/Topbar.tsx | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/frontend/src/components/layout/Topbar.tsx b/frontend/src/components/layout/Topbar.tsx index 69ae03f9..091eceab 100644 --- a/frontend/src/components/layout/Topbar.tsx +++ b/frontend/src/components/layout/Topbar.tsx @@ -72,6 +72,7 @@ export const Topbar = ({ const user = authUser(); const role: string = user?.user_role?.name; const fullName = user?.full_name ?? user?.display_name ?? "Unknown"; + const displayName = user?.display_name ?? "Unknown"; const email = user?.email ?? "No email available"; const isLoggedIn = !!user; const unreadNotificationsQuery = useGetUnreadNotificationCount({ @@ -384,7 +385,7 @@ export const Topbar = ({ }} noWrap > - {fullName} + {displayName} Date: Wed, 25 Mar 2026 17:23:09 -0500 Subject: [PATCH 21/22] feat(Settings): update ui of known and session history section --- .../components/KnownDevicesSection.tsx | 32 ++++++----- .../components/SessionHistorySection.tsx | 53 +++++++++++++------ .../Settings/components/SessionShared.tsx | 11 ++-- 3 files changed, 64 insertions(+), 32 deletions(-) diff --git a/frontend/src/views/Settings/components/KnownDevicesSection.tsx b/frontend/src/views/Settings/components/KnownDevicesSection.tsx index 6878bd91..cb61d766 100644 --- a/frontend/src/views/Settings/components/KnownDevicesSection.tsx +++ b/frontend/src/views/Settings/components/KnownDevicesSection.tsx @@ -1,4 +1,4 @@ -import { Alert, Box, Skeleton, Stack } from "@mui/material"; +import { Alert, Box, Skeleton, Stack, Typography } from "@mui/material"; import { alpha } from "@mui/material/styles"; import { KnownDeviceSummary } from "@/interfaces"; import { @@ -51,29 +51,31 @@ function KnownDeviceRow({ device }: { device: KnownDeviceSummary }) { - 
+ Sessions - {device.session_count} + {device.session_count} - + Active now - {device.active_session_count} + + {device.active_session_count} + - + First seen - + {formatDateTime(device.signed_in_at_first)} - + - + Last seen - + {formatDateTime(device.last_seen_at)} - + @@ -94,8 +96,10 @@ export function KnownDevicesSection({ <> {isLoading ? ( - - + + + + ) : isError ? ( Unable to load known devices right now. diff --git a/frontend/src/views/Settings/components/SessionHistorySection.tsx b/frontend/src/views/Settings/components/SessionHistorySection.tsx index c009d48f..77ad5c00 100644 --- a/frontend/src/views/Settings/components/SessionHistorySection.tsx +++ b/frontend/src/views/Settings/components/SessionHistorySection.tsx @@ -6,6 +6,7 @@ import { Skeleton, Stack, Switch, + Tooltip, Typography, } from "@mui/material"; import { alpha } from "@mui/material/styles"; @@ -75,30 +76,52 @@ function SessionRow({ + {formatDateTime(session.signed_in_at)} + + } /> + + + {formatDateTime(session.last_seen_at)} + + + } /> + + {session.sign_out_reason_name + ? formatReasonLabel(session.sign_out_reason_name) + : "Signed out"} + + ) : ( + "Still Active" + ) } /> @@ -164,13 +187,13 @@ export function SessionHistorySection({ onChange={(_, checked) => onShowClosedSessionsChange(checked)} /> } - label={`Show Closed Session${closedSessions?.length > 1 ? "s" : null}`} + label={`Show Closed Session${(closedSessions?.length ?? 0) > 1 ? "s" : ""}`} /> {isLoading ? ( - - + + ) : isError ? ( diff --git a/frontend/src/views/Settings/components/SessionShared.tsx b/frontend/src/views/Settings/components/SessionShared.tsx index cb67366b..407102f6 100644 --- a/frontend/src/views/Settings/components/SessionShared.tsx +++ b/frontend/src/views/Settings/components/SessionShared.tsx @@ -16,8 +16,9 @@ import type { ComponentType } from "react"; function parseUtcDate(value?: string | null) { if (!value) return null; - const normalizedValue = - /[zZ]|[+-]\d{2}:\d{2}$/.test(value) ? 
value : `${value}Z`; + const normalizedValue = /[zZ]|[+-]\d{2}:\d{2}$/.test(value) + ? value + : `${value}Z`; const parsedDate = new Date(normalizedValue); return Number.isNaN(parsedDate.getTime()) ? null : parsedDate; @@ -63,7 +64,11 @@ export function formatRelativeTime(value?: string | null) { export function formatReasonLabel(value?: string | null) { if (!value) return ""; - return value.split("_").join(" "); + + return value + .split("_") + .map((word) => word.charAt(0).toUpperCase() + word.slice(1)) + .join(" "); } export function getDeviceIcon( From b3bb6f141b4b55e1e19b7a9e284f2a129fc41d22 Mon Sep 17 00:00:00 2001 From: Tyler Adam Martinez Date: Wed, 25 Mar 2026 17:46:56 -0500 Subject: [PATCH 22/22] chore(package-lock): Update pkgs --- frontend/package-lock.json | 1207 +++++++++++++++++++----------------- 1 file changed, 621 insertions(+), 586 deletions(-) diff --git a/frontend/package-lock.json b/frontend/package-lock.json index a05b7ff4..6e6f7797 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -235,23 +235,23 @@ } }, "node_modules/@babel/helpers": { - "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.28.6.tgz", - "integrity": "sha512-xOBvwq86HHdB7WUDTfKfT/Vuxh7gElQ+Sfti2Cy6yIWNW05P8iUslOVcZ4/sKbE+/jQaukQAdz/gf3724kYdqw==", + "version": "7.29.2", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.29.2.tgz", + "integrity": "sha512-HoGuUs4sCZNezVEKdVcwqmZN8GoHirLUcLaYVNBK2J0DadGtdcqgr3BCbvH8+XUo4NGjNl3VOtSjEKNzqfFgKw==", "dev": true, "license": "MIT", "dependencies": { "@babel/template": "^7.28.6", - "@babel/types": "^7.28.6" + "@babel/types": "^7.29.0" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/parser": { - "version": "7.29.0", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.29.0.tgz", - "integrity": "sha512-IyDgFV5GeDUVX4YdF/3CPULtVGSXXMLh1xVIgdCgxApktqnQV0r7/8Nqthg+8YLGaAtdyIlo2qIdZrbCv4+7ww==", + "version": "7.29.2", + "resolved": 
"https://registry.npmjs.org/@babel/parser/-/parser-7.29.2.tgz", + "integrity": "sha512-4GgRzy/+fsBa72/RZVJmGKPmZu9Byn8o4MoLpmNe1m8ZfYnz5emHLQz3U4gLud6Zwl0RZIcgiLD7Uq7ySFuDLA==", "license": "MIT", "dependencies": { "@babel/types": "^7.29.0" @@ -296,9 +296,9 @@ } }, "node_modules/@babel/runtime": { - "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.28.6.tgz", - "integrity": "sha512-05WQkdpL9COIMz4LjTxGpPNCdlpyimKppYNoJ5Di5EUObifl8t4tuLuUBBZEpoLYOmfvIWrsp9fCl0HoPRVTdA==", + "version": "7.29.2", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.29.2.tgz", + "integrity": "sha512-JiDShH45zKHWyGe4ZNVRrCjBz8Nh9TMmZG1kh4QTK8hCBTWBi8Da+i7s1fJw7/lYpM4ccepSNfqzZ/QvABBi5g==", "license": "MIT", "engines": { "node": ">=6.9.0" @@ -379,9 +379,9 @@ } }, "node_modules/@dicebear/adventurer": { - "version": "9.4.0", - "resolved": "https://registry.npmjs.org/@dicebear/adventurer/-/adventurer-9.4.0.tgz", - "integrity": "sha512-VfTOSc6XRdRGjdkTSC7AHmV1HdGlmUQ4/6TCb570uLsPFyFkG7nCVQYjbWZun3BilIQsyIuLSSWxrZWR+XH/rg==", + "version": "9.4.2", + "resolved": "https://registry.npmjs.org/@dicebear/adventurer/-/adventurer-9.4.2.tgz", + "integrity": "sha512-jqYp834ZmGDA9HBBDQAdgF1O2UTCwHF4vVrktXWa2Dppp1JczPL5HnVOWsjtrLmXNn61Wd6OLmBb2e6rhzp3ig==", "license": "(MIT AND CC-BY-4.0)", "engines": { "node": ">=18.0.0" @@ -391,9 +391,9 @@ } }, "node_modules/@dicebear/adventurer-neutral": { - "version": "9.4.0", - "resolved": "https://registry.npmjs.org/@dicebear/adventurer-neutral/-/adventurer-neutral-9.4.0.tgz", - "integrity": "sha512-zlpEF4KJhfl96j0M6wPmgaUVz20VKYZziIcIvf9pqGrvsTl1kDnoBtpmAROuU3e7FeCqDhk4qSQvorusW+L62g==", + "version": "9.4.2", + "resolved": "https://registry.npmjs.org/@dicebear/adventurer-neutral/-/adventurer-neutral-9.4.2.tgz", + "integrity": "sha512-5xgkG/mNL4j3Q4SJGQLBU/KnU90tng8Ze5ofThD+55wi0oeY/nSAUowg6UFCmHrktjifj/MEx3CQqbpcPWtfIA==", "license": "(MIT AND CC-BY-4.0)", "engines": { "node": ">=18.0.0" @@ -403,9 +403,9 @@ } 
}, "node_modules/@dicebear/avataaars": { - "version": "9.4.0", - "resolved": "https://registry.npmjs.org/@dicebear/avataaars/-/avataaars-9.4.0.tgz", - "integrity": "sha512-zqpXcl+RHza3DeN3WcqtXMkQanI6wHUg/plJFb+uqI4KeXkJ6NBVsHNH7A4EImY/XZ4H3nw1g30io//ji5bxkw==", + "version": "9.4.2", + "resolved": "https://registry.npmjs.org/@dicebear/avataaars/-/avataaars-9.4.2.tgz", + "integrity": "sha512-3x9jKFkOkFSPmpTbt9xvhiU2E1GX7beCSsX0tXRUShj8x6+5Ks9yBRT1VlkySbnXrZ/GglADGg7vJ/D2uIx1Yw==", "license": "See LICENSE file", "engines": { "node": ">=18.0.0" @@ -415,9 +415,9 @@ } }, "node_modules/@dicebear/avataaars-neutral": { - "version": "9.4.0", - "resolved": "https://registry.npmjs.org/@dicebear/avataaars-neutral/-/avataaars-neutral-9.4.0.tgz", - "integrity": "sha512-tGtmnBfjgdElgKouzEuIdJXQ0makePI1rZnVLW5hJxA6A3xWEAQOIHCqTA0UDBHjM/uJP5lspxUIJrJHU76/8Q==", + "version": "9.4.2", + "resolved": "https://registry.npmjs.org/@dicebear/avataaars-neutral/-/avataaars-neutral-9.4.2.tgz", + "integrity": "sha512-/eNrp0YCNJRwQXqOloLm1+3Ss2C+pMpUQIGkbEnGsP1UK+13Ge80ggDDof1HpdqvG9HAZcKa7hnbG/0HSwyDSw==", "license": "See LICENSE file", "engines": { "node": ">=18.0.0" @@ -427,9 +427,9 @@ } }, "node_modules/@dicebear/big-ears": { - "version": "9.4.0", - "resolved": "https://registry.npmjs.org/@dicebear/big-ears/-/big-ears-9.4.0.tgz", - "integrity": "sha512-d43CWzswbwed4q1RZFxt1qlhQfqzPGZVwGe0/+PZIr1B4U8y3/AqT7y1TptTdk6lL65XNhJKM30cxn72+x5fTA==", + "version": "9.4.2", + "resolved": "https://registry.npmjs.org/@dicebear/big-ears/-/big-ears-9.4.2.tgz", + "integrity": "sha512-mNfz3ppNA7UBq0IO3nXCiV5pFPG7c1DfzRB0foNU2Wo1XXT8FIcSY2BvDlYqorZTOUOz7dHb0vx06hqvG0HP5w==", "license": "(MIT AND CC-BY-4.0)", "engines": { "node": ">=18.0.0" @@ -439,9 +439,9 @@ } }, "node_modules/@dicebear/big-ears-neutral": { - "version": "9.4.0", - "resolved": "https://registry.npmjs.org/@dicebear/big-ears-neutral/-/big-ears-neutral-9.4.0.tgz", - "integrity": 
"sha512-xUJGFriKkBEs4dRe8rZ7fqT49x0JgOVwpl1A5hYXYI6BPZqyX4wfCPPynyPtYyZDWy+nuCWxFgc2fZCBV/hW7g==", + "version": "9.4.2", + "resolved": "https://registry.npmjs.org/@dicebear/big-ears-neutral/-/big-ears-neutral-9.4.2.tgz", + "integrity": "sha512-M8Ozmzza4eY4hpLOYULgJxMYmBA0CsBnrE15/xw6LZkEREXnrX5z0NJsf8hUfdyF6BWZ+RBgzoiav32DAC5zcg==", "license": "(MIT AND CC-BY-4.0)", "engines": { "node": ">=18.0.0" @@ -451,9 +451,9 @@ } }, "node_modules/@dicebear/big-smile": { - "version": "9.4.0", - "resolved": "https://registry.npmjs.org/@dicebear/big-smile/-/big-smile-9.4.0.tgz", - "integrity": "sha512-LPXCc11Yw/p54OYNjyyiNoCdqXybuAWJRxkcpThx9S/TKouuwnEroj5PL3b1+unreCHtMDzkcO9dia7mqX9DYQ==", + "version": "9.4.2", + "resolved": "https://registry.npmjs.org/@dicebear/big-smile/-/big-smile-9.4.2.tgz", + "integrity": "sha512-hmT5i7rcPPhStjZyg28pbIhdTnnMBzK3RObI0vKCpY30EFrzaPkkdDL6Ck5fAFBdvDIW1EpOJkenyR0XPmhgbQ==", "license": "(MIT AND CC-BY-4.0)", "engines": { "node": ">=18.0.0" @@ -463,9 +463,9 @@ } }, "node_modules/@dicebear/bottts": { - "version": "9.4.0", - "resolved": "https://registry.npmjs.org/@dicebear/bottts/-/bottts-9.4.0.tgz", - "integrity": "sha512-vuFC5HRfzla7YH2s02CBrxBr+ninbZu9PtO3a72JoO8Da02/POI7RF3WjjlzfRG4+i5NHyn77gKsl2cy8rTTXA==", + "version": "9.4.2", + "resolved": "https://registry.npmjs.org/@dicebear/bottts/-/bottts-9.4.2.tgz", + "integrity": "sha512-tsx+dII7EFUCVA8URj66G1GqORCCVduCAx4dY2prEY2IeFianVpkntXuFsWZ9BBGx1NZFndvDith5oTwKMQPbQ==", "license": "See LICENSE file", "engines": { "node": ">=18.0.0" @@ -475,9 +475,9 @@ } }, "node_modules/@dicebear/bottts-neutral": { - "version": "9.4.0", - "resolved": "https://registry.npmjs.org/@dicebear/bottts-neutral/-/bottts-neutral-9.4.0.tgz", - "integrity": "sha512-ACIM6Cu0es4TdMA0jHUlKtWh50AZS0HJ5ykeBueZpPhMMGbjkRV90Sit/4+I2ghTOZ6Veug+UjEKz4VUbkfKwA==", + "version": "9.4.2", + "resolved": "https://registry.npmjs.org/@dicebear/bottts-neutral/-/bottts-neutral-9.4.2.tgz", + "integrity": 
"sha512-kFNwWt6j+gzZ5n5Pz7WVwePubREAQOF8ZwWA9ztwVYDVMLnOChWbAofy5FED4j5md2MXFH2EgLCFCMr5K2BmIA==", "license": "See LICENSE file", "engines": { "node": ">=18.0.0" @@ -487,42 +487,42 @@ } }, "node_modules/@dicebear/collection": { - "version": "9.4.0", - "resolved": "https://registry.npmjs.org/@dicebear/collection/-/collection-9.4.0.tgz", - "integrity": "sha512-OVMKwwS+npvbkJeOSIhtciOemUx//o1TpgwoOwGMffywsalL7+Mz9he/i6kT3xxi4mVkFDR46rtz4J/VlexXnQ==", - "license": "MIT", - "dependencies": { - "@dicebear/adventurer": "9.4.0", - "@dicebear/adventurer-neutral": "9.4.0", - "@dicebear/avataaars": "9.4.0", - "@dicebear/avataaars-neutral": "9.4.0", - "@dicebear/big-ears": "9.4.0", - "@dicebear/big-ears-neutral": "9.4.0", - "@dicebear/big-smile": "9.4.0", - "@dicebear/bottts": "9.4.0", - "@dicebear/bottts-neutral": "9.4.0", - "@dicebear/croodles": "9.4.0", - "@dicebear/croodles-neutral": "9.4.0", - "@dicebear/dylan": "9.4.0", - "@dicebear/fun-emoji": "9.4.0", - "@dicebear/glass": "9.4.0", - "@dicebear/icons": "9.4.0", - "@dicebear/identicon": "9.4.0", - "@dicebear/initials": "9.4.0", - "@dicebear/lorelei": "9.4.0", - "@dicebear/lorelei-neutral": "9.4.0", - "@dicebear/micah": "9.4.0", - "@dicebear/miniavs": "9.4.0", - "@dicebear/notionists": "9.4.0", - "@dicebear/notionists-neutral": "9.4.0", - "@dicebear/open-peeps": "9.4.0", - "@dicebear/personas": "9.4.0", - "@dicebear/pixel-art": "9.4.0", - "@dicebear/pixel-art-neutral": "9.4.0", - "@dicebear/rings": "9.4.0", - "@dicebear/shapes": "9.4.0", - "@dicebear/thumbs": "9.4.0", - "@dicebear/toon-head": "9.4.0" + "version": "9.4.2", + "resolved": "https://registry.npmjs.org/@dicebear/collection/-/collection-9.4.2.tgz", + "integrity": "sha512-KArubv7if8H7j9sIfpDK2hJJqrdNVR5zMPAMOSpIU2JPyXx8TC9o5wsmXb8il5wOHgaS9Q/cla7jUNIiDD7Gsg==", + "license": "MIT", + "dependencies": { + "@dicebear/adventurer": "9.4.2", + "@dicebear/adventurer-neutral": "9.4.2", + "@dicebear/avataaars": "9.4.2", + "@dicebear/avataaars-neutral": "9.4.2", + 
"@dicebear/big-ears": "9.4.2", + "@dicebear/big-ears-neutral": "9.4.2", + "@dicebear/big-smile": "9.4.2", + "@dicebear/bottts": "9.4.2", + "@dicebear/bottts-neutral": "9.4.2", + "@dicebear/croodles": "9.4.2", + "@dicebear/croodles-neutral": "9.4.2", + "@dicebear/dylan": "9.4.2", + "@dicebear/fun-emoji": "9.4.2", + "@dicebear/glass": "9.4.2", + "@dicebear/icons": "9.4.2", + "@dicebear/identicon": "9.4.2", + "@dicebear/initials": "9.4.2", + "@dicebear/lorelei": "9.4.2", + "@dicebear/lorelei-neutral": "9.4.2", + "@dicebear/micah": "9.4.2", + "@dicebear/miniavs": "9.4.2", + "@dicebear/notionists": "9.4.2", + "@dicebear/notionists-neutral": "9.4.2", + "@dicebear/open-peeps": "9.4.2", + "@dicebear/personas": "9.4.2", + "@dicebear/pixel-art": "9.4.2", + "@dicebear/pixel-art-neutral": "9.4.2", + "@dicebear/rings": "9.4.2", + "@dicebear/shapes": "9.4.2", + "@dicebear/thumbs": "9.4.2", + "@dicebear/toon-head": "9.4.2" }, "engines": { "node": ">=18.0.0" @@ -532,9 +532,9 @@ } }, "node_modules/@dicebear/core": { - "version": "9.4.0", - "resolved": "https://registry.npmjs.org/@dicebear/core/-/core-9.4.0.tgz", - "integrity": "sha512-uoAG5mPBX+kQTtVerWUoH5e7rezG+DV/vJ5icd/kGooGyylH0nuJIlA6todkKGQv+/b0QNo+EzNF6Nc4UTE3wQ==", + "version": "9.4.2", + "resolved": "https://registry.npmjs.org/@dicebear/core/-/core-9.4.2.tgz", + "integrity": "sha512-MF0042+Z3s8PGZKZLySfhft28bUa3B1iq0e5NSjCvY8gfMi5aIH/iRJGRJa1N9Jz1BNkxYb4yvJ/N9KO8Z6Y+w==", "license": "MIT", "dependencies": { "@types/json-schema": "^7.0.15" @@ -544,9 +544,9 @@ } }, "node_modules/@dicebear/croodles": { - "version": "9.4.0", - "resolved": "https://registry.npmjs.org/@dicebear/croodles/-/croodles-9.4.0.tgz", - "integrity": "sha512-tC68VGu0XOtDd4aOORvchtRy1EMphuTWCl/vDIlS9zuKJJxIJCh0r7mREn/Azds07Hdg1R1Mr8j85tdVonEpgQ==", + "version": "9.4.2", + "resolved": "https://registry.npmjs.org/@dicebear/croodles/-/croodles-9.4.2.tgz", + "integrity": 
"sha512-6VoO0JviIf7dKKMBTL/SMXxWhnXHaZuzufX90G0nXxS77ELG1YkGNMaZzawizN4C09Gbya2gJkozqrWiJN/aGw==", "license": "(MIT AND CC-BY-4.0)", "engines": { "node": ">=18.0.0" @@ -556,9 +556,9 @@ } }, "node_modules/@dicebear/croodles-neutral": { - "version": "9.4.0", - "resolved": "https://registry.npmjs.org/@dicebear/croodles-neutral/-/croodles-neutral-9.4.0.tgz", - "integrity": "sha512-kRFE46B+WfGU4yDaD0ESSvt9A6CBtxuR7sGcFJ4YhK4T/O+tnP+iqRuQ3+ob1oNdEW3oQaD9aBioi3hBfbrrBA==", + "version": "9.4.2", + "resolved": "https://registry.npmjs.org/@dicebear/croodles-neutral/-/croodles-neutral-9.4.2.tgz", + "integrity": "sha512-oG5IeUdtiYshQ89gkAVcl5w3xAEi5UZX2fTzIyelpBPCG176l7VuuFzlxi2umnB3E6LVHYy06DXvUo/p+rXB2Q==", "license": "(MIT AND CC-BY-4.0)", "engines": { "node": ">=18.0.0" @@ -568,9 +568,9 @@ } }, "node_modules/@dicebear/dylan": { - "version": "9.4.0", - "resolved": "https://registry.npmjs.org/@dicebear/dylan/-/dylan-9.4.0.tgz", - "integrity": "sha512-1HxZyVmPf5ElERs4NqDtWHw6OBDae5v6t4zspCXRzMH/H0onwlbx3uAZDNGFdPgah8bSV3MhAzhggTCNcWtMxw==", + "version": "9.4.2", + "resolved": "https://registry.npmjs.org/@dicebear/dylan/-/dylan-9.4.2.tgz", + "integrity": "sha512-1vQvRu9x9DrwFxhFaIU2rf0EUL04yDTbAt7fHyAjM0mEsKzTD4mRNf95tCRuavCoW6W48u7A/OY6jyIub6kxLQ==", "license": "(MIT AND CC-BY-4.0)", "engines": { "node": ">=18.0.0" @@ -580,9 +580,9 @@ } }, "node_modules/@dicebear/fun-emoji": { - "version": "9.4.0", - "resolved": "https://registry.npmjs.org/@dicebear/fun-emoji/-/fun-emoji-9.4.0.tgz", - "integrity": "sha512-dDOw30RfCNfqqeXny4eQLgyMEXfZ0Y5Gz+rSPCuXGw735rCF+Wehyy4tzl2icCkXhWK9attlAY9anjV45k/2aQ==", + "version": "9.4.2", + "resolved": "https://registry.npmjs.org/@dicebear/fun-emoji/-/fun-emoji-9.4.2.tgz", + "integrity": "sha512-kqB6LPkdYCdEU/mwbyz34xLzoNUKL6ARcoo3fr5ASq9D6ZE07qIKybC3xv5+CPz7VmspJ1Q3c/VVWVMDRP7Twg==", "license": "(MIT AND CC-BY-4.0)", "engines": { "node": ">=18.0.0" @@ -592,9 +592,9 @@ } }, "node_modules/@dicebear/glass": { - "version": "9.4.0", - "resolved": 
"https://registry.npmjs.org/@dicebear/glass/-/glass-9.4.0.tgz", - "integrity": "sha512-piYKjXTPiTmdgkEW8OEAQNTbcAwtI0+iR2ODfKWnWBy8lM+rnY4TmBi3RgMFJXLFqjPgu38SXTsd2bWAfVa4MQ==", + "version": "9.4.2", + "resolved": "https://registry.npmjs.org/@dicebear/glass/-/glass-9.4.2.tgz", + "integrity": "sha512-z5qUogHQ1b6UJ2zCqT848mU2U9DKbVDhiX6GPDjD7tYLisCCJVisH9p6WyNdHvflUd4SHkA6gRqVJIh2v2HnTA==", "license": "MIT", "engines": { "node": ">=18.0.0" @@ -604,9 +604,9 @@ } }, "node_modules/@dicebear/icons": { - "version": "9.4.0", - "resolved": "https://registry.npmjs.org/@dicebear/icons/-/icons-9.4.0.tgz", - "integrity": "sha512-iwA4uM8E9B9kCEMJfxvgfDGje3h2ZE84SDuvJjjCWWZP/LJ5YX50QcRrfknRffD439DXJsKdXy9ku4OB5G7TkQ==", + "version": "9.4.2", + "resolved": "https://registry.npmjs.org/@dicebear/icons/-/icons-9.4.2.tgz", + "integrity": "sha512-QSMMz0NA03ypSGhXC8HQX8FSj8lYT+/5yqH+/N03OH2IjL0q7wwGZ7nqsrtlRp76O5WqMTwGfSbTUUYPjFr+Xw==", "license": "MIT", "engines": { "node": ">=18.0.0" @@ -616,9 +616,9 @@ } }, "node_modules/@dicebear/identicon": { - "version": "9.4.0", - "resolved": "https://registry.npmjs.org/@dicebear/identicon/-/identicon-9.4.0.tgz", - "integrity": "sha512-6X5z7oHeGPuw9i7DaHQAQdHGAu9KYUgTZx8lWLJH/wutzCkygpNm7P0Q1FaP8zmdLkhj4AknQOoZ5AW0kaW4Lg==", + "version": "9.4.2", + "resolved": "https://registry.npmjs.org/@dicebear/identicon/-/identicon-9.4.2.tgz", + "integrity": "sha512-JVDSmZsv11mSWqwAktK5x9Bslht2xY3TFUn8xzu6slAYe1Z7hEXZ76eb+UJ6F4qEzdwZ7xPWzAS6Nb0Y3A0pww==", "license": "MIT", "engines": { "node": ">=18.0.0" @@ -628,9 +628,9 @@ } }, "node_modules/@dicebear/initials": { - "version": "9.4.0", - "resolved": "https://registry.npmjs.org/@dicebear/initials/-/initials-9.4.0.tgz", - "integrity": "sha512-Qt0jDQKyo63HD8o3mXgb+PzM0L01BWpURtrEETZEGgES+C3Qz5fQPbVDdkKSNXn5yyjv6LbdniJJUjTxDmQAQw==", + "version": "9.4.2", + "resolved": "https://registry.npmjs.org/@dicebear/initials/-/initials-9.4.2.tgz", + "integrity": 
"sha512-yePuIUasmwtl9IrtB6rEzE/zb5fImKP/neW0CdcTC2MwLgMuP1GLHEGRgg1zI8exIh+PMv1YdLGyyUuRTE2Qpw==", "license": "MIT", "engines": { "node": ">=18.0.0" @@ -640,9 +640,9 @@ } }, "node_modules/@dicebear/lorelei": { - "version": "9.4.0", - "resolved": "https://registry.npmjs.org/@dicebear/lorelei/-/lorelei-9.4.0.tgz", - "integrity": "sha512-P91tqHckYj+IPw906F3SQwKvIMClJFwfYb4mvJGYoy/PyQVcRdT7ziKbYrG70bHKgdSEQSAarOdLH4EDLX4IpA==", + "version": "9.4.2", + "resolved": "https://registry.npmjs.org/@dicebear/lorelei/-/lorelei-9.4.2.tgz", + "integrity": "sha512-YMv6vnriW6VLFDsreKuOnUFFno6SRe7+7X7R7zPY0rZ+MaHX9V3jcioIG+1PSjIHEDfOLUHpr5vd1JBWv8y7UA==", "license": "MIT", "engines": { "node": ">=18.0.0" @@ -652,9 +652,9 @@ } }, "node_modules/@dicebear/lorelei-neutral": { - "version": "9.4.0", - "resolved": "https://registry.npmjs.org/@dicebear/lorelei-neutral/-/lorelei-neutral-9.4.0.tgz", - "integrity": "sha512-3ceiazxgIN/9p6Ndg6X76N+RH61PSg0+717YiAZ5WN/epia/UUYzsZ5RrLyjrdq30SRNeHawp58qbAkOYMWD7g==", + "version": "9.4.2", + "resolved": "https://registry.npmjs.org/@dicebear/lorelei-neutral/-/lorelei-neutral-9.4.2.tgz", + "integrity": "sha512-yspanTthA5vh6iCdeLzn6xZ4yYMYRcfcxblcgSvHTF1ut0bjAXtw5SXzZ6aJTrJWiHkzYOQuTOR6GVYiW80Q7w==", "license": "MIT", "engines": { "node": ">=18.0.0" @@ -664,9 +664,9 @@ } }, "node_modules/@dicebear/micah": { - "version": "9.4.0", - "resolved": "https://registry.npmjs.org/@dicebear/micah/-/micah-9.4.0.tgz", - "integrity": "sha512-fMtENHrq7ZFNt+HpZTP0yr06dw76ur6SCjMK1eQBX6fwgtJ8HkHa/4TjhpjvQTarJJPs6FDPtGkHcYKCehBUNw==", + "version": "9.4.2", + "resolved": "https://registry.npmjs.org/@dicebear/micah/-/micah-9.4.2.tgz", + "integrity": "sha512-e4D3W/OlChSsLo7Llwsy0J18vk0azJqF/uFoY+EKACCNHBc1HGNsqVvu2CTf+OWOA8wTyAK6UkjBN5p01r7D+g==", "license": "(MIT AND CC-BY-4.0)", "engines": { "node": ">=18.0.0" @@ -676,9 +676,9 @@ } }, "node_modules/@dicebear/miniavs": { - "version": "9.4.0", - "resolved": 
"https://registry.npmjs.org/@dicebear/miniavs/-/miniavs-9.4.0.tgz", - "integrity": "sha512-Gh4C8xF3vRM+FkEtfiYWLaRYCZP1Bzdg/gjLqvn/rJ9TCo645KksPcpABShZv7BPbOCkr17lhSrfBmlRjQnzkQ==", + "version": "9.4.2", + "resolved": "https://registry.npmjs.org/@dicebear/miniavs/-/miniavs-9.4.2.tgz", + "integrity": "sha512-wLwyFNNUnDRd3BbhSBhXR0XEpX8sG0/xDA5M/OkDoapLqZnnI48YLUSDd2N5QTAVMmcSEuZOYxkcnj7WW79vlg==", "license": "(MIT AND CC-BY-4.0)", "engines": { "node": ">=18.0.0" @@ -688,9 +688,9 @@ } }, "node_modules/@dicebear/notionists": { - "version": "9.4.0", - "resolved": "https://registry.npmjs.org/@dicebear/notionists/-/notionists-9.4.0.tgz", - "integrity": "sha512-MgZuW5of3b3cjLFi+D+iONZ3t/t9TZHYUyBXDmRxgeQW+l6td3n8Mjg8eA81jbzVC2RNyxKCOjZu6EyjyX88tA==", + "version": "9.4.2", + "resolved": "https://registry.npmjs.org/@dicebear/notionists/-/notionists-9.4.2.tgz", + "integrity": "sha512-ZCySq+nxcD/x4xyYgytcj2N9uY3gxrL+qpnmOdp2BdA221KacVrxlsUPpIgEMqxS2rMmBQXfxg129Pzn4ycIpA==", "license": "MIT", "engines": { "node": ">=18.0.0" @@ -700,9 +700,9 @@ } }, "node_modules/@dicebear/notionists-neutral": { - "version": "9.4.0", - "resolved": "https://registry.npmjs.org/@dicebear/notionists-neutral/-/notionists-neutral-9.4.0.tgz", - "integrity": "sha512-wzg/NLcIzSM2O8IXcEFucYLJypS7I3VKmBsn4ShdM1qQ5nNlA8Ig3e9GKkfxRS2K+xTNDHyXuXNB88pj5Uzmig==", + "version": "9.4.2", + "resolved": "https://registry.npmjs.org/@dicebear/notionists-neutral/-/notionists-neutral-9.4.2.tgz", + "integrity": "sha512-AyD9kEfVxQUwDGf4Op059gVmYIOAkTKg3dtE9h9mEKP7zl/kMy5B67BFFOo7sB0mXCjzAegZ6ekGU02E8+hIHw==", "license": "MIT", "engines": { "node": ">=18.0.0" @@ -712,9 +712,9 @@ } }, "node_modules/@dicebear/open-peeps": { - "version": "9.4.0", - "resolved": "https://registry.npmjs.org/@dicebear/open-peeps/-/open-peeps-9.4.0.tgz", - "integrity": "sha512-IxbfUWoYEUFdqYqz0iLYODbShV3GWx0t2Afq4pw6KTSewusjMIuYlvyK4z8cFkc2Ai/7VXRBLvQd+YA8KRMpIw==", + "version": "9.4.2", + "resolved": 
"https://registry.npmjs.org/@dicebear/open-peeps/-/open-peeps-9.4.2.tgz", + "integrity": "sha512-i01tLgtp2g937T81sVeAOVlqsCtiTck/Kw20g7hN80+7xrXjOUepz2HPLy3HeiMjwjMGRy5o54kSd0/8Ht4Dqg==", "license": "MIT", "engines": { "node": ">=18.0.0" @@ -724,9 +724,9 @@ } }, "node_modules/@dicebear/personas": { - "version": "9.4.0", - "resolved": "https://registry.npmjs.org/@dicebear/personas/-/personas-9.4.0.tgz", - "integrity": "sha512-CjmIiOEwEmQeccIF0U7uzzBLOn9PWNFz87vAAiToWVzA4pVuzHgA+OiKzC6n91lZfRy76bGL1JtR8/ZppCN0YA==", + "version": "9.4.2", + "resolved": "https://registry.npmjs.org/@dicebear/personas/-/personas-9.4.2.tgz", + "integrity": "sha512-NJlkvI5F5gugt6t2+7QrYNTwQC7+4IQZS3vG0dYk2BncxOHax0BuLovdSdiAesTL4ZkytFYIydWmKmV2/xcUwg==", "license": "(MIT AND CC-BY-4.0)", "engines": { "node": ">=18.0.0" @@ -736,9 +736,9 @@ } }, "node_modules/@dicebear/pixel-art": { - "version": "9.4.0", - "resolved": "https://registry.npmjs.org/@dicebear/pixel-art/-/pixel-art-9.4.0.tgz", - "integrity": "sha512-oQm9pGOaYCgfnxtzNY8xaJa3ZBH12xd7p4UT35ghvtRgk394uCnmz/bg71tnj2ynwVmZ4s5gBoWlUymnTvvCOw==", + "version": "9.4.2", + "resolved": "https://registry.npmjs.org/@dicebear/pixel-art/-/pixel-art-9.4.2.tgz", + "integrity": "sha512-peHf7oKICDgBZ8dUyj+txPnS7VZEWgvKE+xW4mNQqBt6dYZIjmva2shOVHn0b1JU+FDxMx3uIkWVixKdUq4WGg==", "license": "MIT", "engines": { "node": ">=18.0.0" @@ -748,9 +748,9 @@ } }, "node_modules/@dicebear/pixel-art-neutral": { - "version": "9.4.0", - "resolved": "https://registry.npmjs.org/@dicebear/pixel-art-neutral/-/pixel-art-neutral-9.4.0.tgz", - "integrity": "sha512-OGYFbow6Hu345OObR0dPOAImuGP5vFqNkzkfkEPF4DPbLnCa3RjpeoCkyB+/Gvz7qAtyRR8W57Tfj6PQVRLLXg==", + "version": "9.4.2", + "resolved": "https://registry.npmjs.org/@dicebear/pixel-art-neutral/-/pixel-art-neutral-9.4.2.tgz", + "integrity": "sha512-9e9Lz554uQvWaXV2P17ss+hPa6rTyuAKBtB8zk8ECjHiZzIl61N/KcTVLZ4dILVZwj7gYriaLo16QEqvL2GJCg==", "license": "MIT", "engines": { "node": ">=18.0.0" @@ -760,9 +760,9 @@ } }, 
"node_modules/@dicebear/rings": { - "version": "9.4.0", - "resolved": "https://registry.npmjs.org/@dicebear/rings/-/rings-9.4.0.tgz", - "integrity": "sha512-lEhPwUd/uZFLAWM296/aNSGaCyT9NaTXm6V3izFtD8pywceze+sV3s46uLKpvCKUEcI4ia5iMERV35EH5P2ixg==", + "version": "9.4.2", + "resolved": "https://registry.npmjs.org/@dicebear/rings/-/rings-9.4.2.tgz", + "integrity": "sha512-Pc3ymWrRDQPJFNrbbLt7RJrzGvUuuxUiDkrfLhoVE+B6mZWEL1PC78DPbS1yUWYLErJOpJuM2GSwXmTbVjWf+g==", "license": "MIT", "engines": { "node": ">=18.0.0" @@ -772,9 +772,9 @@ } }, "node_modules/@dicebear/shapes": { - "version": "9.4.0", - "resolved": "https://registry.npmjs.org/@dicebear/shapes/-/shapes-9.4.0.tgz", - "integrity": "sha512-WTH1j6xqwdzBYiTPsCECqlB7kYC0TIbdlg49jEZJp9qP0tguVMH+M7GmWY5TO2chTRmYjJREmgvZWPgmE1Sd9Q==", + "version": "9.4.2", + "resolved": "https://registry.npmjs.org/@dicebear/shapes/-/shapes-9.4.2.tgz", + "integrity": "sha512-AFL6jAaiLztvcqyq+ds+lWZu6Vbp3PlGWhJeJRm842jxtiluJpl6r4f6nUXP2fdMz7MNpDzXfLooQK9E04NbUQ==", "license": "MIT", "engines": { "node": ">=18.0.0" @@ -784,9 +784,9 @@ } }, "node_modules/@dicebear/thumbs": { - "version": "9.4.0", - "resolved": "https://registry.npmjs.org/@dicebear/thumbs/-/thumbs-9.4.0.tgz", - "integrity": "sha512-eppbqo+3CvlDF4cwWNBsdNmtXHkVaj5AvM9KimVBWdp0S98foTTekCaQCBCmDfATywVXEGk+GaThTZdYgIE/0Q==", + "version": "9.4.2", + "resolved": "https://registry.npmjs.org/@dicebear/thumbs/-/thumbs-9.4.2.tgz", + "integrity": "sha512-ccWvDBqbkWS5uzHbsg5L6uML6vBfX7jT3J3jHCQksvz8haHItxTK02w+6e1UavZUsvza4lG5X/XY3eji3siJ4Q==", "license": "MIT", "engines": { "node": ">=18.0.0" @@ -796,9 +796,9 @@ } }, "node_modules/@dicebear/toon-head": { - "version": "9.4.0", - "resolved": "https://registry.npmjs.org/@dicebear/toon-head/-/toon-head-9.4.0.tgz", - "integrity": "sha512-3u4ghFUFhnV1LYAfbltihOnASCk4qeWYLjg8B9U6drovrxY4yfX13vqNzQePormLvehXKpm9+gKbmy4kMt2w+g==", + "version": "9.4.2", + "resolved": "https://registry.npmjs.org/@dicebear/toon-head/-/toon-head-9.4.2.tgz", + 
"integrity": "sha512-lwFeSXyAnaKnCfMt9TiJwnD1cXQUGkey/0h6i/+4TVHVMCz5/Ri5u1ynovPNHy1SnBf858QwoXHkxilGLwQX/g==", "license": "(MIT AND CC-BY-4.0)", "engines": { "node": ">=16.0.0" @@ -954,9 +954,9 @@ "license": "MIT" }, "node_modules/@esbuild/aix-ppc64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.27.3.tgz", - "integrity": "sha512-9fJMTNFTWZMh5qwrBItuziu834eOCUcEqymSH7pY+zoMVEZg3gcPuBNxH1EvfVYe9h0x/Ptw8KBzv7qxb7l8dg==", + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.27.4.tgz", + "integrity": "sha512-cQPwL2mp2nSmHHJlCyoXgHGhbEPMrEEU5xhkcy3Hs/O7nGZqEpZ2sUtLaL9MORLtDfRvVl2/3PAuEkYZH0Ty8Q==", "cpu": [ "ppc64" ], @@ -971,9 +971,9 @@ } }, "node_modules/@esbuild/android-arm": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.27.3.tgz", - "integrity": "sha512-i5D1hPY7GIQmXlXhs2w8AWHhenb00+GxjxRncS2ZM7YNVGNfaMxgzSGuO8o8SJzRc/oZwU2bcScvVERk03QhzA==", + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.27.4.tgz", + "integrity": "sha512-X9bUgvxiC8CHAGKYufLIHGXPJWnr0OCdR0anD2e21vdvgCI8lIfqFbnoeOz7lBjdrAGUhqLZLcQo6MLhTO2DKQ==", "cpu": [ "arm" ], @@ -988,9 +988,9 @@ } }, "node_modules/@esbuild/android-arm64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.27.3.tgz", - "integrity": "sha512-YdghPYUmj/FX2SYKJ0OZxf+iaKgMsKHVPF1MAq/P8WirnSpCStzKJFjOjzsW0QQ7oIAiccHdcqjbHmJxRb/dmg==", + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.27.4.tgz", + "integrity": "sha512-gdLscB7v75wRfu7QSm/zg6Rx29VLdy9eTr2t44sfTW7CxwAtQghZ4ZnqHk3/ogz7xao0QAgrkradbBzcqFPasw==", "cpu": [ "arm64" ], @@ -1005,9 +1005,9 @@ } }, "node_modules/@esbuild/android-x64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.27.3.tgz", - "integrity": 
"sha512-IN/0BNTkHtk8lkOM8JWAYFg4ORxBkZQf9zXiEOfERX/CzxW3Vg1ewAhU7QSWQpVIzTW+b8Xy+lGzdYXV6UZObQ==", + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.27.4.tgz", + "integrity": "sha512-PzPFnBNVF292sfpfhiyiXCGSn9HZg5BcAz+ivBuSsl6Rk4ga1oEXAamhOXRFyMcjwr2DVtm40G65N3GLeH1Lvw==", "cpu": [ "x64" ], @@ -1022,9 +1022,9 @@ } }, "node_modules/@esbuild/darwin-arm64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.3.tgz", - "integrity": "sha512-Re491k7ByTVRy0t3EKWajdLIr0gz2kKKfzafkth4Q8A5n1xTHrkqZgLLjFEHVD+AXdUGgQMq+Godfq45mGpCKg==", + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.4.tgz", + "integrity": "sha512-b7xaGIwdJlht8ZFCvMkpDN6uiSmnxxK56N2GDTMYPr2/gzvfdQN8rTfBsvVKmIVY/X7EM+/hJKEIbbHs9oA4tQ==", "cpu": [ "arm64" ], @@ -1039,9 +1039,9 @@ } }, "node_modules/@esbuild/darwin-x64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.3.tgz", - "integrity": "sha512-vHk/hA7/1AckjGzRqi6wbo+jaShzRowYip6rt6q7VYEDX4LEy1pZfDpdxCBnGtl+A5zq8iXDcyuxwtv3hNtHFg==", + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.4.tgz", + "integrity": "sha512-sR+OiKLwd15nmCdqpXMnuJ9W2kpy0KigzqScqHI3Hqwr7IXxBp3Yva+yJwoqh7rE8V77tdoheRYataNKL4QrPw==", "cpu": [ "x64" ], @@ -1056,9 +1056,9 @@ } }, "node_modules/@esbuild/freebsd-arm64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.27.3.tgz", - "integrity": "sha512-ipTYM2fjt3kQAYOvo6vcxJx3nBYAzPjgTCk7QEgZG8AUO3ydUhvelmhrbOheMnGOlaSFUoHXB6un+A7q4ygY9w==", + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.27.4.tgz", + "integrity": "sha512-jnfpKe+p79tCnm4GVav68A7tUFeKQwQyLgESwEAUzyxk/TJr4QdGog9sqWNcUbr/bZt/O/HXouspuQDd9JxFSw==", "cpu": [ "arm64" ], @@ -1073,9 +1073,9 @@ } }, 
"node_modules/@esbuild/freebsd-x64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.27.3.tgz", - "integrity": "sha512-dDk0X87T7mI6U3K9VjWtHOXqwAMJBNN2r7bejDsc+j03SEjtD9HrOl8gVFByeM0aJksoUuUVU9TBaZa2rgj0oA==", + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.27.4.tgz", + "integrity": "sha512-2kb4ceA/CpfUrIcTUl1wrP/9ad9Atrp5J94Lq69w7UwOMolPIGrfLSvAKJp0RTvkPPyn6CIWrNy13kyLikZRZQ==", "cpu": [ "x64" ], @@ -1090,9 +1090,9 @@ } }, "node_modules/@esbuild/linux-arm": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.27.3.tgz", - "integrity": "sha512-s6nPv2QkSupJwLYyfS+gwdirm0ukyTFNl3KTgZEAiJDd+iHZcbTPPcWCcRYH+WlNbwChgH2QkE9NSlNrMT8Gfw==", + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.27.4.tgz", + "integrity": "sha512-aBYgcIxX/wd5n2ys0yESGeYMGF+pv6g0DhZr3G1ZG4jMfruU9Tl1i2Z+Wnj9/KjGz1lTLCcorqE2viePZqj4Eg==", "cpu": [ "arm" ], @@ -1107,9 +1107,9 @@ } }, "node_modules/@esbuild/linux-arm64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.27.3.tgz", - "integrity": "sha512-sZOuFz/xWnZ4KH3YfFrKCf1WyPZHakVzTiqji3WDc0BCl2kBwiJLCXpzLzUBLgmp4veFZdvN5ChW4Eq/8Fc2Fg==", + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.27.4.tgz", + "integrity": "sha512-7nQOttdzVGth1iz57kxg9uCz57dxQLHWxopL6mYuYthohPKEK0vU0C3O21CcBK6KDlkYVcnDXY099HcCDXd9dA==", "cpu": [ "arm64" ], @@ -1124,9 +1124,9 @@ } }, "node_modules/@esbuild/linux-ia32": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.27.3.tgz", - "integrity": "sha512-yGlQYjdxtLdh0a3jHjuwOrxQjOZYD/C9PfdbgJJF3TIZWnm/tMd/RcNiLngiu4iwcBAOezdnSLAwQDPqTmtTYg==", + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.27.4.tgz", + "integrity": 
"sha512-oPtixtAIzgvzYcKBQM/qZ3R+9TEUd1aNJQu0HhGyqtx6oS7qTpvjheIWBbes4+qu1bNlo2V4cbkISr8q6gRBFA==", "cpu": [ "ia32" ], @@ -1141,9 +1141,9 @@ } }, "node_modules/@esbuild/linux-loong64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.27.3.tgz", - "integrity": "sha512-WO60Sn8ly3gtzhyjATDgieJNet/KqsDlX5nRC5Y3oTFcS1l0KWba+SEa9Ja1GfDqSF1z6hif/SkpQJbL63cgOA==", + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.27.4.tgz", + "integrity": "sha512-8mL/vh8qeCoRcFH2nM8wm5uJP+ZcVYGGayMavi8GmRJjuI3g1v6Z7Ni0JJKAJW+m0EtUuARb6Lmp4hMjzCBWzA==", "cpu": [ "loong64" ], @@ -1158,9 +1158,9 @@ } }, "node_modules/@esbuild/linux-mips64el": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.27.3.tgz", - "integrity": "sha512-APsymYA6sGcZ4pD6k+UxbDjOFSvPWyZhjaiPyl/f79xKxwTnrn5QUnXR5prvetuaSMsb4jgeHewIDCIWljrSxw==", + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.27.4.tgz", + "integrity": "sha512-1RdrWFFiiLIW7LQq9Q2NES+HiD4NyT8Itj9AUeCl0IVCA459WnPhREKgwrpaIfTOe+/2rdntisegiPWn/r/aAw==", "cpu": [ "mips64el" ], @@ -1175,9 +1175,9 @@ } }, "node_modules/@esbuild/linux-ppc64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.27.3.tgz", - "integrity": "sha512-eizBnTeBefojtDb9nSh4vvVQ3V9Qf9Df01PfawPcRzJH4gFSgrObw+LveUyDoKU3kxi5+9RJTCWlj4FjYXVPEA==", + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.27.4.tgz", + "integrity": "sha512-tLCwNG47l3sd9lpfyx9LAGEGItCUeRCWeAx6x2Jmbav65nAwoPXfewtAdtbtit/pJFLUWOhpv0FpS6GQAmPrHA==", "cpu": [ "ppc64" ], @@ -1192,9 +1192,9 @@ } }, "node_modules/@esbuild/linux-riscv64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.27.3.tgz", - "integrity": 
"sha512-3Emwh0r5wmfm3ssTWRQSyVhbOHvqegUDRd0WhmXKX2mkHJe1SFCMJhagUleMq+Uci34wLSipf8Lagt4LlpRFWQ==", + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.27.4.tgz", + "integrity": "sha512-BnASypppbUWyqjd1KIpU4AUBiIhVr6YlHx/cnPgqEkNoVOhHg+YiSVxM1RLfiy4t9cAulbRGTNCKOcqHrEQLIw==", "cpu": [ "riscv64" ], @@ -1209,9 +1209,9 @@ } }, "node_modules/@esbuild/linux-s390x": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.27.3.tgz", - "integrity": "sha512-pBHUx9LzXWBc7MFIEEL0yD/ZVtNgLytvx60gES28GcWMqil8ElCYR4kvbV2BDqsHOvVDRrOxGySBM9Fcv744hw==", + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.27.4.tgz", + "integrity": "sha512-+eUqgb/Z7vxVLezG8bVB9SfBie89gMueS+I0xYh2tJdw3vqA/0ImZJ2ROeWwVJN59ihBeZ7Tu92dF/5dy5FttA==", "cpu": [ "s390x" ], @@ -1226,9 +1226,9 @@ } }, "node_modules/@esbuild/linux-x64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.3.tgz", - "integrity": "sha512-Czi8yzXUWIQYAtL/2y6vogER8pvcsOsk5cpwL4Gk5nJqH5UZiVByIY8Eorm5R13gq+DQKYg0+JyQoytLQas4dA==", + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.4.tgz", + "integrity": "sha512-S5qOXrKV8BQEzJPVxAwnryi2+Iq5pB40gTEIT69BQONqR7JH1EPIcQ/Uiv9mCnn05jff9umq/5nqzxlqTOg9NA==", "cpu": [ "x64" ], @@ -1243,9 +1243,9 @@ } }, "node_modules/@esbuild/netbsd-arm64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.27.3.tgz", - "integrity": "sha512-sDpk0RgmTCR/5HguIZa9n9u+HVKf40fbEUt+iTzSnCaGvY9kFP0YKBWZtJaraonFnqef5SlJ8/TiPAxzyS+UoA==", + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.27.4.tgz", + "integrity": "sha512-xHT8X4sb0GS8qTqiwzHqpY00C95DPAq7nAwX35Ie/s+LO9830hrMd3oX0ZMKLvy7vsonee73x0lmcdOVXFzd6Q==", "cpu": [ "arm64" ], @@ -1260,9 +1260,9 @@ } }, 
"node_modules/@esbuild/netbsd-x64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.27.3.tgz", - "integrity": "sha512-P14lFKJl/DdaE00LItAukUdZO5iqNH7+PjoBm+fLQjtxfcfFE20Xf5CrLsmZdq5LFFZzb5JMZ9grUwvtVYzjiA==", + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.27.4.tgz", + "integrity": "sha512-RugOvOdXfdyi5Tyv40kgQnI0byv66BFgAqjdgtAKqHoZTbTF2QqfQrFwa7cHEORJf6X2ht+l9ABLMP0dnKYsgg==", "cpu": [ "x64" ], @@ -1277,9 +1277,9 @@ } }, "node_modules/@esbuild/openbsd-arm64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.27.3.tgz", - "integrity": "sha512-AIcMP77AvirGbRl/UZFTq5hjXK+2wC7qFRGoHSDrZ5v5b8DK/GYpXW3CPRL53NkvDqb9D+alBiC/dV0Fb7eJcw==", + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.27.4.tgz", + "integrity": "sha512-2MyL3IAaTX+1/qP0O1SwskwcwCoOI4kV2IBX1xYnDDqthmq5ArrW94qSIKCAuRraMgPOmG0RDTA74mzYNQA9ow==", "cpu": [ "arm64" ], @@ -1294,9 +1294,9 @@ } }, "node_modules/@esbuild/openbsd-x64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.27.3.tgz", - "integrity": "sha512-DnW2sRrBzA+YnE70LKqnM3P+z8vehfJWHXECbwBmH/CU51z6FiqTQTHFenPlHmo3a8UgpLyH3PT+87OViOh1AQ==", + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.27.4.tgz", + "integrity": "sha512-u8fg/jQ5aQDfsnIV6+KwLOf1CmJnfu1ShpwqdwC0uA7ZPwFws55Ngc12vBdeUdnuWoQYx/SOQLGDcdlfXhYmXQ==", "cpu": [ "x64" ], @@ -1311,9 +1311,9 @@ } }, "node_modules/@esbuild/openharmony-arm64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.27.3.tgz", - "integrity": "sha512-NinAEgr/etERPTsZJ7aEZQvvg/A6IsZG/LgZy+81wON2huV7SrK3e63dU0XhyZP4RKGyTm7aOgmQk0bGp0fy2g==", + "version": "0.27.4", + "resolved": 
"https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.27.4.tgz", + "integrity": "sha512-JkTZrl6VbyO8lDQO3yv26nNr2RM2yZzNrNHEsj9bm6dOwwu9OYN28CjzZkH57bh4w0I2F7IodpQvUAEd1mbWXg==", "cpu": [ "arm64" ], @@ -1328,9 +1328,9 @@ } }, "node_modules/@esbuild/sunos-x64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.27.3.tgz", - "integrity": "sha512-PanZ+nEz+eWoBJ8/f8HKxTTD172SKwdXebZ0ndd953gt1HRBbhMsaNqjTyYLGLPdoWHy4zLU7bDVJztF5f3BHA==", + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.27.4.tgz", + "integrity": "sha512-/gOzgaewZJfeJTlsWhvUEmUG4tWEY2Spp5M20INYRg2ZKl9QPO3QEEgPeRtLjEWSW8FilRNacPOg8R1uaYkA6g==", "cpu": [ "x64" ], @@ -1345,9 +1345,9 @@ } }, "node_modules/@esbuild/win32-arm64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.27.3.tgz", - "integrity": "sha512-B2t59lWWYrbRDw/tjiWOuzSsFh1Y/E95ofKz7rIVYSQkUYBjfSgf6oeYPNWHToFRr2zx52JKApIcAS/D5TUBnA==", + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.27.4.tgz", + "integrity": "sha512-Z9SExBg2y32smoDQdf1HRwHRt6vAHLXcxD2uGgO/v2jK7Y718Ix4ndsbNMU/+1Qiem9OiOdaqitioZwxivhXYg==", "cpu": [ "arm64" ], @@ -1362,9 +1362,9 @@ } }, "node_modules/@esbuild/win32-ia32": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.27.3.tgz", - "integrity": "sha512-QLKSFeXNS8+tHW7tZpMtjlNb7HKau0QDpwm49u0vUp9y1WOF+PEzkU84y9GqYaAVW8aH8f3GcBck26jh54cX4Q==", + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.27.4.tgz", + "integrity": "sha512-DAyGLS0Jz5G5iixEbMHi5KdiApqHBWMGzTtMiJ72ZOLhbu/bzxgAe8Ue8CTS3n3HbIUHQz/L51yMdGMeoxXNJw==", "cpu": [ "ia32" ], @@ -1379,9 +1379,9 @@ } }, "node_modules/@esbuild/win32-x64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.27.3.tgz", - 
"integrity": "sha512-4uJGhsxuptu3OcpVAzli+/gWusVGwZZHTlS63hh++ehExkVT8SgiEf7/uC/PclrPPkLhZqGgCTjd0VWLo6xMqA==", + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.27.4.tgz", + "integrity": "sha512-+knoa0BDoeXgkNvvV1vvbZX4+hizelrkwmGJBdT17t8FNPwG2lKemmuMZlmaNQ3ws3DKKCxpb4zRZEIp3UxFCg==", "cpu": [ "x64" ], @@ -2145,9 +2145,9 @@ } }, "node_modules/@mui/x-charts": { - "version": "8.27.5", - "resolved": "https://registry.npmjs.org/@mui/x-charts/-/x-charts-8.27.5.tgz", - "integrity": "sha512-45XAKzEaTXx8D612zAghr6ofNK/OHukKTl9kuI+UmpaOE3se+khNwKHeOyXcus2uUoGoL6jxZcENklZmJDxzCg==", + "version": "8.28.0", + "resolved": "https://registry.npmjs.org/@mui/x-charts/-/x-charts-8.28.0.tgz", + "integrity": "sha512-KAVBjM/nT90rJ6IxslT8ko4BD9gQ4g3Fgk6+eRwEOROldEHhVBw4jUGgm9D9rJEo2bkCMbkX+hImP6aWH3w23g==", "license": "MIT", "dependencies": { "@babel/runtime": "^7.28.4", @@ -2614,16 +2614,16 @@ } }, "node_modules/@rolldown/pluginutils": { - "version": "1.0.0-rc.2", - "resolved": "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-rc.2.tgz", - "integrity": "sha512-izyXV/v+cHiRfozX62W9htOAvwMo4/bXKDrQ+vom1L1qRuexPock/7VZDAhnpHCLNejd3NJ6hiab+tO0D44Rgw==", + "version": "1.0.0-rc.7", + "resolved": "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-rc.7.tgz", + "integrity": "sha512-qujRfC8sFVInYSPPMLQByRh7zhwkGFS4+tyMQ83srV1qrxL4g8E2tyxVVyxd0+8QeBM1mIk9KbWxkegRr76XzA==", "dev": true, "license": "MIT" }, "node_modules/@rollup/rollup-android-arm-eabi": { - "version": "4.59.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.59.0.tgz", - "integrity": "sha512-upnNBkA6ZH2VKGcBj9Fyl9IGNPULcjXRlg0LLeaioQWueH30p6IXtJEbKAgvyv+mJaMxSm1l6xwDXYjpEMiLMg==", + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.60.0.tgz", + "integrity": 
"sha512-WOhNW9K8bR3kf4zLxbfg6Pxu2ybOUbB2AjMDHSQx86LIF4rH4Ft7vmMwNt0loO0eonglSNy4cpD3MKXXKQu0/A==", "cpu": [ "arm" ], @@ -2635,9 +2635,9 @@ ] }, "node_modules/@rollup/rollup-android-arm64": { - "version": "4.59.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.59.0.tgz", - "integrity": "sha512-hZ+Zxj3SySm4A/DylsDKZAeVg0mvi++0PYVceVyX7hemkw7OreKdCvW2oQ3T1FMZvCaQXqOTHb8qmBShoqk69Q==", + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.60.0.tgz", + "integrity": "sha512-u6JHLll5QKRvjciE78bQXDmqRqNs5M/3GVqZeMwvmjaNODJih/WIrJlFVEihvV0MiYFmd+ZyPr9wxOVbPAG2Iw==", "cpu": [ "arm64" ], @@ -2649,9 +2649,9 @@ ] }, "node_modules/@rollup/rollup-darwin-arm64": { - "version": "4.59.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.59.0.tgz", - "integrity": "sha512-W2Psnbh1J8ZJw0xKAd8zdNgF9HRLkdWwwdWqubSVk0pUuQkoHnv7rx4GiF9rT4t5DIZGAsConRE3AxCdJ4m8rg==", + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.60.0.tgz", + "integrity": "sha512-qEF7CsKKzSRc20Ciu2Zw1wRrBz4g56F7r/vRwY430UPp/nt1x21Q/fpJ9N5l47WWvJlkNCPJz3QRVw008fi7yA==", "cpu": [ "arm64" ], @@ -2663,9 +2663,9 @@ ] }, "node_modules/@rollup/rollup-darwin-x64": { - "version": "4.59.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.59.0.tgz", - "integrity": "sha512-ZW2KkwlS4lwTv7ZVsYDiARfFCnSGhzYPdiOU4IM2fDbL+QGlyAbjgSFuqNRbSthybLbIJ915UtZBtmuLrQAT/w==", + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.60.0.tgz", + "integrity": "sha512-WADYozJ4QCnXCH4wPB+3FuGmDPoFseVCUrANmA5LWwGmC6FL14BWC7pcq+FstOZv3baGX65tZ378uT6WG8ynTw==", "cpu": [ "x64" ], @@ -2677,9 +2677,9 @@ ] }, "node_modules/@rollup/rollup-freebsd-arm64": { - "version": "4.59.0", - "resolved": 
"https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.59.0.tgz", - "integrity": "sha512-EsKaJ5ytAu9jI3lonzn3BgG8iRBjV4LxZexygcQbpiU0wU0ATxhNVEpXKfUa0pS05gTcSDMKpn3Sx+QB9RlTTA==", + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.60.0.tgz", + "integrity": "sha512-6b8wGHJlDrGeSE3aH5mGNHBjA0TTkxdoNHik5EkvPHCt351XnigA4pS7Wsj/Eo9Y8RBU6f35cjN9SYmCFBtzxw==", "cpu": [ "arm64" ], @@ -2691,9 +2691,9 @@ ] }, "node_modules/@rollup/rollup-freebsd-x64": { - "version": "4.59.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.59.0.tgz", - "integrity": "sha512-d3DuZi2KzTMjImrxoHIAODUZYoUUMsuUiY4SRRcJy6NJoZ6iIqWnJu9IScV9jXysyGMVuW+KNzZvBLOcpdl3Vg==", + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.60.0.tgz", + "integrity": "sha512-h25Ga0t4jaylMB8M/JKAyrvvfxGRjnPQIR8lnCayyzEjEOx2EJIlIiMbhpWxDRKGKF8jbNH01NnN663dH638mA==", "cpu": [ "x64" ], @@ -2705,9 +2705,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm-gnueabihf": { - "version": "4.59.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.59.0.tgz", - "integrity": "sha512-t4ONHboXi/3E0rT6OZl1pKbl2Vgxf9vJfWgmUoCEVQVxhW6Cw/c8I6hbbu7DAvgp82RKiH7TpLwxnJeKv2pbsw==", + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.60.0.tgz", + "integrity": "sha512-RzeBwv0B3qtVBWtcuABtSuCzToo2IEAIQrcyB/b2zMvBWVbjo8bZDjACUpnaafaxhTw2W+imQbP2BD1usasK4g==", "cpu": [ "arm" ], @@ -2719,9 +2719,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm-musleabihf": { - "version": "4.59.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.59.0.tgz", - "integrity": "sha512-CikFT7aYPA2ufMD086cVORBYGHffBo4K8MQ4uPS/ZnY54GKj36i196u8U+aDVT2LX4eSMbyHtyOh7D7Zvk2VvA==", + 
"version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.60.0.tgz", + "integrity": "sha512-Sf7zusNI2CIU1HLzuu9Tc5YGAHEZs5Lu7N1ssJG4Tkw6e0MEsN7NdjUDDfGNHy2IU+ENyWT+L2obgWiguWibWQ==", "cpu": [ "arm" ], @@ -2733,9 +2733,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm64-gnu": { - "version": "4.59.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.59.0.tgz", - "integrity": "sha512-jYgUGk5aLd1nUb1CtQ8E+t5JhLc9x5WdBKew9ZgAXg7DBk0ZHErLHdXM24rfX+bKrFe+Xp5YuJo54I5HFjGDAA==", + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.60.0.tgz", + "integrity": "sha512-DX2x7CMcrJzsE91q7/O02IJQ5/aLkVtYFryqCjduJhUfGKG6yJV8hxaw8pZa93lLEpPTP/ohdN4wFz7yp/ry9A==", "cpu": [ "arm64" ], @@ -2747,9 +2747,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm64-musl": { - "version": "4.59.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.59.0.tgz", - "integrity": "sha512-peZRVEdnFWZ5Bh2KeumKG9ty7aCXzzEsHShOZEFiCQlDEepP1dpUl/SrUNXNg13UmZl+gzVDPsiCwnV1uI0RUA==", + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.60.0.tgz", + "integrity": "sha512-09EL+yFVbJZlhcQfShpswwRZ0Rg+z/CsSELFCnPt3iK+iqwGsI4zht3secj5vLEs957QvFFXnzAT0FFPIxSrkQ==", "cpu": [ "arm64" ], @@ -2761,9 +2761,9 @@ ] }, "node_modules/@rollup/rollup-linux-loong64-gnu": { - "version": "4.59.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.59.0.tgz", - "integrity": "sha512-gbUSW/97f7+r4gHy3Jlup8zDG190AuodsWnNiXErp9mT90iCy9NKKU0Xwx5k8VlRAIV2uU9CsMnEFg/xXaOfXg==", + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.60.0.tgz", + "integrity": 
"sha512-i9IcCMPr3EXm8EQg5jnja0Zyc1iFxJjZWlb4wr7U2Wx/GrddOuEafxRdMPRYVaXjgbhvqalp6np07hN1w9kAKw==", "cpu": [ "loong64" ], @@ -2775,9 +2775,9 @@ ] }, "node_modules/@rollup/rollup-linux-loong64-musl": { - "version": "4.59.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-musl/-/rollup-linux-loong64-musl-4.59.0.tgz", - "integrity": "sha512-yTRONe79E+o0FWFijasoTjtzG9EBedFXJMl888NBEDCDV9I2wGbFFfJQQe63OijbFCUZqxpHz1GzpbtSFikJ4Q==", + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-musl/-/rollup-linux-loong64-musl-4.60.0.tgz", + "integrity": "sha512-DGzdJK9kyJ+B78MCkWeGnpXJ91tK/iKA6HwHxF4TAlPIY7GXEvMe8hBFRgdrR9Ly4qebR/7gfUs9y2IoaVEyog==", "cpu": [ "loong64" ], @@ -2789,9 +2789,9 @@ ] }, "node_modules/@rollup/rollup-linux-ppc64-gnu": { - "version": "4.59.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.59.0.tgz", - "integrity": "sha512-sw1o3tfyk12k3OEpRddF68a1unZ5VCN7zoTNtSn2KndUE+ea3m3ROOKRCZxEpmT9nsGnogpFP9x6mnLTCaoLkA==", + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.60.0.tgz", + "integrity": "sha512-RwpnLsqC8qbS8z1H1AxBA1H6qknR4YpPR9w2XX0vo2Sz10miu57PkNcnHVaZkbqyw/kUWfKMI73jhmfi9BRMUQ==", "cpu": [ "ppc64" ], @@ -2803,9 +2803,9 @@ ] }, "node_modules/@rollup/rollup-linux-ppc64-musl": { - "version": "4.59.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-musl/-/rollup-linux-ppc64-musl-4.59.0.tgz", - "integrity": "sha512-+2kLtQ4xT3AiIxkzFVFXfsmlZiG5FXYW7ZyIIvGA7Bdeuh9Z0aN4hVyXS/G1E9bTP/vqszNIN/pUKCk/BTHsKA==", + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-musl/-/rollup-linux-ppc64-musl-4.60.0.tgz", + "integrity": "sha512-Z8pPf54Ly3aqtdWC3G4rFigZgNvd+qJlOE52fmko3KST9SoGfAdSRCwyoyG05q1HrrAblLbk1/PSIV+80/pxLg==", "cpu": [ "ppc64" ], @@ -2817,9 +2817,9 @@ ] }, "node_modules/@rollup/rollup-linux-riscv64-gnu": { - 
"version": "4.59.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.59.0.tgz", - "integrity": "sha512-NDYMpsXYJJaj+I7UdwIuHHNxXZ/b/N2hR15NyH3m2qAtb/hHPA4g4SuuvrdxetTdndfj9b1WOmy73kcPRoERUg==", + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.60.0.tgz", + "integrity": "sha512-3a3qQustp3COCGvnP4SvrMHnPQ9d1vzCakQVRTliaz8cIp/wULGjiGpbcqrkv0WrHTEp8bQD/B3HBjzujVWLOA==", "cpu": [ "riscv64" ], @@ -2831,9 +2831,9 @@ ] }, "node_modules/@rollup/rollup-linux-riscv64-musl": { - "version": "4.59.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.59.0.tgz", - "integrity": "sha512-nLckB8WOqHIf1bhymk+oHxvM9D3tyPndZH8i8+35p/1YiVoVswPid2yLzgX7ZJP0KQvnkhM4H6QZ5m0LzbyIAg==", + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.60.0.tgz", + "integrity": "sha512-pjZDsVH/1VsghMJ2/kAaxt6dL0psT6ZexQVrijczOf+PeP2BUqTHYejk3l6TlPRydggINOeNRhvpLa0AYpCWSQ==", "cpu": [ "riscv64" ], @@ -2845,9 +2845,9 @@ ] }, "node_modules/@rollup/rollup-linux-s390x-gnu": { - "version": "4.59.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.59.0.tgz", - "integrity": "sha512-oF87Ie3uAIvORFBpwnCvUzdeYUqi2wY6jRFWJAy1qus/udHFYIkplYRW+wo+GRUP4sKzYdmE1Y3+rY5Gc4ZO+w==", + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.60.0.tgz", + "integrity": "sha512-3ObQs0BhvPgiUVZrN7gqCSvmFuMWvWvsjG5ayJ3Lraqv+2KhOsp+pUbigqbeWqueGIsnn+09HBw27rJ+gYK4VQ==", "cpu": [ "s390x" ], @@ -2859,9 +2859,9 @@ ] }, "node_modules/@rollup/rollup-linux-x64-gnu": { - "version": "4.59.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.59.0.tgz", - "integrity": 
"sha512-3AHmtQq/ppNuUspKAlvA8HtLybkDflkMuLK4DPo77DfthRb71V84/c4MlWJXixZz4uruIH4uaa07IqoAkG64fg==", + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.60.0.tgz", + "integrity": "sha512-EtylprDtQPdS5rXvAayrNDYoJhIz1/vzN2fEubo3yLE7tfAw+948dO0g4M0vkTVFhKojnF+n6C8bDNe+gDRdTg==", "cpu": [ "x64" ], @@ -2873,9 +2873,9 @@ ] }, "node_modules/@rollup/rollup-linux-x64-musl": { - "version": "4.59.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.59.0.tgz", - "integrity": "sha512-2UdiwS/9cTAx7qIUZB/fWtToJwvt0Vbo0zmnYt7ED35KPg13Q0ym1g442THLC7VyI6JfYTP4PiSOWyoMdV2/xg==", + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.60.0.tgz", + "integrity": "sha512-k09oiRCi/bHU9UVFqD17r3eJR9bn03TyKraCrlz5ULFJGdJGi7VOmm9jl44vOJvRJ6P7WuBi/s2A97LxxHGIdw==", "cpu": [ "x64" ], @@ -2887,9 +2887,9 @@ ] }, "node_modules/@rollup/rollup-openbsd-x64": { - "version": "4.59.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-openbsd-x64/-/rollup-openbsd-x64-4.59.0.tgz", - "integrity": "sha512-M3bLRAVk6GOwFlPTIxVBSYKUaqfLrn8l0psKinkCFxl4lQvOSz8ZrKDz2gxcBwHFpci0B6rttydI4IpS4IS/jQ==", + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openbsd-x64/-/rollup-openbsd-x64-4.60.0.tgz", + "integrity": "sha512-1o/0/pIhozoSaDJoDcec+IVLbnRtQmHwPV730+AOD29lHEEo4F5BEUB24H0OBdhbBBDwIOSuf7vgg0Ywxdfiiw==", "cpu": [ "x64" ], @@ -2901,9 +2901,9 @@ ] }, "node_modules/@rollup/rollup-openharmony-arm64": { - "version": "4.59.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.59.0.tgz", - "integrity": "sha512-tt9KBJqaqp5i5HUZzoafHZX8b5Q2Fe7UjYERADll83O4fGqJ49O1FsL6LpdzVFQcpwvnyd0i+K/VSwu/o/nWlA==", + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.60.0.tgz", + "integrity": 
"sha512-pESDkos/PDzYwtyzB5p/UoNU/8fJo68vcXM9ZW2V0kjYayj1KaaUfi1NmTUTUpMn4UhU4gTuK8gIaFO4UGuMbA==", "cpu": [ "arm64" ], @@ -2915,9 +2915,9 @@ ] }, "node_modules/@rollup/rollup-win32-arm64-msvc": { - "version": "4.59.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.59.0.tgz", - "integrity": "sha512-V5B6mG7OrGTwnxaNUzZTDTjDS7F75PO1ae6MJYdiMu60sq0CqN5CVeVsbhPxalupvTX8gXVSU9gq+Rx1/hvu6A==", + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.60.0.tgz", + "integrity": "sha512-hj1wFStD7B1YBeYmvY+lWXZ7ey73YGPcViMShYikqKT1GtstIKQAtfUI6yrzPjAy/O7pO0VLXGmUVWXQMaYgTQ==", "cpu": [ "arm64" ], @@ -2929,9 +2929,9 @@ ] }, "node_modules/@rollup/rollup-win32-ia32-msvc": { - "version": "4.59.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.59.0.tgz", - "integrity": "sha512-UKFMHPuM9R0iBegwzKF4y0C4J9u8C6MEJgFuXTBerMk7EJ92GFVFYBfOZaSGLu6COf7FxpQNqhNS4c4icUPqxA==", + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.60.0.tgz", + "integrity": "sha512-SyaIPFoxmUPlNDq5EHkTbiKzmSEmq/gOYFI/3HHJ8iS/v1mbugVa7dXUzcJGQfoytp9DJFLhHH4U3/eTy2Bq4w==", "cpu": [ "ia32" ], @@ -2943,9 +2943,9 @@ ] }, "node_modules/@rollup/rollup-win32-x64-gnu": { - "version": "4.59.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.59.0.tgz", - "integrity": "sha512-laBkYlSS1n2L8fSo1thDNGrCTQMmxjYY5G0WFWjFFYZkKPjsMBsgJfGf4TLxXrF6RyhI60L8TMOjBMvXiTcxeA==", + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.60.0.tgz", + "integrity": "sha512-RdcryEfzZr+lAr5kRm2ucN9aVlCCa2QNq4hXelZxb8GG0NJSazq44Z3PCCc8wISRuCVnGs0lQJVX5Vp6fKA+IA==", "cpu": [ "x64" ], @@ -2957,9 +2957,9 @@ ] }, "node_modules/@rollup/rollup-win32-x64-msvc": { - "version": "4.59.0", - "resolved": 
"https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.59.0.tgz", - "integrity": "sha512-2HRCml6OztYXyJXAvdDXPKcawukWY2GpR5/nxKp4iBgiO3wcoEGkAaqctIbZcNB6KlUQBIqt8VYkNSj2397EfA==", + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.60.0.tgz", + "integrity": "sha512-PrsWNQ8BuE00O3Xsx3ALh2Df8fAj9+cvvX9AIA6o4KpATR98c9mud4XtDWVvsEuyia5U4tVSTKygawyJkjm60w==", "cpu": [ "x64" ], @@ -2978,9 +2978,9 @@ "license": "MIT" }, "node_modules/@swc/core": { - "version": "1.15.18", - "resolved": "https://registry.npmjs.org/@swc/core/-/core-1.15.18.tgz", - "integrity": "sha512-z87aF9GphWp//fnkRsqvtY+inMVPgYW3zSlXH1kJFvRT5H/wiAn+G32qW5l3oEk63KSF1x3Ov0BfHCObAmT8RA==", + "version": "1.15.21", + "resolved": "https://registry.npmjs.org/@swc/core/-/core-1.15.21.tgz", + "integrity": "sha512-fkk7NJcBscrR3/F8jiqlMptRHP650NxqDnspBMrRe5d8xOoCy9MLL5kOBLFXjFLfMo3KQQHhk+/jUULOMlR1uQ==", "dev": true, "hasInstallScript": true, "license": "Apache-2.0", @@ -2996,16 +2996,18 @@ "url": "https://opencollective.com/swc" }, "optionalDependencies": { - "@swc/core-darwin-arm64": "1.15.18", - "@swc/core-darwin-x64": "1.15.18", - "@swc/core-linux-arm-gnueabihf": "1.15.18", - "@swc/core-linux-arm64-gnu": "1.15.18", - "@swc/core-linux-arm64-musl": "1.15.18", - "@swc/core-linux-x64-gnu": "1.15.18", - "@swc/core-linux-x64-musl": "1.15.18", - "@swc/core-win32-arm64-msvc": "1.15.18", - "@swc/core-win32-ia32-msvc": "1.15.18", - "@swc/core-win32-x64-msvc": "1.15.18" + "@swc/core-darwin-arm64": "1.15.21", + "@swc/core-darwin-x64": "1.15.21", + "@swc/core-linux-arm-gnueabihf": "1.15.21", + "@swc/core-linux-arm64-gnu": "1.15.21", + "@swc/core-linux-arm64-musl": "1.15.21", + "@swc/core-linux-ppc64-gnu": "1.15.21", + "@swc/core-linux-s390x-gnu": "1.15.21", + "@swc/core-linux-x64-gnu": "1.15.21", + "@swc/core-linux-x64-musl": "1.15.21", + "@swc/core-win32-arm64-msvc": "1.15.21", + "@swc/core-win32-ia32-msvc": "1.15.21", + 
"@swc/core-win32-x64-msvc": "1.15.21" }, "peerDependencies": { "@swc/helpers": ">=0.5.17" @@ -3017,9 +3019,9 @@ } }, "node_modules/@swc/core-darwin-arm64": { - "version": "1.15.18", - "resolved": "https://registry.npmjs.org/@swc/core-darwin-arm64/-/core-darwin-arm64-1.15.18.tgz", - "integrity": "sha512-+mIv7uBuSaywN3C9LNuWaX1jJJ3SKfiJuE6Lr3bd+/1Iv8oMU7oLBjYMluX1UrEPzwN2qCdY6Io0yVicABoCwQ==", + "version": "1.15.21", + "resolved": "https://registry.npmjs.org/@swc/core-darwin-arm64/-/core-darwin-arm64-1.15.21.tgz", + "integrity": "sha512-SA8SFg9dp0qKRH8goWsax6bptFE2EdmPf2YRAQW9WoHGf3XKM1bX0nd5UdwxmC5hXsBUZAYf7xSciCler6/oyA==", "cpu": [ "arm64" ], @@ -3034,9 +3036,9 @@ } }, "node_modules/@swc/core-darwin-x64": { - "version": "1.15.18", - "resolved": "https://registry.npmjs.org/@swc/core-darwin-x64/-/core-darwin-x64-1.15.18.tgz", - "integrity": "sha512-wZle0eaQhnzxWX5V/2kEOI6Z9vl/lTFEC6V4EWcn+5pDjhemCpQv9e/TDJ0GIoiClX8EDWRvuZwh+Z3dhL1NAg==", + "version": "1.15.21", + "resolved": "https://registry.npmjs.org/@swc/core-darwin-x64/-/core-darwin-x64-1.15.21.tgz", + "integrity": "sha512-//fOVntgowz9+V90lVsNCtyyrtbHp3jWH6Rch7MXHXbcvbLmbCTmssl5DeedUWLLGiAAW1wksBdqdGYOTjaNLw==", "cpu": [ "x64" ], @@ -3051,9 +3053,9 @@ } }, "node_modules/@swc/core-linux-arm-gnueabihf": { - "version": "1.15.18", - "resolved": "https://registry.npmjs.org/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.15.18.tgz", - "integrity": "sha512-ao61HGXVqrJFHAcPtF4/DegmwEkVCo4HApnotLU8ognfmU8x589z7+tcf3hU+qBiU1WOXV5fQX6W9Nzs6hjxDw==", + "version": "1.15.21", + "resolved": "https://registry.npmjs.org/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.15.21.tgz", + "integrity": "sha512-meNI4Sh6h9h8DvIfEc0l5URabYMSuNvyisLmG6vnoYAS43s8ON3NJR8sDHvdP7NJTrLe0q/x2XCn6yL/BeHcZg==", "cpu": [ "arm" ], @@ -3068,9 +3070,9 @@ } }, "node_modules/@swc/core-linux-arm64-gnu": { - "version": "1.15.18", - "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.15.18.tgz", - 
"integrity": "sha512-3xnctOBLIq3kj8PxOCgPrGjBLP/kNOddr6f5gukYt/1IZxsITQaU9TDyjeX6jG+FiCIHjCuWuffsyQDL5Ew1bg==", + "version": "1.15.21", + "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.15.21.tgz", + "integrity": "sha512-QrXlNQnHeXqU2EzLlnsPoWEh8/GtNJLvfMiPsDhk+ht6Xv8+vhvZ5YZ/BokNWSIZiWPKLAqR0M7T92YF5tmD3g==", "cpu": [ "arm64" ], @@ -3085,9 +3087,9 @@ } }, "node_modules/@swc/core-linux-arm64-musl": { - "version": "1.15.18", - "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.15.18.tgz", - "integrity": "sha512-0a+Lix+FSSHBSBOA0XznCcHo5/1nA6oLLjcnocvzXeqtdjnPb+SvchItHI+lfeiuj1sClYPDvPMLSLyXFaiIKw==", + "version": "1.15.21", + "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.15.21.tgz", + "integrity": "sha512-8/yGCMO333ultDaMQivE5CjO6oXDPeeg1IV4sphojPkb0Pv0i6zvcRIkgp60xDB+UxLr6VgHgt+BBgqS959E9g==", "cpu": [ "arm64" ], @@ -3101,10 +3103,44 @@ "node": ">=10" } }, + "node_modules/@swc/core-linux-ppc64-gnu": { + "version": "1.15.21", + "resolved": "https://registry.npmjs.org/@swc/core-linux-ppc64-gnu/-/core-linux-ppc64-gnu-1.15.21.tgz", + "integrity": "sha512-ucW0HzPx0s1dgRvcvuLSPSA/2Kk/VYTv9st8qe1Kc22Gu0Q0rH9+6TcBTmMuNIp0Xs4BPr1uBttmbO1wEGI49Q==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "Apache-2.0 AND MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-linux-s390x-gnu": { + "version": "1.15.21", + "resolved": "https://registry.npmjs.org/@swc/core-linux-s390x-gnu/-/core-linux-s390x-gnu-1.15.21.tgz", + "integrity": "sha512-ulTnOGc5I7YRObE/9NreAhQg94QkiR5qNhhcUZ1iFAYjzg/JGAi1ch+s/Ixe61pMIr8bfVrF0NOaB0f8wjaAfA==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "Apache-2.0 AND MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=10" + } + }, "node_modules/@swc/core-linux-x64-gnu": { - "version": "1.15.18", - "resolved": 
"https://registry.npmjs.org/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.15.18.tgz", - "integrity": "sha512-wG9J8vReUlpaHz4KOD/5UE1AUgirimU4UFT9oZmupUDEofxJKYb1mTA/DrMj0s78bkBiNI+7Fo2EgPuvOJfuAA==", + "version": "1.15.21", + "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.15.21.tgz", + "integrity": "sha512-D0RokxtM+cPvSqJIKR6uja4hbD+scI9ezo95mBhfSyLUs9wnPPl26sLp1ZPR/EXRdYm3F3S6RUtVi+8QXhT24Q==", "cpu": [ "x64" ], @@ -3119,9 +3155,9 @@ } }, "node_modules/@swc/core-linux-x64-musl": { - "version": "1.15.18", - "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.15.18.tgz", - "integrity": "sha512-4nwbVvCphKzicwNWRmvD5iBaZj8JYsRGa4xOxJmOyHlMDpsvvJ2OR2cODlvWyGFH6BYL1MfIAK3qph3hp0Az6g==", + "version": "1.15.21", + "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.15.21.tgz", + "integrity": "sha512-nER8u7VeRfmU6fMDzl1NQAbbB/G7O2avmvCOwIul1uGkZ2/acbPH+DCL9h5+0yd/coNcxMBTL6NGepIew+7C2w==", "cpu": [ "x64" ], @@ -3136,9 +3172,9 @@ } }, "node_modules/@swc/core-win32-arm64-msvc": { - "version": "1.15.18", - "resolved": "https://registry.npmjs.org/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.15.18.tgz", - "integrity": "sha512-zk0RYO+LjiBCat2RTMHzAWaMky0cra9loH4oRrLKLLNuL+jarxKLFDA8xTZWEkCPLjUTwlRN7d28eDLLMgtUcQ==", + "version": "1.15.21", + "resolved": "https://registry.npmjs.org/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.15.21.tgz", + "integrity": "sha512-+/AgNBnjYugUA8C0Do4YzymgvnGbztv7j8HKSQLvR/DQgZPoXQ2B3PqB2mTtGh/X5DhlJWiqnunN35JUgWcAeQ==", "cpu": [ "arm64" ], @@ -3153,9 +3189,9 @@ } }, "node_modules/@swc/core-win32-ia32-msvc": { - "version": "1.15.18", - "resolved": "https://registry.npmjs.org/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.15.18.tgz", - "integrity": "sha512-yVuTrZ0RccD5+PEkpcLOBAuPbYBXS6rslENvIXfvJGXSdX5QGi1ehC4BjAMl5FkKLiam4kJECUI0l7Hq7T1vwg==", + "version": "1.15.21", + "resolved": 
"https://registry.npmjs.org/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.15.21.tgz", + "integrity": "sha512-IkSZj8PX/N4HcaFhMQtzmkV8YSnuNoJ0E6OvMwFiOfejPhiKXvl7CdDsn1f4/emYEIDO3fpgZW9DTaCRMDxaDA==", "cpu": [ "ia32" ], @@ -3170,9 +3206,9 @@ } }, "node_modules/@swc/core-win32-x64-msvc": { - "version": "1.15.18", - "resolved": "https://registry.npmjs.org/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.15.18.tgz", - "integrity": "sha512-7NRmE4hmUQNCbYU3Hn9Tz57mK9Qq4c97ZS+YlamlK6qG9Fb5g/BB3gPDe0iLlJkns/sYv2VWSkm8c3NmbEGjbg==", + "version": "1.15.21", + "resolved": "https://registry.npmjs.org/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.15.21.tgz", + "integrity": "sha512-zUyWso7OOENB6e1N1hNuNn8vbvLsTdKQ5WKLgt/JcBNfJhKy/6jmBmqI3GXk/MyvQKd5SLvP7A0F36p7TeDqvw==", "cpu": [ "x64" ], @@ -3194,9 +3230,9 @@ "license": "Apache-2.0" }, "node_modules/@swc/types": { - "version": "0.1.25", - "resolved": "https://registry.npmjs.org/@swc/types/-/types-0.1.25.tgz", - "integrity": "sha512-iAoY/qRhNH8a/hBvm3zKj9qQ4oc2+3w1unPJa2XvTK3XjeLXtzcCingVPw/9e5mn1+0yPqxcBGp9Jf0pkfMb1g==", + "version": "0.1.26", + "resolved": "https://registry.npmjs.org/@swc/types/-/types-0.1.26.tgz", + "integrity": "sha512-lyMwd7WGgG79RS7EERZV3T8wMdmPq3xwyg+1nmAM64kIhx5yl+juO2PYIHb7vTiPgPCj8LYjsNV2T5wiQHUEaw==", "dev": true, "license": "Apache-2.0", "dependencies": { @@ -3204,9 +3240,9 @@ } }, "node_modules/@tanstack/history": { - "version": "1.161.4", - "resolved": "https://registry.npmjs.org/@tanstack/history/-/history-1.161.4.tgz", - "integrity": "sha512-Kp/WSt411ZWYvgXy6uiv5RmhHrz9cAml05AQPrtdAp7eUqvIDbMGPnML25OKbzR3RJ1q4wgENxDTvlGPa9+Mww==", + "version": "1.161.6", + "resolved": "https://registry.npmjs.org/@tanstack/history/-/history-1.161.6.tgz", + "integrity": "sha512-NaOGLRrddszbQj9upGat6HG/4TKvXLvu+osAIgfxPYA+eIvYKv8GKDJOrY2D3/U9MRnKfMWD7bU4jeD4xmqyIg==", "license": "MIT", "engines": { "node": ">=20.19" @@ -3217,17 +3253,15 @@ } }, "node_modules/@tanstack/react-router": { - "version": "1.166.7", 
- "resolved": "https://registry.npmjs.org/@tanstack/react-router/-/react-router-1.166.7.tgz", - "integrity": "sha512-LLcXu2nrCn2WL+w0YAbg3CRZIIO2cYVSC3y+ZYlFBxBs4hh8eoNP1EWFvRLZGCFYpqON7x6qUf1u0W7tH0cJJw==", + "version": "1.168.4", + "resolved": "https://registry.npmjs.org/@tanstack/react-router/-/react-router-1.168.4.tgz", + "integrity": "sha512-1o6B0Gddlzxmw1hQkqfJC5X67P/bDaP3kVQA0X6HHO2P8e724xKiiY9V3eAOJw9YIHqmWIFewWJgzcFOgR67dA==", "license": "MIT", "dependencies": { - "@tanstack/history": "1.161.4", - "@tanstack/react-store": "^0.9.1", - "@tanstack/router-core": "1.166.7", - "isbot": "^5.1.22", - "tiny-invariant": "^1.3.3", - "tiny-warning": "^1.0.3" + "@tanstack/history": "1.161.6", + "@tanstack/react-store": "^0.9.3", + "@tanstack/router-core": "1.168.4", + "isbot": "^5.1.22" }, "engines": { "node": ">=20.19" @@ -3242,12 +3276,12 @@ } }, "node_modules/@tanstack/react-store": { - "version": "0.9.2", - "resolved": "https://registry.npmjs.org/@tanstack/react-store/-/react-store-0.9.2.tgz", - "integrity": "sha512-Vt5usJE5sHG/cMechQfmwvwne6ktGCELe89Lmvoxe3LKRoFrhPa8OCKWs0NliG8HTJElEIj7PLtaBQIcux5pAQ==", + "version": "0.9.3", + "resolved": "https://registry.npmjs.org/@tanstack/react-store/-/react-store-0.9.3.tgz", + "integrity": "sha512-y2iHd/N9OkoQbFJLUX1T9vbc2O9tjH0pQRgTcx1/Nz4IlwLvkgpuglXUx+mXt0g5ZDFrEeDnONPqkbfxXJKwRg==", "license": "MIT", "dependencies": { - "@tanstack/store": "0.9.2", + "@tanstack/store": "0.9.3", "use-sync-external-store": "^1.6.0" }, "funding": { @@ -3260,18 +3294,18 @@ } }, "node_modules/@tanstack/router-core": { - "version": "1.166.7", - "resolved": "https://registry.npmjs.org/@tanstack/router-core/-/router-core-1.166.7.tgz", - "integrity": "sha512-MCc8wYIIcxmbeidM8PL2QeaAjUIHyhEDIZPW6NGfn/uwvyi+K2ucn3AGCxxcXl4JGGm0Mx9+7buYl1v3HdcFrg==", + "version": "1.168.4", + "resolved": "https://registry.npmjs.org/@tanstack/router-core/-/router-core-1.168.4.tgz", + "integrity": 
"sha512-dhkan3B1KPDNT90UDSNtMbtedIbtI+WpYReE4d2GlgtA9m1w3FkrXe8az2UOIwKAtDea2Qlc8yv3st4Jro/gNQ==", "license": "MIT", "dependencies": { - "@tanstack/history": "1.161.4", - "@tanstack/store": "^0.9.1", + "@tanstack/history": "1.161.6", "cookie-es": "^2.0.0", "seroval": "^1.4.2", - "seroval-plugins": "^1.4.2", - "tiny-invariant": "^1.3.3", - "tiny-warning": "^1.0.3" + "seroval-plugins": "^1.4.2" + }, + "bin": { + "intent": "bin/intent.js" }, "engines": { "node": ">=20.19" @@ -3282,15 +3316,15 @@ } }, "node_modules/@tanstack/router-generator": { - "version": "1.166.7", - "resolved": "https://registry.npmjs.org/@tanstack/router-generator/-/router-generator-1.166.7.tgz", - "integrity": "sha512-lBI0VS7J1zMrJhfvT+3FMq9jPdOrJ3VgciPXyYvZBF/a9Mr8T94MU78PqrBNuJbYh7qCFO14ZhArUFqkYGuozQ==", + "version": "1.166.18", + "resolved": "https://registry.npmjs.org/@tanstack/router-generator/-/router-generator-1.166.18.tgz", + "integrity": "sha512-6zqp8LFRyCtBFEkRv7TCkmql6I8y9c+deTivbZwJZyl4DYULsmg4eh+Du4j11grEIG1atHxLONfmkKmzCp0a3w==", "dev": true, "license": "MIT", "dependencies": { - "@tanstack/router-core": "1.166.7", - "@tanstack/router-utils": "1.161.4", - "@tanstack/virtual-file-routes": "1.161.4", + "@tanstack/router-core": "1.168.4", + "@tanstack/router-utils": "1.161.6", + "@tanstack/virtual-file-routes": "1.161.7", "prettier": "^3.5.0", "recast": "^0.23.11", "source-map": "^0.7.4", @@ -3316,9 +3350,9 @@ } }, "node_modules/@tanstack/router-plugin": { - "version": "1.166.7", - "resolved": "https://registry.npmjs.org/@tanstack/router-plugin/-/router-plugin-1.166.7.tgz", - "integrity": "sha512-R06qe5UwApb/u02wDITVxN++6QE4xsLFQCr029VZ+4V8gyIe35kr8UCg3Jiyl6D5GXxhj62U2Ei8jccdkQaivw==", + "version": "1.167.5", + "resolved": "https://registry.npmjs.org/@tanstack/router-plugin/-/router-plugin-1.167.5.tgz", + "integrity": "sha512-0VLmmCn5qKyohfTxsJjgY4fKI7FaPAmSgpRctslasIb3FpwFFlYkpDw+jyAJ8Pbcc/ri+FZpa3JaHkL8ZTuMsw==", "dev": true, "license": "MIT", "dependencies": { @@ -3328,14 +3362,17 
@@ "@babel/template": "^7.27.2", "@babel/traverse": "^7.28.5", "@babel/types": "^7.28.5", - "@tanstack/router-core": "1.166.7", - "@tanstack/router-generator": "1.166.7", - "@tanstack/router-utils": "1.161.4", - "@tanstack/virtual-file-routes": "1.161.4", + "@tanstack/router-core": "1.168.4", + "@tanstack/router-generator": "1.166.18", + "@tanstack/router-utils": "1.161.6", + "@tanstack/virtual-file-routes": "1.161.7", "chokidar": "^3.6.0", "unplugin": "^2.1.2", "zod": "^3.24.2" }, + "bin": { + "intent": "bin/intent.js" + }, "engines": { "node": ">=20.19" }, @@ -3345,7 +3382,7 @@ }, "peerDependencies": { "@rsbuild/core": ">=1.0.2", - "@tanstack/react-router": "^1.166.7", + "@tanstack/react-router": "^1.168.4", "vite": ">=5.0.0 || >=6.0.0 || >=7.0.0", "vite-plugin-solid": "^2.11.10", "webpack": ">=5.92.0" @@ -3369,9 +3406,9 @@ } }, "node_modules/@tanstack/router-utils": { - "version": "1.161.4", - "resolved": "https://registry.npmjs.org/@tanstack/router-utils/-/router-utils-1.161.4.tgz", - "integrity": "sha512-r8TpjyIZoqrXXaf2DDyjd44gjGBoyE+/oEaaH68yLI9ySPO1gUWmQENZ1MZnmBnpUGN24NOZxdjDLc8npK0SAw==", + "version": "1.161.6", + "resolved": "https://registry.npmjs.org/@tanstack/router-utils/-/router-utils-1.161.6.tgz", + "integrity": "sha512-nRcYw+w2OEgK6VfjirYvGyPLOK+tZQz1jkYcmH5AjMamQ9PycnlxZF2aEZtPpNoUsaceX2bHptn6Ub5hGXqNvw==", "dev": true, "license": "MIT", "dependencies": { @@ -3394,9 +3431,9 @@ } }, "node_modules/@tanstack/store": { - "version": "0.9.2", - "resolved": "https://registry.npmjs.org/@tanstack/store/-/store-0.9.2.tgz", - "integrity": "sha512-K013lUJEFJK2ofFQ/hZKJUmCnpcV00ebLyOyFOWQvyQHUOZp/iYO84BM6aOGiV81JzwbX0APTVmW8YI7yiG5oA==", + "version": "0.9.3", + "resolved": "https://registry.npmjs.org/@tanstack/store/-/store-0.9.3.tgz", + "integrity": "sha512-8reSzl/qGWGGVKhBoxXPMWzATSbZLZFWhwBAFO9NAyp0TxzfBP0mIrGb8CP8KrQTmvzXlR/vFPPUrHTLBGyFyw==", "license": "MIT", "funding": { "type": "github", @@ -3404,11 +3441,14 @@ } }, 
"node_modules/@tanstack/virtual-file-routes": { - "version": "1.161.4", - "resolved": "https://registry.npmjs.org/@tanstack/virtual-file-routes/-/virtual-file-routes-1.161.4.tgz", - "integrity": "sha512-42WoRePf8v690qG8yGRe/YOh+oHni9vUaUUfoqlS91U2scd3a5rkLtVsc6b7z60w3RogH0I00vdrC5AaeiZ18w==", + "version": "1.161.7", + "resolved": "https://registry.npmjs.org/@tanstack/virtual-file-routes/-/virtual-file-routes-1.161.7.tgz", + "integrity": "sha512-olW33+Cn+bsCsZKPwEGhlkqS6w3M2slFv11JIobdnCFKMLG97oAI2kWKdx5/zsywTL8flpnoIgaZZPlQTFYhdQ==", "dev": true, "license": "MIT", + "bin": { + "intent": "bin/intent.js" + }, "engines": { "node": ">=20.19" }, @@ -3798,17 +3838,17 @@ "license": "MIT" }, "node_modules/@typescript-eslint/eslint-plugin": { - "version": "8.57.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.57.0.tgz", - "integrity": "sha512-qeu4rTHR3/IaFORbD16gmjq9+rEs9fGKdX0kF6BKSfi+gCuG3RCKLlSBYzn/bGsY9Tj7KE/DAQStbp8AHJGHEQ==", + "version": "8.57.2", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.57.2.tgz", + "integrity": "sha512-NZZgp0Fm2IkD+La5PR81sd+g+8oS6JwJje+aRWsDocxHkjyRw0J5L5ZTlN3LI1LlOcGL7ph3eaIUmTXMIjLk0w==", "dev": true, "license": "MIT", "dependencies": { "@eslint-community/regexpp": "^4.12.2", - "@typescript-eslint/scope-manager": "8.57.0", - "@typescript-eslint/type-utils": "8.57.0", - "@typescript-eslint/utils": "8.57.0", - "@typescript-eslint/visitor-keys": "8.57.0", + "@typescript-eslint/scope-manager": "8.57.2", + "@typescript-eslint/type-utils": "8.57.2", + "@typescript-eslint/utils": "8.57.2", + "@typescript-eslint/visitor-keys": "8.57.2", "ignore": "^7.0.5", "natural-compare": "^1.4.0", "ts-api-utils": "^2.4.0" @@ -3821,7 +3861,7 @@ "url": "https://opencollective.com/typescript-eslint" }, "peerDependencies": { - "@typescript-eslint/parser": "^8.57.0", + "@typescript-eslint/parser": "^8.57.2", "eslint": "^8.57.0 || ^9.0.0 || ^10.0.0", "typescript": ">=4.8.4 
<6.0.0" } @@ -3837,16 +3877,16 @@ } }, "node_modules/@typescript-eslint/parser": { - "version": "8.57.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.57.0.tgz", - "integrity": "sha512-XZzOmihLIr8AD1b9hL9ccNMzEMWt/dE2u7NyTY9jJG6YNiNthaD5XtUHVF2uCXZ15ng+z2hT3MVuxnUYhq6k1g==", + "version": "8.57.2", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.57.2.tgz", + "integrity": "sha512-30ScMRHIAD33JJQkgfGW1t8CURZtjc2JpTrq5n2HFhOefbAhb7ucc7xJwdWcrEtqUIYJ73Nybpsggii6GtAHjA==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/scope-manager": "8.57.0", - "@typescript-eslint/types": "8.57.0", - "@typescript-eslint/typescript-estree": "8.57.0", - "@typescript-eslint/visitor-keys": "8.57.0", + "@typescript-eslint/scope-manager": "8.57.2", + "@typescript-eslint/types": "8.57.2", + "@typescript-eslint/typescript-estree": "8.57.2", + "@typescript-eslint/visitor-keys": "8.57.2", "debug": "^4.4.3" }, "engines": { @@ -3862,14 +3902,14 @@ } }, "node_modules/@typescript-eslint/project-service": { - "version": "8.57.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.57.0.tgz", - "integrity": "sha512-pR+dK0BlxCLxtWfaKQWtYr7MhKmzqZxuii+ZjuFlZlIGRZm22HnXFqa2eY+90MUz8/i80YJmzFGDUsi8dMOV5w==", + "version": "8.57.2", + "resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.57.2.tgz", + "integrity": "sha512-FuH0wipFywXRTHf+bTTjNyuNQQsQC3qh/dYzaM4I4W0jrCqjCVuUh99+xd9KamUfmCGPvbO8NDngo/vsnNVqgw==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/tsconfig-utils": "^8.57.0", - "@typescript-eslint/types": "^8.57.0", + "@typescript-eslint/tsconfig-utils": "^8.57.2", + "@typescript-eslint/types": "^8.57.2", "debug": "^4.4.3" }, "engines": { @@ -3884,14 +3924,14 @@ } }, "node_modules/@typescript-eslint/scope-manager": { - "version": "8.57.0", - "resolved": 
"https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.57.0.tgz", - "integrity": "sha512-nvExQqAHF01lUM66MskSaZulpPL5pgy5hI5RfrxviLgzZVffB5yYzw27uK/ft8QnKXI2X0LBrHJFr1TaZtAibw==", + "version": "8.57.2", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.57.2.tgz", + "integrity": "sha512-snZKH+W4WbWkrBqj4gUNRIGb/jipDW3qMqVJ4C9rzdFc+wLwruxk+2a5D+uoFcKPAqyqEnSb4l2ULuZf95eSkw==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.57.0", - "@typescript-eslint/visitor-keys": "8.57.0" + "@typescript-eslint/types": "8.57.2", + "@typescript-eslint/visitor-keys": "8.57.2" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -3902,9 +3942,9 @@ } }, "node_modules/@typescript-eslint/tsconfig-utils": { - "version": "8.57.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.57.0.tgz", - "integrity": "sha512-LtXRihc5ytjJIQEH+xqjB0+YgsV4/tW35XKX3GTZHpWtcC8SPkT/d4tqdf1cKtesryHm2bgp6l555NYcT2NLvA==", + "version": "8.57.2", + "resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.57.2.tgz", + "integrity": "sha512-3Lm5DSM+DCowsUOJC+YqHHnKEfFh5CoGkj5Z31NQSNF4l5wdOwqGn99wmwN/LImhfY3KJnmordBq/4+VDe2eKw==", "dev": true, "license": "MIT", "engines": { @@ -3919,15 +3959,15 @@ } }, "node_modules/@typescript-eslint/type-utils": { - "version": "8.57.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.57.0.tgz", - "integrity": "sha512-yjgh7gmDcJ1+TcEg8x3uWQmn8ifvSupnPfjP21twPKrDP/pTHlEQgmKcitzF/rzPSmv7QjJ90vRpN4U+zoUjwQ==", + "version": "8.57.2", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.57.2.tgz", + "integrity": "sha512-Co6ZCShm6kIbAM/s+oYVpKFfW7LBc6FXoPXjTRQ449PPNBY8U0KZXuevz5IFuuUj2H9ss40atTaf9dlGLzbWZg==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.57.0", - 
"@typescript-eslint/typescript-estree": "8.57.0", - "@typescript-eslint/utils": "8.57.0", + "@typescript-eslint/types": "8.57.2", + "@typescript-eslint/typescript-estree": "8.57.2", + "@typescript-eslint/utils": "8.57.2", "debug": "^4.4.3", "ts-api-utils": "^2.4.0" }, @@ -3944,9 +3984,9 @@ } }, "node_modules/@typescript-eslint/types": { - "version": "8.57.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.57.0.tgz", - "integrity": "sha512-dTLI8PEXhjUC7B9Kre+u0XznO696BhXcTlOn0/6kf1fHaQW8+VjJAVHJ3eTI14ZapTxdkOmc80HblPQLaEeJdg==", + "version": "8.57.2", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.57.2.tgz", + "integrity": "sha512-/iZM6FnM4tnx9csuTxspMW4BOSegshwX5oBDznJ7S4WggL7Vczz5d2W11ecc4vRrQMQHXRSxzrCsyG5EsPPTbA==", "dev": true, "license": "MIT", "engines": { @@ -3958,16 +3998,16 @@ } }, "node_modules/@typescript-eslint/typescript-estree": { - "version": "8.57.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.57.0.tgz", - "integrity": "sha512-m7faHcyVg0BT3VdYTlX8GdJEM7COexXxS6KqGopxdtkQRvBanK377QDHr4W/vIPAR+ah9+B/RclSW5ldVniO1Q==", + "version": "8.57.2", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.57.2.tgz", + "integrity": "sha512-2MKM+I6g8tJxfSmFKOnHv2t8Sk3T6rF20A1Puk0svLK+uVapDZB/4pfAeB7nE83uAZrU6OxW+HmOd5wHVdXwXA==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/project-service": "8.57.0", - "@typescript-eslint/tsconfig-utils": "8.57.0", - "@typescript-eslint/types": "8.57.0", - "@typescript-eslint/visitor-keys": "8.57.0", + "@typescript-eslint/project-service": "8.57.2", + "@typescript-eslint/tsconfig-utils": "8.57.2", + "@typescript-eslint/types": "8.57.2", + "@typescript-eslint/visitor-keys": "8.57.2", "debug": "^4.4.3", "minimatch": "^10.2.2", "semver": "^7.7.3", @@ -3996,9 +4036,9 @@ } }, 
"node_modules/@typescript-eslint/typescript-estree/node_modules/brace-expansion": { - "version": "5.0.4", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.4.tgz", - "integrity": "sha512-h+DEnpVvxmfVefa4jFbCf5HdH5YMDXRsmKflpf1pILZWRFlTbJpxeU55nJl4Smt5HQaGzg1o6RHFPJaOqnmBDg==", + "version": "5.0.5", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.5.tgz", + "integrity": "sha512-VZznLgtwhn+Mact9tfiwx64fA9erHH/MCXEUfB/0bX/6Fz6ny5EGTXYltMocqg4xFAQZtnO3DHWWXi8RiuN7cQ==", "dev": true, "license": "MIT", "dependencies": { @@ -4038,16 +4078,16 @@ } }, "node_modules/@typescript-eslint/utils": { - "version": "8.57.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.57.0.tgz", - "integrity": "sha512-5iIHvpD3CZe06riAsbNxxreP+MuYgVUsV0n4bwLH//VJmgtt54sQeY2GszntJ4BjYCpMzrfVh2SBnUQTtys2lQ==", + "version": "8.57.2", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.57.2.tgz", + "integrity": "sha512-krRIbvPK1ju1WBKIefiX+bngPs+odIQUtR7kymzPfo1POVw3jlF+nLkmexdSSd4UCbDcQn+wMBATOOmpBbqgKg==", "dev": true, "license": "MIT", "dependencies": { "@eslint-community/eslint-utils": "^4.9.1", - "@typescript-eslint/scope-manager": "8.57.0", - "@typescript-eslint/types": "8.57.0", - "@typescript-eslint/typescript-estree": "8.57.0" + "@typescript-eslint/scope-manager": "8.57.2", + "@typescript-eslint/types": "8.57.2", + "@typescript-eslint/typescript-estree": "8.57.2" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -4062,13 +4102,13 @@ } }, "node_modules/@typescript-eslint/visitor-keys": { - "version": "8.57.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.57.0.tgz", - "integrity": "sha512-zm6xx8UT/Xy2oSr2ZXD0pZo7Jx2XsCoID2IUh9YSTFRu7z+WdwYTRk6LhUftm1crwqbuoF6I8zAFeCMw0YjwDg==", + "version": "8.57.2", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.57.2.tgz", + "integrity": 
"sha512-zhahknjobV2FiD6Ee9iLbS7OV9zi10rG26odsQdfBO/hjSzUQbkIYgda+iNKK1zNiW2ey+Lf8MU5btN17V3dUw==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.57.0", + "@typescript-eslint/types": "8.57.2", "eslint-visitor-keys": "^5.0.0" }, "engines": { @@ -4093,20 +4133,20 @@ } }, "node_modules/@vitejs/plugin-react-swc": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/@vitejs/plugin-react-swc/-/plugin-react-swc-4.2.3.tgz", - "integrity": "sha512-QIluDil2prhY1gdA3GGwxZzTAmLdi8cQ2CcuMW4PB/Wu4e/1pzqrwhYWVd09LInCRlDUidQjd0B70QWbjWtLxA==", + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/@vitejs/plugin-react-swc/-/plugin-react-swc-4.3.0.tgz", + "integrity": "sha512-mOkXCII839dHyAt/gpoSlm28JIVDwhZ6tnG6wJxUy2bmOx7UaPjvOyIDf3SFv5s7Eo7HVaq6kRcu6YMEzt5Z7w==", "dev": true, "license": "MIT", "dependencies": { - "@rolldown/pluginutils": "1.0.0-rc.2", + "@rolldown/pluginutils": "1.0.0-rc.7", "@swc/core": "^1.15.11" }, "engines": { "node": "^20.19.0 || >=22.12.0" }, "peerDependencies": { - "vite": "^4 || ^5 || ^6 || ^7" + "vite": "^4 || ^5 || ^6 || ^7 || ^8" } }, "node_modules/@webassemblyjs/ast": { @@ -4622,9 +4662,9 @@ } }, "node_modules/baseline-browser-mapping": { - "version": "2.10.0", - "resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.10.0.tgz", - "integrity": "sha512-lIyg0szRfYbiy67j9KN8IyeD7q7hcmqnJ1ddWmNt19ItGpNN64mnllmxUNFIOdOm6by97jlL6wfpTTJrmnjWAA==", + "version": "2.10.10", + "resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.10.10.tgz", + "integrity": "sha512-sUoJ3IMxx4AyRqO4MLeHlnGDkyXRoUG0/AI9fjK+vS72ekpV0yWVY7O0BVjmBcRtkNcsAO2QDZ4tdKKGoI6YaQ==", "license": "Apache-2.0", "bin": { "baseline-browser-mapping": "dist/cli.cjs" @@ -4832,9 +4872,9 @@ } }, "node_modules/caniuse-lite": { - "version": "1.0.30001777", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001777.tgz", - "integrity": 
"sha512-tmN+fJxroPndC74efCdp12j+0rk0RHwV5Jwa1zWaFVyw2ZxAuPeG8ZgWC3Wz7uSjT3qMRQ5XHZ4COgQmsCMJAQ==", + "version": "1.0.30001781", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001781.tgz", + "integrity": "sha512-RdwNCyMsNBftLjW6w01z8bKEvT6e/5tpPVEgtn22TiLGlstHOVecsX2KHFkD5e/vRnIE4EGzpuIODb3mtswtkw==", "funding": [ { "type": "opencollective", @@ -5197,9 +5237,9 @@ } }, "node_modules/cosmiconfig/node_modules/yaml": { - "version": "1.10.2", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz", - "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==", + "version": "1.10.3", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.3.tgz", + "integrity": "sha512-vIYeF1u3CjlhAFekPPAk2h/Kv4T3mAkMox5OymRiJQB0spDP10LHvt+K7G9Ny6NuuMAb25/6n1qyUjAcGNf/AA==", "license": "ISC", "engines": { "node": ">= 6" @@ -5557,9 +5597,9 @@ } }, "node_modules/dayjs": { - "version": "1.11.19", - "resolved": "https://registry.npmjs.org/dayjs/-/dayjs-1.11.19.tgz", - "integrity": "sha512-t5EcLVS6QPBNqM2z8fakk/NKel+Xzshgt8FFKAn+qwlD1pzZWxh0nVCrvFK7ZDb6XucZeF9z8C7CBWTRIVApAw==", + "version": "1.11.20", + "resolved": "https://registry.npmjs.org/dayjs/-/dayjs-1.11.20.tgz", + "integrity": "sha512-YbwwqR/uYpeoP4pu043q+LTDLFBLApUP6VxRihdfNTqu4ubqMlGDLd6ErXhEgsyvY0K6nCs7nggYumAN+9uEuQ==", "license": "MIT" }, "node_modules/debug": { @@ -5617,9 +5657,9 @@ "license": "MIT" }, "node_modules/diff": { - "version": "8.0.3", - "resolved": "https://registry.npmjs.org/diff/-/diff-8.0.3.tgz", - "integrity": "sha512-qejHi7bcSD4hQAZE0tNAawRK1ZtafHDmMTMkrrIGgSLl7hTnQHmKCeB45xAcbfTqK2zowkM3j3bHt/4b/ARbYQ==", + "version": "8.0.4", + "resolved": "https://registry.npmjs.org/diff/-/diff-8.0.4.tgz", + "integrity": "sha512-DPi0FmjiSU5EvQV0++GFDOJ9ASQUVFh5kD+OzOnYdi7n3Wpm9hWWGfB/O2blfHcMVTL5WkQXSnRiK9makhrcnw==", "dev": true, "license": "BSD-3-Clause", "engines": { @@ -5696,9 +5736,9 @@ "license": "MIT" }, 
"node_modules/electron-to-chromium": { - "version": "1.5.307", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.307.tgz", - "integrity": "sha512-5z3uFKBWjiNR44nFcYdkcXjKMbg5KXNdciu7mhTPo9tB7NbqSNP2sSnGR+fqknZSCwKkBN+oxiiajWs4dT6ORg==", + "version": "1.5.325", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.325.tgz", + "integrity": "sha512-PwfIw7WQSt3xX7yOf5OE/unLzsK9CaN2f/FvV3WjPR1Knoc1T9vePRVV4W1EM301JzzysK51K7FNKcusCr0zYA==", "license": "ISC" }, "node_modules/element-size": { @@ -5732,9 +5772,9 @@ } }, "node_modules/enhanced-resolve": { - "version": "5.20.0", - "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.20.0.tgz", - "integrity": "sha512-/ce7+jQ1PQ6rVXwe+jKEg5hW5ciicHwIQUagZkp6IufBoY3YDgdTTY1azVs0qoRgVmvsNB+rbjLJxDAeHHtwsQ==", + "version": "5.20.1", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.20.1.tgz", + "integrity": "sha512-Qohcme7V1inbAfvjItgw0EaxVX5q2rdVEZHRBrEQdRZTssLDGsL8Lwrznl8oQ/6kuTJONLaDcGjkNP247XEhcA==", "license": "MIT", "peer": true, "dependencies": { @@ -5814,9 +5854,9 @@ } }, "node_modules/esbuild": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.27.3.tgz", - "integrity": "sha512-8VwMnyGCONIs6cWue2IdpHxHnAjzxnw2Zr7MkVxB2vjmQ2ivqGFb4LEG3SMnv0Gb2F/G/2yA8zUaiL1gywDCCg==", + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.27.4.tgz", + "integrity": "sha512-Rq4vbHnYkK5fws5NF7MYTU68FPRE1ajX7heQ/8QXXWqNgqqJ/GkmmyxIzUnf2Sr/bakf8l54716CcMGHYhMrrQ==", "dev": true, "hasInstallScript": true, "license": "MIT", @@ -5827,32 +5867,32 @@ "node": ">=18" }, "optionalDependencies": { - "@esbuild/aix-ppc64": "0.27.3", - "@esbuild/android-arm": "0.27.3", - "@esbuild/android-arm64": "0.27.3", - "@esbuild/android-x64": "0.27.3", - "@esbuild/darwin-arm64": "0.27.3", - "@esbuild/darwin-x64": "0.27.3", - "@esbuild/freebsd-arm64": "0.27.3", - 
"@esbuild/freebsd-x64": "0.27.3", - "@esbuild/linux-arm": "0.27.3", - "@esbuild/linux-arm64": "0.27.3", - "@esbuild/linux-ia32": "0.27.3", - "@esbuild/linux-loong64": "0.27.3", - "@esbuild/linux-mips64el": "0.27.3", - "@esbuild/linux-ppc64": "0.27.3", - "@esbuild/linux-riscv64": "0.27.3", - "@esbuild/linux-s390x": "0.27.3", - "@esbuild/linux-x64": "0.27.3", - "@esbuild/netbsd-arm64": "0.27.3", - "@esbuild/netbsd-x64": "0.27.3", - "@esbuild/openbsd-arm64": "0.27.3", - "@esbuild/openbsd-x64": "0.27.3", - "@esbuild/openharmony-arm64": "0.27.3", - "@esbuild/sunos-x64": "0.27.3", - "@esbuild/win32-arm64": "0.27.3", - "@esbuild/win32-ia32": "0.27.3", - "@esbuild/win32-x64": "0.27.3" + "@esbuild/aix-ppc64": "0.27.4", + "@esbuild/android-arm": "0.27.4", + "@esbuild/android-arm64": "0.27.4", + "@esbuild/android-x64": "0.27.4", + "@esbuild/darwin-arm64": "0.27.4", + "@esbuild/darwin-x64": "0.27.4", + "@esbuild/freebsd-arm64": "0.27.4", + "@esbuild/freebsd-x64": "0.27.4", + "@esbuild/linux-arm": "0.27.4", + "@esbuild/linux-arm64": "0.27.4", + "@esbuild/linux-ia32": "0.27.4", + "@esbuild/linux-loong64": "0.27.4", + "@esbuild/linux-mips64el": "0.27.4", + "@esbuild/linux-ppc64": "0.27.4", + "@esbuild/linux-riscv64": "0.27.4", + "@esbuild/linux-s390x": "0.27.4", + "@esbuild/linux-x64": "0.27.4", + "@esbuild/netbsd-arm64": "0.27.4", + "@esbuild/netbsd-x64": "0.27.4", + "@esbuild/openbsd-arm64": "0.27.4", + "@esbuild/openbsd-x64": "0.27.4", + "@esbuild/openharmony-arm64": "0.27.4", + "@esbuild/sunos-x64": "0.27.4", + "@esbuild/win32-arm64": "0.27.4", + "@esbuild/win32-ia32": "0.27.4", + "@esbuild/win32-x64": "0.27.4" } }, "node_modules/escalade": { @@ -6330,9 +6370,9 @@ "license": "ISC" }, "node_modules/flatted": { - "version": "3.4.1", - "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.4.1.tgz", - "integrity": "sha512-IxfVbRFVlV8V/yRaGzk0UVIcsKKHMSfYw66T/u4nTwlWteQePsxe//LjudR1AMX4tZW3WFCh3Zqa/sjlqpbURQ==", + "version": "3.4.2", + "resolved": 
"https://registry.npmjs.org/flatted/-/flatted-3.4.2.tgz", + "integrity": "sha512-PjDse7RzhcPkIJwy5t7KPWQSZ9cAbzQXcafsetQoD7sOJRQlGikNbx7yZp2OotDnJyrDcbyRq3Ttb18iYOqkxA==", "dev": true, "license": "ISC" }, @@ -6438,9 +6478,9 @@ } }, "node_modules/get-tsconfig": { - "version": "4.13.6", - "resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.13.6.tgz", - "integrity": "sha512-shZT/QMiSHc/YBLxxOkMtgSid5HFoauqCE3/exfsEcwg1WkeqjG+V40yBbBrsD+jW2HDXcs28xOfcbm2jI8Ddw==", + "version": "4.13.7", + "resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.13.7.tgz", + "integrity": "sha512-7tN6rFgBlMgpBML5j8typ92BKFi2sFQvIdpAqLA2beia5avZDrMs0FLZiM5etShWq5irVyGcGMEA1jcDaK7A/Q==", "dev": true, "license": "MIT", "dependencies": { @@ -8274,9 +8314,9 @@ "license": "ISC" }, "node_modules/picomatch": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", - "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.2.tgz", + "integrity": "sha512-V7+vQEJ06Z+c5tSye8S+nHUfI51xoXIXjHQ99cQtKUkQqqO1kO/KCJUfZXuB47h/YBlDhah2H3hdUGXn8ie0oA==", "dev": true, "license": "MIT", "engines": { @@ -8714,9 +8754,9 @@ } }, "node_modules/react-hook-form": { - "version": "7.71.2", - "resolved": "https://registry.npmjs.org/react-hook-form/-/react-hook-form-7.71.2.tgz", - "integrity": "sha512-1CHvcDYzuRUNOflt4MOq3ZM46AronNJtQ1S7tnX6YN4y72qhgiUItpacZUAQ0TyWYci3yz1X+rXaSxiuEm86PA==", + "version": "7.72.0", + "resolved": "https://registry.npmjs.org/react-hook-form/-/react-hook-form-7.72.0.tgz", + "integrity": "sha512-V4v6jubaf6JAurEaVnT9aUPKFbNtDgohj5CIgVGyPHvT9wRx5OZHVjz31GsxnPNI278XMu+ruFz+wGOscHaLKw==", "license": "MIT", "engines": { "node": ">=18.0.0" @@ -8750,9 +8790,9 @@ } }, "node_modules/react-number-format": { - "version": "5.4.4", - "resolved": 
"https://registry.npmjs.org/react-number-format/-/react-number-format-5.4.4.tgz", - "integrity": "sha512-wOmoNZoOpvMminhifQYiYSTCLUDOiUbBunrMrMjA+dV52sY+vck1S4UhR6PkgnoCquvvMSeJjErXZ4qSaWCliA==", + "version": "5.4.5", + "resolved": "https://registry.npmjs.org/react-number-format/-/react-number-format-5.4.5.tgz", + "integrity": "sha512-y8O2yHHj3w0aE9XO8d2BCcUOOdQTRSVq+WIuMlLVucAm5XNjJAy+BoOJiuQMldVYVOKTMyvVNfnbl2Oqp+YxGw==", "license": "MIT", "peerDependencies": { "react": "^0.14 || ^15.0.0 || ^16.0.0 || ^17.0.0 || ^18.0.0 || ^19.0.0", @@ -9107,9 +9147,9 @@ } }, "node_modules/rollup": { - "version": "4.59.0", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.59.0.tgz", - "integrity": "sha512-2oMpl67a3zCH9H79LeMcbDhXW/UmWG/y2zuqnF2jQq5uq9TbM9TVyXvA4+t+ne2IIkBdrLpAaRQAvo7YI/Yyeg==", + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.60.0.tgz", + "integrity": "sha512-yqjxruMGBQJ2gG4HtjZtAfXArHomazDHoFwFFmZZl0r7Pdo7qCIXKqKHZc8yeoMgzJJ+pO6pEEHa+V7uzWlrAQ==", "dev": true, "license": "MIT", "dependencies": { @@ -9123,31 +9163,31 @@ "npm": ">=8.0.0" }, "optionalDependencies": { - "@rollup/rollup-android-arm-eabi": "4.59.0", - "@rollup/rollup-android-arm64": "4.59.0", - "@rollup/rollup-darwin-arm64": "4.59.0", - "@rollup/rollup-darwin-x64": "4.59.0", - "@rollup/rollup-freebsd-arm64": "4.59.0", - "@rollup/rollup-freebsd-x64": "4.59.0", - "@rollup/rollup-linux-arm-gnueabihf": "4.59.0", - "@rollup/rollup-linux-arm-musleabihf": "4.59.0", - "@rollup/rollup-linux-arm64-gnu": "4.59.0", - "@rollup/rollup-linux-arm64-musl": "4.59.0", - "@rollup/rollup-linux-loong64-gnu": "4.59.0", - "@rollup/rollup-linux-loong64-musl": "4.59.0", - "@rollup/rollup-linux-ppc64-gnu": "4.59.0", - "@rollup/rollup-linux-ppc64-musl": "4.59.0", - "@rollup/rollup-linux-riscv64-gnu": "4.59.0", - "@rollup/rollup-linux-riscv64-musl": "4.59.0", - "@rollup/rollup-linux-s390x-gnu": "4.59.0", - "@rollup/rollup-linux-x64-gnu": "4.59.0", - "@rollup/rollup-linux-x64-musl": 
"4.59.0", - "@rollup/rollup-openbsd-x64": "4.59.0", - "@rollup/rollup-openharmony-arm64": "4.59.0", - "@rollup/rollup-win32-arm64-msvc": "4.59.0", - "@rollup/rollup-win32-ia32-msvc": "4.59.0", - "@rollup/rollup-win32-x64-gnu": "4.59.0", - "@rollup/rollup-win32-x64-msvc": "4.59.0", + "@rollup/rollup-android-arm-eabi": "4.60.0", + "@rollup/rollup-android-arm64": "4.60.0", + "@rollup/rollup-darwin-arm64": "4.60.0", + "@rollup/rollup-darwin-x64": "4.60.0", + "@rollup/rollup-freebsd-arm64": "4.60.0", + "@rollup/rollup-freebsd-x64": "4.60.0", + "@rollup/rollup-linux-arm-gnueabihf": "4.60.0", + "@rollup/rollup-linux-arm-musleabihf": "4.60.0", + "@rollup/rollup-linux-arm64-gnu": "4.60.0", + "@rollup/rollup-linux-arm64-musl": "4.60.0", + "@rollup/rollup-linux-loong64-gnu": "4.60.0", + "@rollup/rollup-linux-loong64-musl": "4.60.0", + "@rollup/rollup-linux-ppc64-gnu": "4.60.0", + "@rollup/rollup-linux-ppc64-musl": "4.60.0", + "@rollup/rollup-linux-riscv64-gnu": "4.60.0", + "@rollup/rollup-linux-riscv64-musl": "4.60.0", + "@rollup/rollup-linux-s390x-gnu": "4.60.0", + "@rollup/rollup-linux-x64-gnu": "4.60.0", + "@rollup/rollup-linux-x64-musl": "4.60.0", + "@rollup/rollup-openbsd-x64": "4.60.0", + "@rollup/rollup-openharmony-arm64": "4.60.0", + "@rollup/rollup-win32-arm64-msvc": "4.60.0", + "@rollup/rollup-win32-ia32-msvc": "4.60.0", + "@rollup/rollup-win32-x64-gnu": "4.60.0", + "@rollup/rollup-win32-x64-msvc": "4.60.0", "fsevents": "~2.3.2" } }, @@ -9184,9 +9224,9 @@ "license": "MIT" }, "node_modules/sax": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/sax/-/sax-1.5.0.tgz", - "integrity": "sha512-21IYA3Q5cQf089Z6tgaUTr7lDAyzoTPx5HRtbhsME8Udispad8dC/+sziTNugOEx54ilvatQ9YCzl4KQLPcRHA==", + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/sax/-/sax-1.6.0.tgz", + "integrity": "sha512-6R3J5M4AcbtLUdZmRv2SygeVaM7IhrLXu9BmnOGmmACak8fiUtOsYNWUS4uK7upbmHIBbLBeFeI//477BKLBzA==", "license": "BlueOak-1.0.0", "engines": { "node": ">=11.0.0" @@ -9722,9 +9762,9 
@@ } }, "node_modules/tapable": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.3.0.tgz", - "integrity": "sha512-g9ljZiwki/LfxmQADO3dEY1CbpmXT5Hm2fJ+QaGKwSXUylMybePR7/67YW7jOrrvjEgL1Fmz5kzyAjWVWLlucg==", + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.3.2.tgz", + "integrity": "sha512-1MOpMXuhGzGL5TTCZFItxCc0AARf1EZFQkGqMm7ERKj8+Hgr5oLvJOVFcC+lRmR8hCe2S3jC4T5D7Vg/d7/fhA==", "license": "MIT", "peer": true, "engines": { @@ -9736,9 +9776,9 @@ } }, "node_modules/terser": { - "version": "5.46.0", - "resolved": "https://registry.npmjs.org/terser/-/terser-5.46.0.tgz", - "integrity": "sha512-jTwoImyr/QbOWFFso3YoU3ik0jBBDJ6JTOQiy/J2YxVJdZCc+5u7skhNwiOR3FQIygFqVUPHl7qbbxtjW2K3Qg==", + "version": "5.46.1", + "resolved": "https://registry.npmjs.org/terser/-/terser-5.46.1.tgz", + "integrity": "sha512-vzCjQO/rgUuK9sf8VJZvjqiqiHFaZLnOiimmUuOKODxWL8mm/xua7viT7aqX7dgPY60otQjUotzFMmCB4VdmqQ==", "license": "BSD-2-Clause", "peer": true, "dependencies": { @@ -9808,12 +9848,7 @@ "version": "1.3.3", "resolved": "https://registry.npmjs.org/tiny-invariant/-/tiny-invariant-1.3.3.tgz", "integrity": "sha512-+FbBPE1o9QAYvviau/qC5SE3caw21q3xkvWKBtja5vgqOWIHHJ3ioaq1VPfn/Szqctz2bU/oYeKd9/z5BL+PVg==", - "license": "MIT" - }, - "node_modules/tiny-warning": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/tiny-warning/-/tiny-warning-1.0.3.tgz", - "integrity": "sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA==", + "dev": true, "license": "MIT" }, "node_modules/tinycolor2": { @@ -9858,9 +9893,9 @@ } }, "node_modules/tinyglobby/node_modules/picomatch": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", - "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.4.tgz", + 
"integrity": "sha512-QP88BAKvMam/3NxH6vj2o21R6MjxZUAd6nlwAS/pnGvN9IVLocLHxGYIzFhg6fUQ+5th6P4dv4eW9jX3DSIj7A==", "dev": true, "license": "MIT", "engines": { @@ -9925,9 +9960,9 @@ "license": "MIT" }, "node_modules/ts-api-utils": { - "version": "2.4.0", - "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.4.0.tgz", - "integrity": "sha512-3TaVTaAv2gTiMB35i3FiGJaRfwb3Pyn/j3m/bfAvGe8FB7CF6u+LMYqYlDh7reQf7UNvoTvdfAqHGmPGOSsPmA==", + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.5.0.tgz", + "integrity": "sha512-OJ/ibxhPlqrMM0UiNHJ/0CKQkoKF243/AEmplt3qpRgkW8VG7IfOS41h7V8TjITqdByHzrjcS/2si+y4lIh8NA==", "dev": true, "license": "MIT", "engines": { @@ -10025,16 +10060,16 @@ } }, "node_modules/typescript-eslint": { - "version": "8.57.0", - "resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.57.0.tgz", - "integrity": "sha512-W8GcigEMEeB07xEZol8oJ26rigm3+bfPHxHvwbYUlu1fUDsGuQ7Hiskx5xGW/xM4USc9Ephe3jtv7ZYPQntHeA==", + "version": "8.57.2", + "resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.57.2.tgz", + "integrity": "sha512-VEPQ0iPgWO/sBaZOU1xo4nuNdODVOajPnTIbog2GKYr31nIlZ0fWPoCQgGfF3ETyBl1vn63F/p50Um9Z4J8O8A==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/eslint-plugin": "8.57.0", - "@typescript-eslint/parser": "8.57.0", - "@typescript-eslint/typescript-estree": "8.57.0", - "@typescript-eslint/utils": "8.57.0" + "@typescript-eslint/eslint-plugin": "8.57.2", + "@typescript-eslint/parser": "8.57.2", + "@typescript-eslint/typescript-estree": "8.57.2", + "@typescript-eslint/utils": "8.57.2" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -10081,9 +10116,9 @@ } }, "node_modules/unplugin/node_modules/picomatch": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", - "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + 
"version": "4.0.4", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.4.tgz", + "integrity": "sha512-QP88BAKvMam/3NxH6vj2o21R6MjxZUAd6nlwAS/pnGvN9IVLocLHxGYIzFhg6fUQ+5th6P4dv4eW9jX3DSIj7A==", "dev": true, "license": "MIT", "engines": { @@ -10285,9 +10320,9 @@ } }, "node_modules/vite/node_modules/picomatch": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", - "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.4.tgz", + "integrity": "sha512-QP88BAKvMam/3NxH6vj2o21R6MjxZUAd6nlwAS/pnGvN9IVLocLHxGYIzFhg6fUQ+5th6P4dv4eW9jX3DSIj7A==", "dev": true, "license": "MIT", "engines": { @@ -10551,9 +10586,9 @@ "license": "ISC" }, "node_modules/yaml": { - "version": "2.8.2", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.8.2.tgz", - "integrity": "sha512-mplynKqc1C2hTVYxd0PU2xQAc22TI1vShAYGksCCfxbn/dFwnHTNi1bvYsBTkhdUNtGIf5xNOg938rrSSYvS9A==", + "version": "2.8.3", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.8.3.tgz", + "integrity": "sha512-AvbaCLOO2Otw/lW5bmh9d/WEdcDFdQp2Z2ZUH3pX9U2ihyUY0nvLv7J6TrWowklRGPYbB/IuIMfYgxaCPg5Bpg==", "dev": true, "license": "ISC", "optional": true,