Skip to content

Commit ef4a3f1

Browse files
committed
feat: refactor well inventory CSV processing for improved error handling and validation
1 parent 294fcb7 commit ef4a3f1

2 files changed

Lines changed: 48 additions & 83 deletions

File tree

schemas/thing.py

Lines changed: 2 additions & 40 deletions
Original file line numberDiff line numberDiff line change
@@ -35,6 +35,7 @@
3535
from schemas.notes import NoteResponse, CreateNote
3636
from schemas.permission_history import PermissionHistoryResponse
3737

38+
3839
# -------- VALIDATE ----------
3940

4041

@@ -124,47 +125,8 @@ class CreateBaseThing(BaseCreateModel):
124125
alternate_ids: list[CreateThingIdLink] | None = None
125126
monitoring_frequencies: list[CreateMonitoringFrequency] | None = None
126127

127-
@field_validator("alternate_ids", mode="before")
128-
def use_dummy_values(cls, v):
129-
"""
130-
When alternate IDs are provided they are assumed to be the same as
131-
the thing being created. This gets handled in the function services/thing_helper.py::add_thing.
132-
By using dummy values here we can avoid validation errors and then use the
133-
thing's id when creating the actual links.
134-
"""
135-
# In "before" mode `v` is the raw input, which may be None, a list of
136-
# dicts, or already-parsed model instances (in some code paths).
137-
if v is None:
138-
return v
139-
140-
# Only process lists; for any other unexpected type, leave as-is and
141-
# let normal validation handle errors if appropriate.
142-
if not isinstance(v, list):
143-
return v
144-
145-
for alternate_id in v:
146-
normalized: list = []
147-
for alternate_id in v:
148-
# If we already have a Pydantic model instance, set the attribute if possible.
149-
if isinstance(alternate_id, BaseModel):
150-
if hasattr(alternate_id, "thing_id"):
151-
setattr(alternate_id, "thing_id", -1)
152-
normalized.append(alternate_id)
153-
else:
154-
data = alternate_id.model_dump()
155-
data["thing_id"] = -1
156-
normalized.append(data)
157-
# If it's a plain dict, add the dummy thing_id key.
158-
elif isinstance(alternate_id, dict):
159-
data = dict(alternate_id)
160-
data["thing_id"] = -1
161-
normalized.append(data)
162-
else:
163-
# For any unexpected type, leave as-is and let normal validation
164-
# handle potential errors.
165-
normalized.append(alternate_id)
166128

167-
return normalized
129+
class CreateWell(CreateBaseThing, ValidateWell):
168130
"""
169131
Schema for creating a well.
170132
"""

services/well_inventory_csv.py

Lines changed: 46 additions & 43 deletions
Original file line numberDiff line numberDiff line change
@@ -133,50 +133,53 @@ def _import_well_inventory_csv(session: Session, text: str, user: str):
133133
else:
134134
models, validation_errors = _make_row_models(rows, session)
135135
if models and not validation_errors:
136-
for project, items in groupby(
137-
sorted(models, key=lambda x: x.project), key=lambda x: x.project
138-
):
139-
# get project and add if does not exist
140-
# BDMS-221 adds group_type
141-
sql = select(Group).where(
142-
and_(Group.group_type == "Monitoring Plan", Group.name == project)
143-
)
144-
group = session.scalars(sql).one_or_none()
145-
if not group:
146-
group = Group(name=project, group_type="Monitoring Plan")
147-
session.add(group)
148-
session.flush()
149-
150-
for model in items:
151-
try:
152-
added = _add_csv_row(session, group, model, user)
153-
if added:
154-
session.commit()
155-
except ValueError as e:
156-
validation_errors.append(
157-
{
158-
"row": model.well_name_point_id,
159-
"field": "Invalid value",
160-
"error": str(e),
161-
}
162-
)
163-
session.rollback()
164-
continue
165-
except DatabaseError as e:
166-
logging.error(
167-
f"Database error while importing row '{model.well_name_point_id}': {e}"
168-
)
169-
validation_errors.append(
170-
{
171-
"row": model.well_name_point_id,
172-
"field": "Database error",
173-
"error": "A database error occurred while importing this row.",
174-
}
136+
current_row_id = None
137+
try:
138+
for project, items in groupby(
139+
sorted(models, key=lambda x: x.project), key=lambda x: x.project
140+
):
141+
# get project and add if does not exist
142+
# BDMS-221 adds group_type
143+
sql = select(Group).where(
144+
and_(
145+
Group.group_type == "Monitoring Plan", Group.name == project
175146
)
176-
session.rollback()
177-
continue
178-
179-
wells.append(added)
147+
)
148+
group = session.scalars(sql).one_or_none()
149+
if not group:
150+
group = Group(name=project, group_type="Monitoring Plan")
151+
session.add(group)
152+
session.flush()
153+
154+
for model in items:
155+
current_row_id = model.well_name_point_id
156+
added = _add_csv_row(session, group, model, user)
157+
wells.append(added)
158+
except ValueError as e:
159+
validation_errors.append(
160+
{
161+
"row": current_row_id or "unknown",
162+
"field": "Invalid value",
163+
"error": str(e),
164+
}
165+
)
166+
session.rollback()
167+
wells = []
168+
except DatabaseError as e:
169+
logging.error(
170+
f"Database error while importing row '{current_row_id or 'unknown'}': {e}"
171+
)
172+
validation_errors.append(
173+
{
174+
"row": current_row_id or "unknown",
175+
"field": "Database error",
176+
"error": "A database error occurred while importing this row.",
177+
}
178+
)
179+
session.rollback()
180+
wells = []
181+
else:
182+
session.commit()
180183

181184
rows_imported = len(wells)
182185
rows_processed = len(rows)

0 commit comments

Comments (0)