Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions backend/bounding_polygons.py
Original file line number Diff line number Diff line change
Expand Up @@ -174,6 +174,7 @@ def get_state_polygon(state):
# private helpers ============================
def _make_shape(obj, as_wkt, tolerance=0.1):
    """Build a shapely geometry from a GeoJSON-like feature.

    Args:
        obj: mapping with a "geometry" member (GeoJSON-like feature).
        as_wkt: when True, return the WKT string instead of the geometry.
        tolerance: simplification tolerance passed to shapely's
            ``simplify`` (Douglas-Peucker). Defaults to 0.1, preserving
            the previous hard-coded behavior.

    Returns:
        The simplified shapely geometry, or its WKT string when
        ``as_wkt`` is True.
    """
    poly = shape(obj["geometry"])
    # Simplify to cut vertex count; note simplify() does not guarantee
    # topology preservation for aggressive tolerances.
    poly = poly.simplify(tolerance)
    if as_wkt:
        return poly.wkt
    return poly
Expand Down
93 changes: 54 additions & 39 deletions backend/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,7 @@
OSERoswellWaterLevelSource,
)
from .connectors.nmenv.source import DWBSiteSource, DWBAnalyteSource
from .connectors.nmose.source import NMOSEPODSiteSource
from .constants import MILLIGRAMS_PER_LITER, WGS84, FEET
from .connectors.isc_seven_rivers.source import (
ISCSevenRiversSiteSource,
Expand All @@ -61,46 +62,32 @@
from .connectors.usgs.source import NWISSiteSource, NWISWaterLevelSource
from .connectors.wqp.source import WQPSiteSource, WQPAnalyteSource, WQPWaterLevelSource

SOURCE_KEYS = (
"bernco",
"bor",
"cabq",
"ebid",
"nmbgmr_amp",
"nmed_dwb",
"nmose_isc_seven_rivers",
"nmose_roswell",
"nwis",
"pvacd",
"wqp",
)

# Registry mapping each data-source key to its site-source class. Both
# get_source() and the Config.use_source_<key> flags are keyed off this
# registry, so SOURCE_KEYS is derived from it rather than kept by hand.
SOURCE_DICT = dict(
    bernco=BernCoSiteSource,
    bor=BORSiteSource,
    cabq=CABQSiteSource,
    ebid=EBIDSiteSource,
    nmbgmr_amp=NMBGMRSiteSource,
    nmed_dwb=DWBSiteSource,
    nmose_isc_seven_rivers=ISCSevenRiversSiteSource,
    nmose_roswell=NMOSERoswellSiteSource,
    nwis=NWISSiteSource,
    pvacd=PVACDSiteSource,
    wqp=WQPSiteSource,
    nmose_pod=NMOSEPODSiteSource,
)

SOURCE_KEYS = list(SOURCE_DICT)

def get_source(source):
    """Instantiate and return the site source registered under *source*.

    Args:
        source: a key of SOURCE_DICT (e.g. "nwis", "nmose_pod").

    Returns:
        A new instance of the registered site-source class.

    Raises:
        ValueError: if *source* is not a registered key.
    """
    try:
        klass = SOURCE_DICT[source]
    except KeyError:
        raise ValueError(f"Unknown source {source}")
    # All registered values are classes (always truthy), so instantiate
    # unconditionally -- no need for the old `if klass:` guard, which
    # could silently return None.
    return klass()


class Config(Loggable):
Expand All @@ -116,6 +103,8 @@ class Config(Loggable):
county: str = ""
wkt: str = ""

sites_only = False

# sources
use_source_bernco: bool = True
use_source_bor: bool = True
Expand All @@ -128,6 +117,7 @@ class Config(Loggable):
use_source_nwis: bool = True
use_source_pvacd: bool = True
use_source_wqp: bool = True
use_source_nmose_pod: bool = True

# parameter
parameter: str = ""
Expand Down Expand Up @@ -186,6 +176,25 @@ def __init__(self, model=None, payload=None):
for s in SOURCE_KEYS:
setattr(self, f"use_source_{s}", s in payload.get("sources", []))

def finalize(self):
    """Prepare this config for an output run.

    Order matters here: output units are normalized first, the output
    directory is created, then a unique output name is derived (per
    update_output_name, based on existing directories), and finally the
    full output path is created.
    """
    self._update_output_units()
    self.make_output_directory()
    self.update_output_name()
    self.make_output_path()

def all_site_sources(self):
    """Return (source, None) pairs for every enabled site source.

    A source key ``s`` is considered enabled when the corresponding
    ``use_source_<s>`` flag on this config is truthy. Each source is
    bound to this config via set_config before being returned. The
    second tuple element is reserved and always None.
    """
    sources = []
    for key in SOURCE_KEYS:
        if getattr(self, f"use_source_{key}"):
            source = get_source(key)
            source.set_config(self)
            sources.append((source, None))
    return sources

def analyte_sources(self):
sources = []

Expand Down Expand Up @@ -383,8 +392,14 @@ def _validate_county(self):
return bool(get_county_polygon(self.county))

return True
def make_output_directory(self):
"""
Create the output directory if it doesn't exist.
"""
if not os.path.exists(self.output_dir):
os.mkdir(self.output_dir)

def _update_output_name(self):
def update_output_name(self):
"""
Generate a unique output name based on existing directories in the output directory.

Expand Down Expand Up @@ -419,7 +434,7 @@ def _update_output_name(self):

self.output_name = output_name

def _make_output_path(self):
def make_output_path(self):
if not os.path.exists(self.output_path):
os.mkdir(self.output_path)

Expand Down
41 changes: 22 additions & 19 deletions backend/connectors/nmbgmr/source.py
Original file line number Diff line number Diff line change
Expand Up @@ -73,30 +73,33 @@ def get_records(self):
if config.site_limit:
params["limit"] = config.site_limit

if config.parameter.lower() != "waterlevels":
params["parameter"] = get_analyte_search_param(
config.parameter, NMBGMR_ANALYTE_MAPPING
)
else:
params["parameter"] = "Manual groundwater levels"
if not config.sites_only:

if config.parameter.lower() != "waterlevels":
params["parameter"] = get_analyte_search_param(
config.parameter, NMBGMR_ANALYTE_MAPPING
)
else:
params["parameter"] = "Manual groundwater levels"

# tags="features" because the response object is a GeoJSON
sites = self._execute_json_request(
_make_url("locations"), params, tag="features", timeout=30
)
for site in sites:
print(f"Obtaining well data for {site['properties']['point_id']}")
well_data = self._execute_json_request(
_make_url("wells"),
params={"pointid": site["properties"]["point_id"]},
tag="",
)
site["properties"]["formation"] = well_data["formation"]
site["properties"]["well_depth"] = well_data["well_depth_ftbgs"]
site["properties"]["well_depth_units"] = FEET
# site["properties"]["formation"] = None
# site["properties"]["well_depth"] = None
# site["properties"]["well_depth_units"] = FEET
if not config.sites_only:
for site in sites:
print(f"Obtaining well data for {site['properties']['point_id']}")
well_data = self._execute_json_request(
_make_url("wells"),
params={"pointid": site["properties"]["point_id"]},
tag="",
)
site["properties"]["formation"] = well_data["formation"]
site["properties"]["well_depth"] = well_data["well_depth_ftbgs"]
site["properties"]["well_depth_units"] = FEET
# site["properties"]["formation"] = None
# site["properties"]["well_depth"] = None
# site["properties"]["well_depth_units"] = FEET

return sites

Expand Down
6 changes: 3 additions & 3 deletions backend/connectors/nmbgmr/transformer.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,9 +38,9 @@ def _transform(self, record):
"vertical_datum": props["altitude_datum"],
"usgs_site_id": props["site_id"],
"alternate_site_id": props["alternate_site_id"],
"formation": props["formation"],
"well_depth": props["well_depth"],
"well_depth_units": props["well_depth_units"],
"formation": props.get("formation", ""),
"well_depth": props.get("well_depth", ""),
"well_depth_units": props.get("well_depth_units", ""),
}
return rec

Expand Down
40 changes: 27 additions & 13 deletions backend/connectors/nmenv/source.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,29 +47,43 @@ def health(self):
return self.get_records(top=10, analyte="TDS")

def get_records(self, *args, **kw):
    """Return DWB site location records from the SensorThings service.

    In sites-only mode, Things are queried directly (optionally filtered
    to the config's bounding polygon) with no analyte filter; otherwise
    Datastreams are filtered by the mapped analyte (and bounds, if any).

    Returns:
        list: the first Location entity of each matching Thing; empty
        when the analyte cannot be mapped.
    """
    # Explicit keyword analyte wins over the configured parameter.
    analyte = None
    if "analyte" in kw:
        analyte = kw["analyte"]
    elif self.config:
        analyte = self.config.parameter

    service = self.get_service()

    # Guard self.config before attribute access for consistency with the
    # analyte branch below (config may be unset in health checks).
    if self.config and self.config.sites_only:
        ds = service.things()
        q = ds.query()
        fs = []
        if self.config.has_bounds():
            fs.append(
                f"st_within(Locations/location, geography'{self.config.bounding_wkt()}')"
            )
        q = q.expand("Locations")
        # Only apply a filter when one was actually built.
        if fs:
            q = q.filter(" and ".join(fs))
        return [thing.locations.entities[0] for thing in q.list()]

    analyte = get_analyte_search_param(analyte, DWB_ANALYTE_MAPPING)
    if analyte is None:
        # Unmapped analyte: nothing to query for.
        return []

    ds = service.datastreams()
    q = ds.query()
    fs = [f"ObservedProperty/id eq {analyte}"]
    if self.config and self.config.has_bounds():
        fs.append(
            f"st_within(Thing/Location/location, geography'{self.config.bounding_wkt()}')"
        )

    q = q.filter(" and ".join(fs))
    q = q.expand("Thing/Locations")
    return [di.thing.locations.entities[0] for di in q.list()]


class DWBAnalyteSource(STAnalyteSource):
Expand Down
70 changes: 70 additions & 0 deletions backend/connectors/nmose/source.py
Original file line number Diff line number Diff line change
@@ -1,2 +1,72 @@
import os

from shapely import wkt
from backend.connectors import NM_STATE_BOUNDING_POLYGON
from backend.connectors.nmose.transformer import NMOSEPODSiteTransformer
from backend.source import BaseSiteSource


def wkt_to_arcgis_json(obj):
    """Convert a polygon (WKT string or shapely Polygon) to ArcGIS JSON.

    Args:
        obj: a WKT polygon string, or a Polygon-like object exposing
            ``exterior.coords`` (and optionally ``interiors``).

    Returns:
        dict: ArcGIS geometry with ``rings`` (exterior ring first, then
        any interior rings/holes) and a WGS84 (wkid 4326) spatial
        reference.
    """
    if isinstance(obj, str):
        obj = wkt.loads(obj)
    # Exterior first; interior rings follow, generalizing the previous
    # exterior-only behavior. Coordinates are truncated to [x, y].
    rings = [[[c[0], c[1]] for c in obj.exterior.coords]]
    for interior in getattr(obj, "interiors", ()):
        rings.append([[c[0], c[1]] for c in interior.coords])
    # NOTE(review): ArcGIS convention is clockwise exterior rings; shapely
    # does not guarantee orientation -- confirm the service tolerates either.
    return {
        "rings": rings,
        "spatialReference": {"wkid": 4326},
    }

class NMOSEPODSiteSource(BaseSiteSource):
    """
    NMOSEPODSiteSource is a class that inherits from BaseSiteSource.
    It fetches Point of Diversion (POD) site records from the NM OSE
    ArcGIS feature service, paging through results chunk_size at a time.
    """

    transformer_klass = NMOSEPODSiteTransformer
    # Page size for the ArcGIS query (resultRecordCount / resultOffset).
    chunk_size = 5000
    bounding_polygon = NM_STATE_BOUNDING_POLYGON

    def get_records(self, *args, **kw) -> list:
        """Return raw ArcGIS feature dicts for active PODs.

        Pages through the feature service until a short page signals the
        end of the result set. When the config has bounds, the query is
        spatially filtered to that polygon.
        """
        config = self.config

        url = (
            "https://services2.arcgis.com/qXZbWTdPDbTjl7Dy/arcgis/rest"
            "/services/OSE_PODs/FeatureServer/0/query"
        )

        params = {
            # Active PODs only, excluding the SP/SD/LWD basins.
            "where": "pod_status = 'ACT' AND pod_basin NOT IN ('SP', 'SD', 'LWD')",
            "outFields": (
                "OBJECTID,pod_basin,pod_status,easting,northing,datum,utm_accura,status,"
                "pod_name,pod_nbr,pod_suffix,pod_file,depth_well,aquifer,elevation"
            ),
            "outSR": 4326,
            "f": "json",
            "resultRecordCount": self.chunk_size,
            "resultOffset": 0,
        }

        if config.has_bounds():
            # Don't name this local `wkt`: that would shadow the
            # module-level shapely `wkt` import used by wkt_to_arcgis_json.
            bounding_wkt = config.bounding_wkt()
            # NOTE(review): ArcGIS REST expects `geometry` as a JSON string;
            # this passes a dict and relies on _execute_json_request to
            # encode it -- confirm, otherwise wrap in json.dumps(). An
            # explicit inSR may also be needed since the polygon is WGS84.
            params["geometry"] = wkt_to_arcgis_json(bounding_wkt)
            params["geometryType"] = "esriGeometryPolygon"

        records = []
        while True:
            batch = self._execute_json_request(url, params, tag="features")
            records.extend(batch)
            # A short (or empty) page means we've exhausted the result set.
            if len(batch) < self.chunk_size:
                break
            params["resultOffset"] += self.chunk_size

        return records
35 changes: 35 additions & 0 deletions backend/connectors/nmose/transformer.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
from backend.transformer import BaseTransformer, SiteTransformer


class NMOSEPODSiteTransformer(SiteTransformer):
    """Transforms raw NM OSE POD ArcGIS features into standard site records."""

    def _transform(self, record) -> dict:
        """
        Transform the record into a dictionary format.

        Args:
            record (dict): ArcGIS feature with "attributes" and "geometry"
                members.

        Returns:
            dict: The transformed record.
        """
        properties = record["attributes"]
        # ArcGIS point geometry: x is longitude, y is latitude (the source
        # query requests outSR=4326).
        geometry = record["geometry"]

        rec = {
            "source": "NMOSEPOD",
            "id": properties["pod_file"],
            "latitude": geometry["y"],
            "longitude": geometry["x"],
            # Units assumed to be feet for elevation/depth -- TODO confirm
            # against the OSE_PODs service metadata.
            "elevation": properties["elevation"],
            "elevation_units": "ft",
            "aquifer": properties["aquifer"],
            "well_depth": properties["depth_well"],
            "well_depth_units": "ft",
        }
        return rec
Loading
Loading