Skip to content

Commit 5203958

Browse files
committed
Make GCS optional for transfer elevation cache and cleanup
When GCS_BUCKET_NAME is unset, get_cached_elevations() returns an empty dict instead of failing with 'Cannot determine path without bucket name'. dump_cached_elevations() and cleanup_locations() skip GCS operations in that case, allowing transfers to run locally without GCS credentials.

Made-with: Cursor
1 parent 7bf0032 commit 5203958

1 file changed

Lines changed: 16 additions & 9 deletions

File tree

transfers/well_transfer_util.py

Lines changed: 16 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@
2020
from sqlalchemy.orm import Session
2121

2222
from db import GeologicFormation, Location
23-
from services.gcs_helper import get_storage_bucket
23+
from services.gcs_helper import GCS_BUCKET_NAME, get_storage_bucket
2424
from services.util import (
2525
get_state_from_point,
2626
get_county_from_point,
@@ -156,13 +156,17 @@ def get_or_create_geologic_formation(
156156

157157

158158
def get_cached_elevations() -> dict:
159+
if not GCS_BUCKET_NAME:
160+
return {}
159161
bucket = get_storage_bucket()
160162
log_filename = "transfer_data/cached_elevations.json"
161163
blob = bucket.blob(log_filename)
162164
return download_blob_json(blob, default={})
163165

164166

165167
def dump_cached_elevations(lut: dict):
168+
if not GCS_BUCKET_NAME:
169+
return
166170
bucket = get_storage_bucket()
167171
log_filename = "transfer_data/cached_elevations.json"
168172
blob = bucket.blob(log_filename)
@@ -174,17 +178,19 @@ def cleanup_locations(session):
174178
n = len(locations)
175179
lut = {}
176180

177-
bucket = get_storage_bucket()
178-
log_filename = "transfer_data/location_cleanup.json"
179-
blob = bucket.blob(log_filename)
180-
if blob.exists():
181-
lut = download_blob_json(blob, default={})
181+
if GCS_BUCKET_NAME:
182+
bucket = get_storage_bucket()
183+
log_filename = "transfer_data/location_cleanup.json"
184+
blob = bucket.blob(log_filename)
185+
if blob.exists():
186+
lut = download_blob_json(blob, default={})
182187

183188
updates = []
184189
for i, location in enumerate(locations):
185190
if i and not i % 100:
186-
logger.info(f"Processing row {i} of {n}. dumping lut to {log_filename}")
187-
upload_blob_json(blob, lut)
191+
logger.info(f"Processing row {i} of {n}")
192+
if GCS_BUCKET_NAME:
193+
upload_blob_json(blob, lut)
188194
session.bulk_update_mappings(Location, updates)
189195
session.commit()
190196
updates = []
@@ -222,7 +228,8 @@ def cleanup_locations(session):
222228
f"={quad_name}"
223229
)
224230

225-
upload_blob_json(blob, lut)
231+
if GCS_BUCKET_NAME:
232+
upload_blob_json(blob, lut)
226233
if updates:
227234
session.bulk_update_mappings(Location, updates)
228235
session.commit()

0 commit comments

Comments (0)