forked from honeycombio/examples
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathapp.py
More file actions
113 lines (93 loc) · 3.47 KB
/
app.py
File metadata and controls
113 lines (93 loc) · 3.47 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
import json
import beeline
import datetime
import dateutil.parser
import logging
import os
from beeline.middleware.flask import HoneyMiddleware
from flask import Flask, request, render_template
from helpers import *
# Module-level logger (standard per-module logging convention).
log = logging.getLogger(__name__)
# Canned (body, status, headers) response tuples for each failure mode of
# the event-ingest endpoint. Flask accepts these 3-tuples directly.
PARSE_FAILURE_RESPONSE = (
'{"error":"unable to parse request headers"}', 400, None)
AUTH_FAILURE_RESPONSE = (
'{"error":"writekey didn\'t match valid credentials"}', 401, None)
AUTH_MISHAPEN_FAILURE_RESPONSE = (
'{"error":"writekey malformed - expect only letters and numbers"}', 400, None)
JSON_FAILURE_RESPONSE = (
'{"error":"failed to unmarshal JSON body"}', 400, None)
DATASET_LOOKUP_FAILURE_RESPONSE = (
'{"error":"failed to resolve dataset object"}', 400, None)
SCHEMA_LOOKUP_FAILURE_RESPONSE = (
'{"error":"failed to resolve schema"}', 500, None)
# Honeycomb credentials come from the environment; a missing key is logged
# but startup continues (beeline is then effectively a no-op).
honeycomb_write_key = os.environ.get("HONEYCOMB_WRITEKEY")
if not honeycomb_write_key:
    log.error(
        "Got empty writekey from the environment. Please set HONEYCOMB_WRITEKEY")
# Initialize Honeycomb tracing, then wire it into Flask via middleware.
# db_events=False: no automatic DB instrumentation events.
beeline.init(writekey=honeycomb_write_key,
             dataset='apiary-python', service_name='sample_app')
app = Flask(__name__)
HoneyMiddleware(app, db_events=False)
@app.route('/')
def home():
    """Serve the landing page from the home.html template."""
    template_name = 'home.html'
    return render_template(template_name)
@app.route('/x/alive')
def health():
    """Liveness probe: report that the service is up as a JSON string."""
    status = {'alive': 'yes'}
    return json.dumps(status)
@app.route('/1/events/<dataset_name>', methods=['POST'])
def handle_event(dataset_name):
    """Ingest one event for *dataset_name*.

    The JSON request body becomes the event payload; the write key,
    timestamp, and sample rate are read from HTTP headers by
    get_headers(). The write key is authenticated, the dataset and its
    partition and schema are resolved, and the event is handed off to
    write_event(). Returns an empty 200 body on success, or one of the
    module-level canned (body, status, headers) failure tuples.
    """
    event = {}
    # parse JSON body
    try:
        data = json.loads(request.data)
        event['Data'] = data
        beeline.add_field("event_columns", len(event['Data']))
    except (TypeError, json.decoder.JSONDecodeError):
        return JSON_FAILURE_RESPONSE
    # get writekey, timestamp, and sample rate out of HTTP headers
    try:
        get_headers(request, event)
    except ParseFailure:
        return PARSE_FAILURE_RESPONSE
    # authenticate writekey or return 401
    try:
        team = validate_write_key(event['WriteKey'])
        beeline.add_field("team", vars(team))
    except AuthFailure:
        return AUTH_FAILURE_RESPONSE
    except AuthMishapenFailure:
        return AUTH_MISHAPEN_FAILURE_RESPONSE
    # take the writekey and the dataset name and get back a dataset object
    try:
        dataset = resolve_dataset(dataset_name)
        beeline.add_field("dataset", vars(dataset))
    except DatasetLookupFailure:
        return DATASET_LOOKUP_FAILURE_RESPONSE
    # get partition info
    try:
        partition = get_partition(dataset)
        event['ChosenPartition'] = partition
        beeline.add_field("chosen_partition", partition)
    except DatasetLookupFailure:
        return DATASET_LOOKUP_FAILURE_RESPONSE
    # check time - set to now if not present
    if 'Timestamp' not in event:
        event['Timestamp'] = datetime.datetime.now(
            datetime.timezone.utc).isoformat()
    else:
        # record the difference between the event's timestamp and now to help
        # identify lagging events
        event_timestamp = dateutil.parser.parse(event['Timestamp'])
        if event_timestamp.tzinfo is None:
            # BUGFIX: a timestamp without an explicit offset parses as a
            # naive datetime, and subtracting it from the aware UTC "now"
            # below raised TypeError (500 for a valid event). Assume UTC.
            event_timestamp = event_timestamp.replace(
                tzinfo=datetime.timezone.utc)
        event_time_delta = datetime.datetime.now(
            datetime.timezone.utc) - event_timestamp
        beeline.add_field("event_time_delta_sec",
                          event_time_delta.total_seconds())
        beeline.add_field("event_time", event['Timestamp'])
    # verify schema
    try:
        get_schema(dataset)
    except SchemaLookupFailure:
        return SCHEMA_LOOKUP_FAILURE_RESPONSE
    # hand off to external service - write to local disk
    write_event(event)
    return ''