-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathclient.py
More file actions
251 lines (200 loc) · 7.5 KB
/
client.py
File metadata and controls
251 lines (200 loc) · 7.5 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
"""
Calanque Python Client
Supports both gRPC and HTTP (CloudEvents batch) protocols.
"""
import asyncio
import json
from dataclasses import dataclass
from datetime import datetime
from typing import AsyncIterator, Optional
from contextlib import asynccontextmanager
import httpx
@dataclass
class CloudEvent:
    """CloudEvents 1.0 representation.

    Mirrors the CloudEvents 1.0 attribute names plus Calanque-specific
    delivery metadata used for ack/nack bookkeeping.
    """
    id: str  # CloudEvents "id": event identifier
    source: str  # CloudEvents "source": event producer
    type: str  # CloudEvents "type", e.g. "order.created"
    specversion: str = "1.0"  # CloudEvents spec version
    time: Optional[datetime] = None  # event timestamp, if the server sent one
    datacontenttype: Optional[str] = None  # MIME type of `data`
    subject: Optional[str] = None  # CloudEvents "subject", if provided
    data: Optional[dict] = None  # event payload (None when the event carries no data)
    extensions: Optional[dict] = None  # CloudEvents extension attributes
    # Calanque metadata (delivery bookkeeping, not CloudEvents attributes)
    message_id: str = ""  # broker message id; what ack()/nack() send back
    partition: int = 0  # partition the message was read from, as reported by the server
    offset: int = 0  # offset within that partition, as reported by the server
class CalanqueHTTPClient:
    """
    HTTP client for Calanque Pull API.

    Uses CloudEvents batch format (application/cloudevents-batch+json).
    Must be used as an async context manager, which owns the lifecycle of
    the underlying httpx.AsyncClient.
    """

    def __init__(
        self,
        base_url: str,
        subscription: str,
        timeout: float = 30.0,
    ):
        """
        Args:
            base_url: Base URL of the Pull API; a trailing "/" is stripped.
            subscription: Subscription name sent with every request.
            timeout: Per-request HTTP timeout in seconds.
        """
        self.base_url = base_url.rstrip("/")
        self.subscription = subscription
        self.timeout = timeout
        self._client: Optional[httpx.AsyncClient] = None
        # Message IDs from the latest pull() that have not been acked or
        # nacked yet; the default target set for ack()/nack().
        self._pending_ids: list[str] = []

    async def __aenter__(self):
        self._client = httpx.AsyncClient(timeout=self.timeout)
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        if self._client:
            await self._client.aclose()
            self._client = None  # make use-after-exit fail loudly

    def _require_client(self) -> "httpx.AsyncClient":
        """Return the live HTTP client, or raise if the context was never entered.

        Raises:
            RuntimeError: If the client is used outside ``async with``.
                (Previously this surfaced as an opaque AttributeError.)
        """
        if self._client is None:
            raise RuntimeError(
                "CalanqueHTTPClient must be entered via 'async with' before use"
            )
        return self._client

    def _drop_pending(self, ids: list[str]) -> None:
        """Remove `ids` from the pending set after a successful ack/nack."""
        done = set(ids)  # O(1) membership instead of O(n) list scans
        self._pending_ids = [mid for mid in self._pending_ids if mid not in done]

    async def pull(
        self,
        max_events: int = 10,
        timeout: str = "5s",
    ) -> "tuple[list[CloudEvent], str]":
        """
        Pull a batch of events.

        Args:
            max_events: Maximum number of events to return in the batch.
            timeout: Server-side long-poll timeout string (e.g. "5s").

        Returns:
            Tuple of (events, batch_id); batch_id comes from the
            X-Batch-Id response header ("" when absent).

        Raises:
            RuntimeError: If used outside ``async with``.
            httpx.HTTPStatusError: On a non-2xx response.
        """
        response = await self._require_client().get(
            f"{self.base_url}/events",
            params={
                "subscription": self.subscription,
                "maxEvents": max_events,
                "timeout": timeout,
            },
        )
        response.raise_for_status()
        batch_id = response.headers.get("X-Batch-Id", "")
        events_data = response.json()
        events = []
        # NOTE: a new pull replaces any still-pending IDs from the previous
        # batch; callers should ack/nack before pulling again.
        self._pending_ids = []
        for e in events_data:
            event = CloudEvent(
                id=e["id"],
                source=e["source"],
                type=e["type"],
                specversion=e.get("specversion", "1.0"),
                time=datetime.fromisoformat(e["time"]) if e.get("time") else None,
                datacontenttype=e.get("datacontenttype"),
                subject=e.get("subject"),
                data=e.get("data"),
                extensions=e.get("extensions"),
                message_id=e.get("messageid", ""),
                partition=e.get("partition", 0),
                offset=e.get("offset", 0),
            )
            events.append(event)
            self._pending_ids.append(event.message_id)
        return events, batch_id

    async def ack(
        self,
        message_ids: Optional[list[str]] = None,
        batch_id: Optional[str] = None,
    ) -> bool:
        """Acknowledge processed messages.

        Args:
            message_ids: IDs to ack; defaults to all pending IDs from the
                last pull(). An explicit empty list acks nothing extra
                (it does NOT fall back to the pending set).
            batch_id: Optional batch ID returned by pull().
        """
        # `is None` check, not `or`: an explicit empty list must not
        # silently ack every pending message.
        ids = self._pending_ids if message_ids is None else message_ids
        response = await self._require_client().post(
            f"{self.base_url}/ack",
            params={"subscription": self.subscription},
            json={"messageIds": ids, "batchId": batch_id},
        )
        response.raise_for_status()
        self._drop_pending(ids)
        return True

    async def nack(
        self,
        message_ids: Optional[list[str]] = None,
        requeue: bool = True,
    ) -> bool:
        """
        Negative acknowledge messages.

        Args:
            message_ids: IDs to nack; defaults to all pending IDs from the
                last pull(). An explicit empty list nacks nothing extra.
            requeue: If True, message will be retried. If False, sent to DLQ.
        """
        # Same `is None` semantics as ack(): empty list != "all pending".
        ids = self._pending_ids if message_ids is None else message_ids
        response = await self._require_client().post(
            f"{self.base_url}/nack",
            params={"subscription": self.subscription},
            json={"messageIds": ids, "requeue": requeue},
        )
        response.raise_for_status()
        self._drop_pending(ids)
        return True

    async def ack_one(self, message_id: str) -> bool:
        """Acknowledge a single message."""
        return await self.ack(message_ids=[message_id])

    async def nack_one(self, message_id: str, requeue: bool = True) -> bool:
        """Negative acknowledge a single message."""
        return await self.nack(message_ids=[message_id], requeue=requeue)
class ProcessingError(Exception):
    """Raised when handling a pulled message fails.

    The `retryable` flag tells the consumer loop whether the message
    should be requeued for another attempt (True) or routed to the
    dead-letter queue (False).
    """

    def __init__(self, message: str, retryable: bool = True):
        # Record routing intent first; the base class only needs the text.
        self.retryable = retryable
        super().__init__(message)
@asynccontextmanager
async def calanque_client(
    base_url: str,
    subscription: str,
    timeout: float = 30.0,
) -> AsyncIterator[CalanqueHTTPClient]:
    """Context manager for Calanque HTTP client.

    Builds a CalanqueHTTPClient, enters it, yields it to the caller, and
    closes the underlying HTTP client on exit.
    """
    client = CalanqueHTTPClient(base_url, subscription, timeout)
    async with client:
        yield client
# Example usage
async def main():
    """Example: Order processing with retry handling."""
    async with calanque_client(
        "http://order-processor-consumer:8080",
        "order-processor",
    ) as client:
        while True:
            # Fetch the next batch of events from the subscription.
            events, batch_id = await client.pull(max_events=10)
            if not events:
                # Nothing available; back off briefly before polling again.
                await asyncio.sleep(1)
                continue
            print(f"Received {len(events)} events (batch: {batch_id})")
            # Decide ack/nack per event based on the processing outcome.
            for ev in events:
                try:
                    await process_order(ev)
                    # Success: acknowledge so it is not redelivered.
                    await client.ack_one(ev.message_id)
                    print(f"✓ Processed: {ev.type} - {ev.id}")
                except ProcessingError as err:
                    if err.retryable:
                        # Transient failure: requeue for another attempt.
                        await client.nack_one(ev.message_id, requeue=True)
                        print(f"↻ Retrying: {ev.type} - {ev.id}")
                    else:
                        # Permanent failure: route to the dead-letter queue.
                        await client.nack_one(ev.message_id, requeue=False)
                        print(f"✗ DLQ: {ev.type} - {ev.id}")
                except Exception as err:
                    # Unknown failure: be conservative and requeue.
                    await client.nack_one(ev.message_id, requeue=True)
                    print(f"↻ Error, retrying: {ev.type} - {err}")
async def process_order(event: "CloudEvent") -> None:
    """Example order processor.

    Dispatches on the CloudEvents type; unrecognized types are ignored.

    Args:
        event: The pulled event; `event.data` may be None (CloudEvent
            defaults it to None), so it is normalized to {} here instead
            of crashing with AttributeError on data-less events.

    Raises:
        ProcessingError: Non-retryable when an order.created event has no
            items; retryable when the simulated inventory check fails.
    """
    # Normalize: CloudEvent.data defaults to None, and a None payload must
    # not turn every .get() below into an AttributeError (which the caller
    # would otherwise retry forever).
    data = event.data or {}
    if event.type == "order.created":
        # Validate order
        if not data.get("items"):
            raise ProcessingError("Order has no items", retryable=False)
        # Check inventory (might fail temporarily) — test hook.
        if data.get("_test_fail"):
            raise ProcessingError("Inventory check failed", retryable=True)
        print(f" Processing order: {data.get('id')}")
    elif event.type == "order.updated":
        print(f" Order updated: {data.get('id')}")
    elif event.type == "order.cancelled":
        print(f" Order cancelled: {data.get('id')}")
# Entry point: run the example consumer loop when executed as a script.
if __name__ == "__main__":
    asyncio.run(main())