ref: up
This commit is contained in:
433
fission-python/template/examples/example_crud.py
Normal file
433
fission-python/template/examples/example_crud.py
Normal file
@@ -0,0 +1,433 @@
|
||||
"""
|
||||
Example: Basic CRUD operations for a resource.
|
||||
|
||||
This demonstrates:
|
||||
- Pydantic request validation
|
||||
- Database operations with helpers
|
||||
- Standard error handling
|
||||
- Proper Fission docstring configuration
|
||||
"""
|
||||
|
||||
from flask import request
|
||||
from helpers import (
|
||||
init_db_connection,
|
||||
db_row_to_dict,
|
||||
db_rows_to_array,
|
||||
get_user_from_headers,
|
||||
format_error_response,
|
||||
)
|
||||
from exceptions import ValidationError, NotFoundError, ConflictError, DatabaseError
|
||||
from models import ItemResponse, ItemCreateRequest, ItemUpdateRequest
|
||||
|
||||
# Pool manager executor, one request at a time
|
||||
def create(event, context):
    """
    ```fission
    {
        "name": "create-item",
        "http_triggers": {
            "create": {
                "url": "/api/items",
                "methods": ["POST"]
            }
        }
    }
    ```
    Create a new item.

    **Request Body:**
    ```json
    {
        "name": "string (required, 1-255 chars)",
        "description": "string (optional)",
        "status": "active|inactive|pending",
        "metadata": {}
    }
    ```

    **Response:**
    - 200: Item created successfully
    - 400: Validation error
    - 409: Conflict (e.g., duplicate name)
    - 500: Database error
    """
    # Resolve the acting user up front so every raised error carries audit info.
    x_user = get_user_from_headers()

    # Parse and validate the JSON body against the Pydantic model.
    try:
        payload = ItemCreateRequest(**request.get_json())
    except Exception as e:
        raise ValidationError(f"Invalid request: {str(e)}", x_user=x_user)

    connection = None
    try:
        connection = init_db_connection()
        cur = connection.cursor()

        # Reject duplicate names before attempting the insert.
        # NOTE(review): check-then-insert is not atomic; a concurrent insert
        # between these statements would surface as a generic DB error.
        cur.execute(
            "SELECT id FROM items WHERE name = %s",
            (payload.name,)
        )
        if cur.fetchone():
            raise ConflictError(
                f"Item with name '{payload.name}' already exists",
                x_user=x_user,
                details={"name": payload.name}
            )

        # Persist the item and read the stored row back in one round trip.
        cur.execute(
            """
            INSERT INTO items (name, description, status, metadata)
            VALUES (%s, %s, %s, %s)
            RETURNING id, name, description, status, metadata, created, modified
            """,
            (payload.name, payload.description, payload.status.value, payload.metadata)
        )
        created_row = cur.fetchone()
        connection.commit()

        # Convert the DB row into a plain dict for the HTTP response.
        return db_row_to_dict(cur, created_row)

    except (ValidationError, NotFoundError, ConflictError, DatabaseError):
        # Domain exceptions already map to status codes; pass them through.
        raise
    except Exception as e:
        if connection:
            connection.rollback()
        raise DatabaseError(f"Database error: {str(e)}", x_user=x_user)
    finally:
        if connection:
            connection.close()
|
||||
|
||||
|
||||
def list_items(event, context):
    """
    ```fission
    {
        "name": "list-items",
        "http_triggers": {
            "list": {
                "url": "/api/items",
                "methods": ["GET"]
            }
        }
    }
    ```
    List items with optional filtering and pagination.

    **Query Parameters:**
    - `page` (int): Page number, zero-based (default: 0)
    - `size` (int): Items per page (default: 10, max: 100)
    - `asc` (bool): Sort ascending (default: true)
    - `filter[ids]` (string[]): Filter by specific IDs
    - `filter[keyword]` (string): Search in name/description
    - `filter[status]` (string[]): Filter by status values
    - `filter[created_from]` (datetime): Filter created after
    - `filter[created_to]` (datetime): Filter created before
    - `filter[created_to]` (datetime): Filter created before

    **Response:**
    ```json
    {
        "data": [...],
        "page": 0,
        "size": 10,
        "total": 42
    }
    ```
    """
    from helpers import str_to_bool

    # Parse pagination. Clamp to the documented bounds: page >= 0,
    # 1 <= size <= 100 (the docstring promised max 100 but it was unenforced).
    page = max(int(request.args.get("page", 0)), 0)
    size = min(max(int(request.args.get("size", 10)), 1), 100)
    asc = str_to_bool(request.args.get("asc", "true"))

    # Parse filters (all optional; all documented filters are now honored,
    # including created_from / created_to which were previously ignored).
    ids = request.args.getlist("filter[ids]")
    keyword = request.args.get("filter[keyword]")
    statuses = request.args.getlist("filter[status]")
    created_from = request.args.get("filter[created_from]")
    created_to = request.args.get("filter[created_to]")

    # Build WHERE clause; every user value goes through %s placeholders.
    conditions = []
    params = []

    if ids:
        conditions.append(f"id IN ({', '.join(['%s'] * len(ids))})")
        params.extend(ids)
    if keyword:
        conditions.append("(name ILIKE %s OR description ILIKE %s)")
        params.extend([f"%{keyword}%", f"%{keyword}%"])
    if statuses:
        conditions.append(f"status IN ({', '.join(['%s'] * len(statuses))})")
        params.extend(statuses)
    if created_from:
        conditions.append("created >= %s")
        params.append(created_from)
    if created_to:
        conditions.append("created <= %s")
        params.append(created_to)

    where_clause = "WHERE " + " AND ".join(conditions) if conditions else ""

    conn = None
    try:
        conn = init_db_connection()
        cursor = conn.cursor()

        # Total count for the pagination envelope (same filters, no paging).
        count_sql = f"SELECT COUNT(*) FROM items {where_clause}"
        cursor.execute(count_sql, params)
        total = cursor.fetchone()[0]

        # Fetch the requested page.
        offset = page * size
        data_sql = f"""
            SELECT id, name, description, status, metadata, created, modified
            FROM items
            {where_clause}
            ORDER BY created {'ASC' if asc else 'DESC'}
            LIMIT %s OFFSET %s
        """
        cursor.execute(data_sql, params + [size, offset])
        rows = cursor.fetchall()
        items = [db_row_to_dict(cursor, row) for row in rows]

        return {
            "data": items,
            "page": page,
            "size": size,
            "total": total
        }

    except Exception as e:
        raise DatabaseError(f"Failed to list items: {str(e)}")
    finally:
        if conn:
            conn.close()
|
||||
|
||||
|
||||
def get_item(event, context):
    """
    ```fission
    {
        "name": "get-item",
        "http_triggers": {
            "get": {
                "url": "/api/items/:id",
                "methods": ["GET"]
            }
        }
    }
    ```
    Get a specific item by ID.

    **URL Parameters:**
    - `id` (string): Item UUID

    **Response:**
    - 200: Item found
    - 404: Item not found
    - 500: Database error
    """
    # Fission surfaces path parameters differently per trigger type: prefer
    # Flask's view_args, otherwise fall back to the last path segment.
    item_id = request.view_args.get('id') if hasattr(request, 'view_args') else None
    if not item_id:
        item_id = request.path.rstrip('/').split('/')[-1]

    if not item_id:
        raise ValidationError("Item ID is required")

    db = None
    try:
        db = init_db_connection()
        cur = db.cursor()
        cur.execute(
            """
            SELECT id, name, description, status, metadata, created, modified
            FROM items WHERE id = %s
            """,
            (item_id,)
        )
        record = cur.fetchone()
        if record is None:
            raise NotFoundError(f"Item {item_id} not found")

        return db_row_to_dict(cur, record)

    except NotFoundError:
        # A missing row is a 404, not a database failure — don't rewrap it.
        raise
    except Exception as e:
        raise DatabaseError(f"Failed to fetch item: {str(e)}")
    finally:
        if db:
            db.close()
|
||||
|
||||
|
||||
def update_item(event, context):
    """
    ```fission
    {
        "name": "update-item",
        "http_triggers": {
            "update": {
                "url": "/api/items/:id",
                "methods": ["PUT", "PATCH"]
            }
        }
    }
    ```
    Update an existing item.

    **URL Parameters:**
    - `id` (string): Item UUID

    **Request Body:**
    ```json
    {
        "name": "string (optional)",
        "description": "string (optional)",
        "status": "active|inactive|pending (optional)"
    }
    ```

    **Response:**
    - 200: Item updated successfully
    - 404: Item not found
    - 409: Conflict (duplicate name)
    - 400: Validation error
    - 500: Database error
    """
    x_user = get_user_from_headers()

    # Resolve the target item ID from route args, else from the raw path.
    item_id = request.view_args.get('id') if hasattr(request, 'view_args') else None
    if not item_id:
        item_id = request.path.rstrip('/').split('/')[-1]

    if not item_id:
        raise ValidationError("Item ID is required")

    # Validate the partial-update payload.
    try:
        data = ItemUpdateRequest(**request.get_json())
    except Exception as e:
        raise ValidationError(f"Invalid request: {str(e)}", x_user=x_user)

    # Collect SET clauses only for the fields that were actually supplied.
    updates = []
    params = []
    for column, value in (
        ("name", data.name),
        ("description", data.description),
        ("status", None if data.status is None else data.status.value),
        ("metadata", data.metadata),
    ):
        if value is not None:
            updates.append(f"{column} = %s")
            params.append(value)

    if not updates:
        raise ValidationError("No update fields provided", x_user=x_user)

    # Always bump the modification timestamp; the trailing param is the row ID
    # consumed by the WHERE clause.
    updates.append("modified = NOW()")
    params.append(item_id)

    conn = None
    try:
        conn = init_db_connection()
        cursor = conn.cursor()

        # Guard against renaming onto a name another item already holds.
        if data.name:
            cursor.execute(
                "SELECT id FROM items WHERE name = %s AND id != %s",
                (data.name, item_id)
            )
            if cursor.fetchone():
                raise ConflictError(
                    f"Another item with name '{data.name}' already exists",
                    x_user=x_user,
                    details={"name": data.name}
                )

        # Apply the update and fetch the resulting row in a single statement.
        sql = f"UPDATE items SET {', '.join(updates)} WHERE id = %s RETURNING *"
        cursor.execute(sql, params)
        row = cursor.fetchone()
        conn.commit()

        if not row:
            raise NotFoundError(f"Item {item_id} not found", x_user=x_user)

        return db_row_to_dict(cursor, row)

    except (ValidationError, NotFoundError, ConflictError, DatabaseError):
        raise
    except Exception as e:
        if conn:
            conn.rollback()
        raise DatabaseError(f"Failed to update item: {str(e)}", x_user=x_user)
    finally:
        if conn:
            conn.close()
|
||||
|
||||
|
||||
def delete_item(event, context):
    """
    ```fission
    {
        "name": "delete-item",
        "http_triggers": {
            "delete": {
                "url": "/api/items/:id",
                "methods": ["DELETE"]
            }
        }
    }
    ```
    Delete an item.

    **URL Parameters:**
    - `id` (string): Item UUID

    **Response:**
    - 204: Item deleted successfully
    - 404: Item not found
    - 500: Database error
    """
    x_user = get_user_from_headers()

    # Resolve the target ID from route args, falling back to the raw path.
    item_id = request.view_args.get('id') if hasattr(request, 'view_args') else None
    if not item_id:
        item_id = request.path.rstrip('/').split('/')[-1]

    if not item_id:
        raise ValidationError("Item ID is required", x_user=x_user)

    db = None
    try:
        db = init_db_connection()
        cur = db.cursor()
        cur.execute("DELETE FROM items WHERE id = %s", (item_id,))
        db.commit()

        # rowcount reveals whether the row existed in the first place.
        if cur.rowcount == 0:
            raise NotFoundError(f"Item {item_id} not found", x_user=x_user)

        # Returning None maps to 204 No Content.
        return None

    except NotFoundError:
        raise
    except Exception as e:
        if db:
            db.rollback()
        raise DatabaseError(f"Failed to delete item: {str(e)}", x_user=x_user)
    finally:
        if db:
            db.close()
|
||||
311
fission-python/template/examples/example_scheduler.py
Normal file
311
fission-python/template/examples/example_scheduler.py
Normal file
@@ -0,0 +1,311 @@
|
||||
"""
|
||||
Example: Background job / scheduled task pattern.
|
||||
|
||||
This demonstrates:
|
||||
- Long-running job execution
|
||||
- Job status tracking
|
||||
- Error handling and retries
|
||||
- Periodic task scheduling
|
||||
- Worker session management
|
||||
|
||||
Use cases: report generation, batch processing, cleanup jobs, etc.
|
||||
"""
|
||||
|
||||
import datetime
|
||||
import time
|
||||
import uuid
|
||||
from helpers import init_db_connection, db_row_to_dict, db_rows_to_array
|
||||
from exceptions import DatabaseError
|
||||
|
||||
|
||||
def scheduled_job(event, context):
    """
    ```fission
    {
        "name": "scheduled-job",
        "http_triggers": {
            "run": {
                "url": "/jobs/run",
                "methods": ["POST"]
            }
        },
        "kafka_triggers": {
            "job-queue": {
                "topic": "job-queue",
                "consumer_group": "scheduler-workers"
            }
        }
    }
    ```
    Execute a scheduled or queued background job.

    This function can be triggered:
    - Manually via HTTP POST /jobs/run
    - Automatically by message queue (Kafka)
    - By cron schedule (via Fission timer trigger)

    **Request Body (HTTP trigger):**
    ```json
    {
        "job_type": "report_generation",
        "parameters": {
            "report_type": "daily",
            "date": "2025-03-18"
        }
    }
    ```

    **Response:**
    - 200: Job completed successfully
    - 202: Job accepted for async processing
    - 400: Invalid request
    - 500: Job failed
    """
    # The job type may arrive under either key depending on the trigger source.
    job_type = event.get("job_type") or event.get("type", "default")
    parameters = event.get("parameters", {})

    # Tracking ID lets operators correlate DB rows with logs.
    job_id = str(uuid.uuid4())
    started_at = datetime.datetime.utcnow()

    conn = None
    try:
        conn = init_db_connection()
        cursor = conn.cursor()

        # Record the job as 'running' (and commit) before doing any work, so
        # a crash mid-job still leaves an audit trail.
        cursor.execute(
            """
            INSERT INTO jobs (id, type, parameters, status, started_at)
            VALUES (%s, %s, %s, 'running', %s)
            """,
            (job_id, job_type, parameters, started_at)
        )
        conn.commit()

        # Dispatch to the concrete job implementation.
        if job_type == "report_generation":
            result = generate_report(cursor, job_id, parameters)
        elif job_type == "data_cleanup":
            result = cleanup_old_data(cursor, job_id, parameters)
        elif job_type == "sync_external":
            result = sync_external_system(cursor, job_id, parameters)
        else:
            result = run_default_job(cursor, job_id, parameters)

        # Mark job as completed; duration is computed server-side from the
        # stored started_at so it matches the persisted timestamps.
        completed_at = datetime.datetime.utcnow()
        cursor.execute(
            """
            UPDATE jobs
            SET status = 'completed',
                result = %s,
                completed_at = %s,
                duration = EXTRACT(EPOCH FROM (%s - started_at))
            WHERE id = %s
            """,
            (result, completed_at, completed_at, job_id)
        )
        conn.commit()

        return {
            "job_id": job_id,
            "status": "completed",
            "result": result,
            "duration_seconds": (completed_at - started_at).total_seconds()
        }

    except Exception as e:
        # Best-effort: mark the job as failed. Bookkeeping errors must never
        # mask the original exception, so they are swallowed — but only
        # Exception subclasses. (This was a bare `except:`, which would also
        # have swallowed SystemExit/KeyboardInterrupt.)
        if conn:
            try:
                cursor = conn.cursor()
                cursor.execute(
                    """
                    UPDATE jobs
                    SET status = 'failed',
                        error = %s,
                        completed_at = NOW()
                    WHERE id = %s
                    """,
                    (str(e), job_id)
                )
                conn.commit()
            except Exception:
                pass

        raise DatabaseError(f"Job {job_type} failed: {str(e)}")
    finally:
        if conn:
            conn.close()
|
||||
|
||||
|
||||
def generate_report(cursor, job_id: str, parameters: dict):
    """
    Generate a report based on parameters.

    Args:
        cursor: Database cursor
        job_id: Job tracking ID
        parameters: Report configuration (report_type, date, filters, etc.)

    Returns:
        Dictionary with report metadata and summary
    """
    kind = parameters.get("report_type", "daily")
    day = parameters.get("date", datetime.datetime.utcnow().strftime("%Y-%m-%d"))

    # Stand-in for the expensive aggregation step.
    time.sleep(1)

    # Pull the day's headline numbers with a single aggregate query.
    cursor.execute(
        """
        SELECT
            COUNT(*) as total_orders,
            SUM(total) as revenue,
            COUNT(DISTINCT user_id) as unique_customers
        FROM orders
        WHERE DATE(created_at) = %s
        """,
        (day,)
    )
    summary = db_row_to_dict(cursor, cursor.fetchone())

    return {
        "report_type": kind,
        "date": day,
        "statistics": summary,
        "generated_at": datetime.datetime.utcnow().isoformat()
    }
|
||||
|
||||
|
||||
def cleanup_old_data(cursor, job_id: str, parameters: dict):
    """
    Clean up old records based on retention policy.

    Args:
        cursor: Database cursor
        job_id: Job tracking ID
        parameters: Cleanup configuration (table, days_to_retain, etc.)

    Returns:
        Dictionary with cleanup summary
    """
    target = parameters.get("table", "jobs")
    retention_days = int(parameters.get("days_to_retain", 90))
    cutoff = datetime.datetime.utcnow() - datetime.timedelta(days=retention_days)

    # Table names cannot be parameterized, so only a fixed whitelist may be
    # interpolated into the SQL below.
    if target not in ["jobs", "webhook_events", "logs", "sessions"]:
        raise ValueError(f"Cannot clean table: {target}")

    # Count first so the summary reflects what the DELETE will remove.
    cursor.execute(
        f"SELECT COUNT(*) FROM {target} WHERE created_at < %s",
        (cutoff,)
    )
    stale = cursor.fetchone()[0]

    # Purge everything older than the cutoff.
    cursor.execute(
        f"DELETE FROM {target} WHERE created_at < %s",
        (cutoff,)
    )

    return {
        "table": target,
        "cutoff_date": cutoff.isoformat(),
        "records_deleted": stale
    }
|
||||
|
||||
|
||||
def sync_external_system(cursor, job_id: str, parameters: dict):
    """
    Synchronize data with external system.

    Args:
        cursor: Database cursor
        job_id: Job tracking ID
        parameters: Sync configuration (system, endpoint, filters, etc.)

    Returns:
        Dictionary with sync summary
    """
    system = parameters.get("system")
    endpoint = parameters.get("endpoint")

    # Imported lazily so only this job type needs the HTTP client.
    import requests

    # Incremental sync: only ask the remote for records changed since the
    # last recorded run.
    cursor.execute(
        "SELECT last_sync_at FROM sync_state WHERE system = %s",
        (system,)
    )
    state_row = cursor.fetchone()
    last_sync = state_row[0] if state_row else None

    query = {"since": last_sync.isoformat() if last_sync else ""}

    # Fetch the delta from the external API.
    try:
        resp = requests.get(endpoint, params=query, timeout=30)
        resp.raise_for_status()
        data = resp.json()
    except Exception as e:
        raise DatabaseError(f"Failed to fetch from {system}: {str(e)}")

    # Upsert each record keyed on (system, external_id) so re-syncs are safe.
    records_processed = 0
    for record in data.get("items", []):
        cursor.execute(
            """
            INSERT INTO external_data (system, external_id, data, synced_at)
            VALUES (%s, %s, %s, NOW())
            ON CONFLICT (system, external_id) DO UPDATE SET
                data = EXCLUDED.data,
                synced_at = EXCLUDED.synced_at
            """,
            (system, record["id"], record)
        )
        records_processed += 1

    # Remember this run so the next sync starts from here.
    cursor.execute(
        """
        INSERT INTO sync_state (system, last_sync_at)
        VALUES (%s, NOW())
        ON CONFLICT (system) DO UPDATE SET
            last_sync_at = NOW()
        """,
        (system,)
    )

    return {
        "system": system,
        "records_processed": records_processed,
        "sync_timestamp": datetime.datetime.utcnow().isoformat()
    }
|
||||
|
||||
|
||||
def run_default_job(cursor, job_id: str, parameters: dict):
    """
    Default no-op job for testing.

    Args:
        cursor: Database cursor
        job_id: Job tracking ID
        parameters: Job parameters

    Returns:
        Simple acknowledgment
    """
    # Simulate a small amount of work, then acknowledge.
    time.sleep(0.5)
    return {
        "message": "Default job executed",
        "parameters_received": parameters,
    }
|
||||
296
fission-python/template/examples/example_webhook.py
Normal file
296
fission-python/template/examples/example_webhook.py
Normal file
@@ -0,0 +1,296 @@
|
||||
"""
|
||||
Example: Webhook receiver pattern.
|
||||
|
||||
This demonstrates:
|
||||
- Processing external service callbacks
|
||||
- Signature verification
|
||||
- Event type handling
|
||||
- Idempotency checks
|
||||
- Async processing patterns
|
||||
"""
|
||||
|
||||
import hashlib
|
||||
import hmac
|
||||
from flask import request
|
||||
from helpers import init_db_connection, get_secret
|
||||
from exceptions import ValidationError, DatabaseError
|
||||
|
||||
# HMAC key used by verify_signature(); loaded once at import time.
# An empty value (the default) disables signature verification entirely —
# acceptable for dev, must be set in production.
WEBHOOK_SECRET = get_secret("WEBHOOK_SECRET", "")
|
||||
|
||||
|
||||
def verify_signature(payload: bytes, signature: str) -> bool:
    """
    Verify HMAC-SHA256 webhook signature.

    Args:
        payload: Raw request body bytes
        signature: Signature header value (format: "sha256=<hex>")

    Returns:
        True if signature is valid, False otherwise
    """
    # Dev convenience: with no configured secret, accept everything.
    if not WEBHOOK_SECRET:
        return True

    digest = hmac.new(
        WEBHOOK_SECRET.encode(),
        payload,
        hashlib.sha256,
    ).hexdigest()

    # Strip an optional "sha256=" style prefix (split at the first '=').
    _, sep, tail = signature.partition("=")
    received = tail if sep else signature

    # Constant-time comparison to avoid timing side channels.
    return hmac.compare_digest(digest, received)
|
||||
|
||||
|
||||
def webhook_receiver(event, context):
    """
    ```fission
    {
        "name": "webhook-receiver",
        "http_triggers": {
            "webhook": {
                "url": "/webhooks/external-service",
                "methods": ["POST"]
            }
        }
    }
    ```
    Receive and process webhook from external service.

    **Request:**
    - Raw JSON payload in body
    - Signature header: `X-Webhook-Signature: sha256=<hmac>`

    **Response:**
    - 200: Webhook accepted for processing
    - 400: Invalid signature or payload
    - 500: Processing error

    **Idempotency:**
    This function is idempotent - duplicate webhooks with same
    event ID will not be processed twice.
    """
    # Get raw body for signature verification. The HMAC is computed over the
    # exact bytes sent, so the parsed/re-serialized JSON must not be used.
    payload = request.get_data()
    signature = request.headers.get("X-Webhook-Signature", "")

    # Verify signature before trusting anything in the payload.
    if not verify_signature(payload, signature):
        raise ValidationError("Invalid webhook signature")

    # Parse payload
    try:
        data = request.get_json()
    except Exception as e:
        raise ValidationError(f"Invalid JSON payload: {str(e)}")

    # Validate required fields. Senders vary between "event_id"/"id" and
    # "event_type"/"type" key conventions, so both are accepted.
    event_id = data.get("event_id") or data.get("id")
    event_type = data.get("event_type") or data.get("type")

    if not event_id:
        raise ValidationError("Missing event_id in webhook payload")

    if not event_type:
        raise ValidationError("Missing event_type in webhook payload")

    # Idempotency check: have we already processed this event?
    conn = None
    try:
        conn = init_db_connection()
        cursor = conn.cursor()

        # Check if event already processed
        cursor.execute(
            "SELECT id FROM webhook_events WHERE event_id = %s",
            (event_id,)
        )
        if cursor.fetchone():
            # Already processed - return success (idempotent) so the sender
            # stops retrying.
            return {"status": "already_processed", "event_id": event_id}

        # Record webhook event (for idempotency)
        cursor.execute(
            """
            INSERT INTO webhook_events (event_id, event_type, payload, received_at)
            VALUES (%s, %s, %s, NOW())
            """,
            (event_id, event_type, payload.decode('utf-8'))
        )

        # Process based on event type
        result = process_event(cursor, event_type, data)

        # A single commit covers both the idempotency record and the
        # handler's writes: if processing raises, the rollback below also
        # discards the event record, so a retried delivery is reprocessed.
        conn.commit()
        return {"status": "processed", "event_id": event_id, "result": result}

    except Exception as e:
        if conn:
            conn.rollback()
        raise DatabaseError(f"Failed to process webhook: {str(e)}")
    finally:
        if conn:
            conn.close()
|
||||
|
||||
|
||||
def process_event(cursor, event_type: str, data: dict):
    """
    Route event to appropriate handler.

    Args:
        cursor: Database cursor
        event_type: Type of event (e.g., "user.created", "order.updated")
        data: Event payload

    Returns:
        Handler result
    """
    # Dispatch table: event type -> handler function.
    dispatch = {
        "user.created": handle_user_created,
        "user.updated": handle_user_updated,
        "user.deleted": handle_user_deleted,
        "order.created": handle_order_created,
        "order.paid": handle_order_paid,
        "order.shipped": handle_order_shipped,
    }

    try:
        handler = dispatch[event_type]
    except KeyError:
        # Unknown event types are logged and acknowledged, never failed —
        # failing would make the sender retry forever.
        get_logger().warning(f"Unhandled webhook event type: {event_type}")
        return {"skipped": True, "reason": "unknown_event_type"}

    return handler(cursor, data)
|
||||
|
||||
|
||||
def handle_user_created(cursor, data: dict):
    """Upsert a user record in response to a user-creation webhook."""
    uid = data.get("user_id") or data.get("id")

    # Upsert so a replayed/duplicated event simply refreshes the record.
    cursor.execute(
        """
        INSERT INTO users (id, email, name, created_at)
        VALUES (%s, %s, %s, NOW())
        ON CONFLICT (id) DO UPDATE SET
            email = EXCLUDED.email,
            name = EXCLUDED.name,
            updated_at = NOW()
        """,
        (uid, data.get("email"), data.get("name"))
    )

    # Follow-up work (welcome email, etc.) would be queued asynchronously.
    # enqueue_welcome_email(uid, email)

    return {"action": "user_created", "user_id": uid}
|
||||
|
||||
|
||||
def handle_user_updated(cursor, data: dict):
    """Handle user update event.

    Security: the keys of ``updates`` come straight from an external webhook
    payload. Previously they were interpolated directly into the SQL, which
    allowed SQL injection via crafted key names; they are now checked against
    a fixed column whitelist and unknown keys are ignored. An empty (or fully
    filtered) update set is treated as a no-op instead of emitting a
    malformed ``UPDATE users SET , updated_at = ...`` statement.
    """
    user_id = data.get("user_id") or data.get("id")
    updates = data.get("updates", {})

    # Columns a webhook is allowed to change (id/timestamps are managed here).
    allowed_columns = {"email", "name", "status"}

    # Build the SET list; only whitelisted column names reach the SQL text,
    # all values go through %s placeholders.
    set_clauses = []
    params = []
    for key, value in updates.items():
        if key in allowed_columns:
            set_clauses.append(f"{key} = %s")
            params.append(value)

    # Nothing valid to update: acknowledge without touching the database.
    if not set_clauses:
        return {"action": "user_updated", "user_id": user_id}

    params.append(user_id)

    cursor.execute(
        f"UPDATE users SET {', '.join(set_clauses)}, updated_at = NOW() WHERE id = %s",
        params
    )

    return {"action": "user_updated", "user_id": user_id}
|
||||
|
||||
|
||||
def handle_user_deleted(cursor, data: dict):
    """Soft-delete a user in response to a deletion webhook."""
    uid = data.get("user_id") or data.get("id")

    # Soft delete only: keep the row for auditing, mark it as gone.
    cursor.execute(
        "UPDATE users SET status = 'deleted', deleted_at = NOW() WHERE id = %s",
        (uid,)
    )

    return {"action": "user_deleted", "user_id": uid}
|
||||
|
||||
|
||||
def handle_order_created(cursor, data: dict):
    """Record a newly created order reported by the external service."""
    oid = data.get("order_id") or data.get("id")

    # New orders always start in 'pending' until a payment event arrives.
    cursor.execute(
        """
        INSERT INTO orders (id, user_id, total, status, created_at)
        VALUES (%s, %s, %s, 'pending', NOW())
        """,
        (oid, data.get("user_id"), data.get("total"))
    )

    return {"action": "order_created", "order_id": oid}
|
||||
|
||||
|
||||
def handle_order_paid(cursor, data: dict):
    """Mark an order as paid and record the payment details."""
    oid = data.get("order_id") or data.get("id")

    # Record the payment and flip the order into 'paid'.
    cursor.execute(
        """
        UPDATE orders
        SET status = 'paid',
            paid_amount = %s,
            payment_id = %s,
            paid_at = NOW()
        WHERE id = %s
        """,
        (data.get("amount"), data.get("payment_id"), oid)
    )

    # Fulfillment would be kicked off asynchronously from here.
    # enqueue_fulfillment(oid)

    return {"action": "order_paid", "order_id": oid}
|
||||
|
||||
|
||||
def handle_order_shipped(cursor, data: dict):
    """Mark an order as shipped and store carrier/tracking details."""
    oid = data.get("order_id") or data.get("id")

    # Persist shipment details and flip the order into 'shipped'.
    cursor.execute(
        """
        UPDATE orders
        SET status = 'shipped',
            tracking_number = %s,
            carrier = %s,
            shipped_at = NOW()
        WHERE id = %s
        """,
        (data.get("tracking_number"), data.get("carrier"), oid)
    )

    # A shipping notification would be sent asynchronously from here.
    # send_shipping_email(oid)

    return {"action": "order_shipped", "order_id": oid}
|
||||
|
||||
|
||||
def get_logger():
    """Return the logger scoped to this module's name."""
    # Imported lazily to match the original's deferred-import style.
    from logging import getLogger
    return getLogger(__name__)
|
||||
Reference in New Issue
Block a user