Azure-Connected Edition
Enterprise integration with PII-compliant architecture and real-time triage workflows
Version 1.0

EF Embedded provides two core APIs for enterprise portal integration: the Analytics API and the Triage API.
All data processing happens in Cloudflare Workers with PII stored exclusively in your Azure Blob Storage. APIs return metadata and statistics only (no PII values).
Base URL: https://api.strataweave.com/v1
All API requests require authentication via an API key in the request header.
Authorization: Bearer {API_KEY}
API keys are provisioned per environment (Sandbox and Production).
GET /analytics/runs: Fetch a list of file processing runs with pagination and filtering.
| Parameter | Type | Required | Description |
|---|---|---|---|
| limit | integer | No | Records per page (default: 50, max: 200) |
| offset | integer | No | Pagination offset (default: 0) |
| status | string | No | Filter by status: processing, completed, quarantine, failed |
| start_date | string | No | Filter runs after date (ISO 8601: YYYY-MM-DD) |
| partner | string | No | Filter by partner name (e.g., usaa, avient) |
{
"data": [
{
"run_id": "run_2025-01-15T14:32:10Z_usaa",
"partner": "usaa",
"file_name": "USAA_Enrollment_20250115.csv",
"azure_blob_path": "landing/usaa/2025-01-15/USAA_Enrollment_20250115.csv",
"status": "completed",
"started_at": "2025-01-15T14:32:10Z",
"completed_at": "2025-01-15T14:32:18Z",
"duration_seconds": 8,
"stats": {
"total_rows": 1247,
"valid_rows": 1198,
"quarantine_rows": 49,
"error_count": 52
},
"outputs": {
"normalized": "normalized/usaa/2025-01-15/USAA_Enrollment_20250115_normalized.csv",
"quarantine": "quarantine/usaa/2025-01-15/USAA_Enrollment_20250115_quarantine.csv",
"validation_report": "audit/usaa/2025-01-15/validation.json"
}
}
],
"pagination": {
"total": 1247,
"limit": 50,
"offset": 0,
"has_more": true
}
}
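The pagination block in the response drives paging: keep increasing offset until has_more is false. A minimal sketch, assuming Node.js 18+ with global fetch and the same EF_API_KEY environment variable used in the integration examples later in this document (the fetchAllRuns name is illustrative):

```javascript
// Page through /analytics/runs using limit/offset until has_more is false.
async function fetchAllRuns(filters = {}) {
  const baseUrl = 'https://api.strataweave.com/v1/analytics/runs';
  const limit = 200; // maximum page size documented above
  let offset = 0;
  const runs = [];

  while (true) {
    const params = new URLSearchParams({ ...filters, limit, offset });
    const response = await fetch(`${baseUrl}?${params}`, {
      headers: { 'Authorization': `Bearer ${process.env.EF_API_KEY}` }
    });
    const page = await response.json();
    runs.push(...page.data);
    if (!page.pagination.has_more) break;
    offset += limit;
  }
  return runs;
}

// Example: all completed USAA runs since January 1
const usaaRuns = await fetchAllRuns({ status: 'completed', partner: 'usaa', start_date: '2025-01-01' });
```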
Fetch aggregated statistics across all runs.
| Parameter | Type | Required | Description |
|---|---|---|---|
| start_date | string | Yes | Start date (ISO 8601: YYYY-MM-DD) |
| end_date | string | Yes | End date (ISO 8601: YYYY-MM-DD) |
| group_by | string | No | Aggregation: day, week, month, partner (default: day) |
{
"period": {
"start_date": "2025-01-01",
"end_date": "2025-01-31",
"group_by": "day"
},
"summary": {
"total_files": 342,
"total_rows": 456789,
"valid_rows": 441234,
"quarantine_rows": 15555,
"error_rate": 0.034,
"avg_processing_time_seconds": 12.4
},
"daily_breakdown": [
{
"date": "2025-01-15",
"files_processed": 14,
"total_rows": 18245,
"valid_rows": 17512,
"quarantine_rows": 733,
"error_count": 781,
"avg_processing_time_seconds": 11.2
}
],
"top_errors": [
{
"error_code": "INVALID_ZIP",
"count": 1247,
"percentage": 0.42
}
]
}
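The summary, daily_breakdown, and top_errors fields map directly onto a portal dashboard. A minimal sketch of shaping that payload for display, assuming stats is the parsed response body shown above (the summarizeStats name is illustrative):

```javascript
// Turn an aggregated-statistics response into dashboard-ready values.
function summarizeStats(stats) {
  const { summary, daily_breakdown, top_errors } = stats;

  return {
    headline: `${summary.total_files} files / ${summary.total_rows.toLocaleString()} rows, ` +
              `${(summary.error_rate * 100).toFixed(1)}% error rate`,
    // One point per day for a throughput chart
    chartSeries: daily_breakdown.map(day => ({
      date: day.date,
      validRows: day.valid_rows,
      quarantineRows: day.quarantine_rows
    })),
    // Most common validation failures, e.g. "INVALID_ZIP: 42%"
    topErrors: top_errors.map(e => `${e.error_code}: ${(e.percentage * 100).toFixed(0)}%`)
  };
}
```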
GET /triage/{run_id}/errors: Fetch row-level errors for a quarantine file.
| Parameter | Type | Required | Description |
|---|---|---|---|
| limit | integer | No | Records per page (default: 50, max: 200) |
| offset | integer | No | Pagination offset (default: 0) |
| error_code | string | No | Filter by specific error code |
{
"run_id": "run_2025-01-15T14:32:10Z_usaa",
"quarantine_blob_path": "quarantine/usaa/2025-01-15/USAA_Enrollment_20250115_quarantine.csv",
"total_errors": 49,
"errors": [
{
"row_id": "row_42",
"row_number": 42,
"field_name": "zip",
"field_value_hash": "a3f8d2e1...",
"error_code": "INVALID_ZIP",
"error_message": "ZIP code must be 5 digits",
"suggested_fix": {
"value": "30309",
"confidence": 0.89,
"reasoning": "Detected 4-digit ZIP, added leading zero based on city/state"
},
"context": {
"city": "Atlanta",
"state": "GA"
}
}
]
}
- field_value_hash: SHA-256 hash of the actual PII value (the value itself stays in Azure Blob)
- suggested_fix: AI-powered correction suggestion (optional, may be null)
- context: Additional fields from the row to help with correction (no PII)

POST /triage/{run_id}/fix: Submit a row-level correction for a quarantined record.
{
"row_id": "row_42",
"field_name": "zip",
"corrected_value": "30309",
"fix_type": "manual",
"user_id": "[email protected]",
"notes": "Added leading zero based on city context"
}
{
"status": "accepted",
"row_id": "row_42",
"field_name": "zip",
"corrected_value_hash": "c9f2a8d1...",
"applied_at": "2025-01-15T15:12:34Z",
"audit_id": "fix_2025-01-15T15:12:34Z_row42_zip",
"message": "Correction applied. Reprocess to generate clean file."
}
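The suggested_fix block pairs naturally with this endpoint: a reviewer can accept high-confidence AI suggestions in bulk and fall back to manual review for the rest. A sketch under those assumptions; the 0.85 confidence threshold and the applySuggestedFixes name are illustrative, and only the manual fix_type is documented here:

```javascript
// Apply high-confidence AI suggestions from GET /triage/{run_id}/errors
// via POST /triage/{run_id}/fix.
async function applySuggestedFixes(runId, errors, userId, minConfidence = 0.85) {
  const applied = [];
  for (const err of errors) {
    const suggestion = err.suggested_fix;
    if (!suggestion || suggestion.confidence < minConfidence) continue;

    const response = await fetch(
      `https://api.strataweave.com/v1/triage/${runId}/fix`,
      {
        method: 'POST',
        headers: {
          'Authorization': `Bearer ${process.env.EF_API_KEY}`,
          'Content-Type': 'application/json'
        },
        body: JSON.stringify({
          row_id: err.row_id,
          field_name: err.field_name,
          corrected_value: suggestion.value,
          fix_type: 'manual', // only 'manual' is documented in this guide
          user_id: userId,
          notes: `Accepted AI suggestion (confidence ${suggestion.confidence})`
        })
      }
    );
    applied.push(await response.json());
  }
  return applied;
}
```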
Reprocess a quarantine file after corrections have been applied.
{
"user_id": "[email protected]",
"webhook_url": "https://your-portal.example.com/api/webhooks/triage-complete",
"notes": "Reprocessing after 49 corrections"
}
{
"status": "reprocessing",
"reprocess_run_id": "run_2025-01-15T15:20:05Z_usaa_reprocess",
"original_run_id": "run_2025-01-15T14:32:10Z_usaa",
"started_at": "2025-01-15T15:20:05Z",
"estimated_completion": "2025-01-15T15:20:15Z",
"webhook_url": "https://your-portal.example.com/api/webhooks/triage-complete"
}
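Triggering reprocessing from your portal follows the same pattern as the other calls. A sketch only: the /triage/{run_id}/reprocess route is an assumption (this document shows the request and response bodies but not the exact path), as is the requestReprocess name.

```javascript
// Trigger reprocessing after corrections. NOTE: the /reprocess route is an
// assumed path for illustration; confirm the actual endpoint before use.
async function requestReprocess(runId, userId, webhookUrl) {
  const response = await fetch(
    `https://api.strataweave.com/v1/triage/${runId}/reprocess`,
    {
      method: 'POST',
      headers: {
        'Authorization': `Bearer ${process.env.EF_API_KEY}`,
        'Content-Type': 'application/json'
      },
      body: JSON.stringify({
        user_id: userId,
        webhook_url: webhookUrl,
        notes: 'Reprocessing after corrections'
      })
    }
  );
  return await response.json(); // contains reprocess_run_id, estimated_completion, ...
}
```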
// Fetch recent file processing runs
async function fetchRecentRuns() {
const response = await fetch(
'https://api.strataweave.com/v1/analytics/runs?limit=20&status=completed',
{
headers: {
'Authorization': `Bearer ${process.env.EF_API_KEY}`,
'Content-Type': 'application/json'
}
}
);
const data = await response.json();
return data.data;
}
// Display in your portal UI
const runs = await fetchRecentRuns();
runs.forEach(run => {
console.log(`${run.partner}: ${run.stats.valid_rows}/${run.stats.total_rows} valid`);
});
// Fetch quarantine errors
async function fetchQuarantineErrors(runId) {
const response = await fetch(
`https://api.strataweave.com/v1/triage/${runId}/errors?limit=100`,
{
headers: { 'Authorization': `Bearer ${process.env.EF_API_KEY}` }
}
);
return (await response.json()).errors;
}
// Apply correction
async function applyCorrection(runId, rowId, fieldName, correctedValue, userId) {
const response = await fetch(
`https://api.strataweave.com/v1/triage/${runId}/fix`,
{
method: 'POST',
headers: {
'Authorization': `Bearer ${process.env.EF_API_KEY}`,
'Content-Type': 'application/json'
},
body: JSON.stringify({
row_id: rowId,
field_name: fieldName,
corrected_value: correctedValue,
fix_type: 'manual',
user_id: userId
})
}
);
return await response.json();
}
// Example usage
const errors = await fetchQuarantineErrors('run_2025-01-15T14:32:10Z_usaa');
const result = await applyCorrection(
'run_2025-01-15T14:32:10Z_usaa',
'row_42',
'zip',
'30309',
'[email protected]'
);
console.log(`Fix applied: ${result.audit_id}`);
const express = require('express');
const crypto = require('crypto'); // used only if you add webhook signature verification
const app = express();
app.use(express.json());

// Placeholder: replace with your own notification channel (Slack, Teams, email, ...)
async function notifyOpsTeam(runId, stats) {
  console.log(`Notify Ops: ${runId} finished with ${stats.valid_rows}/${stats.total_rows} valid rows`);
}

// Webhook endpoint for reprocess completion
app.post('/api/webhooks/triage-complete', async (req, res) => {
  const { reprocess_run_id, status, stats } = req.body;
  console.log(`Reprocess ${reprocess_run_id} (${status}): ${stats.valid_rows}/${stats.total_rows} valid`);
  // Update your portal UI and notify the Ops team
  await notifyOpsTeam(reprocess_run_id, stats);
  res.status(200).json({ received: true });
});

app.listen(3000);
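The crypto module imported above is only needed if you verify webhook authenticity. The sketch below assumes an HMAC-SHA256 signature carried in an x-ef-signature header and a shared EF_WEBHOOK_SECRET; none of these are documented behavior, so confirm the actual signing scheme (if any) before relying on it. In production, verify against the raw request body rather than re-serialized JSON.

```javascript
const crypto = require('crypto');

// Hypothetical HMAC-SHA256 verification: the header name, secret source, and
// signing scheme are assumptions for illustration, not part of the documented API.
function verifyWebhookSignature(req, secret = process.env.EF_WEBHOOK_SECRET) {
  if (!secret) return false;
  const received = req.get('x-ef-signature') || '';
  const expected = crypto
    .createHmac('sha256', secret)
    .update(JSON.stringify(req.body))
    .digest('hex');
  // timingSafeEqual throws on length mismatch, so guard first
  if (received.length !== expected.length) return false;
  return crypto.timingSafeEqual(Buffer.from(received), Buffer.from(expected));
}
```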
| Code | Error Code | Description |
|---|---|---|
| 400 | INVALID_REQUEST | Missing or invalid parameters |
| 401 | UNAUTHORIZED | Invalid or missing API key |
| 404 | NOT_FOUND | Resource not found (e.g., run_id doesn't exist) |
| 429 | RATE_LIMIT_EXCEEDED | Too many requests |
| 500 | INTERNAL_ERROR | Server error (retry with exponential backoff) |
{
"error": {
"code": "INVALID_REQUEST",
"message": "Missing required parameter: row_id",
"details": {
"parameter": "row_id",
"expected": "string",
"received": null
},
"request_id": "req_2025-01-15T15:30:42Z"
}
}
| Tier | Requests/Minute | Requests/Hour | Burst |
|---|---|---|---|
| Sandbox | 60 | 1,000 | 10 |
| Production | 600 | 30,000 | 100 |
X-RateLimit-Limit: 1000
X-RateLimit-Remaining: 847
X-RateLimit-Reset: 1642345678
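A single request wrapper can combine the error envelope and rate-limit guidance above: surface error.code, error.message, and request_id on failure, and retry 429 and 500 responses with exponential backoff. A sketch; the efRequest name and retry counts are illustrative:

```javascript
// Wrapper that retries 429/500 responses with exponential backoff and
// surfaces the documented error envelope (code, message, request_id).
async function efRequest(path, options = {}, maxRetries = 5) {
  const url = `https://api.strataweave.com/v1${path}`;
  for (let attempt = 0; attempt <= maxRetries; attempt++) {
    const response = await fetch(url, {
      ...options,
      headers: {
        'Authorization': `Bearer ${process.env.EF_API_KEY}`,
        'Content-Type': 'application/json',
        ...(options.headers || {})
      }
    });

    if (response.status === 429 || response.status === 500) {
      // Exponential backoff: 1s, 2s, 4s, ... before the next attempt.
      // X-RateLimit-Remaining could also be checked here to throttle proactively.
      await new Promise(r => setTimeout(r, 1000 * 2 ** attempt));
      continue;
    }

    const body = await response.json();
    if (!response.ok) {
      throw new Error(`${body.error.code}: ${body.error.message} (request_id ${body.error.request_id})`);
    }
    return body;
  }
  throw new Error(`Request to ${path} failed after ${maxRetries + 1} attempts`);
}

// Example usage
const recent = await efRequest('/analytics/runs?limit=20');
```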
Your portal may need direct access to Azure Blob files for certain operations (e.g., downloading quarantine CSV for offline review).
All file paths returned by the Analytics API are relative paths within your Azure Blob Storage container.
const { BlobServiceClient } = require('@azure/storage-blob');

async function downloadQuarantineFile(blobPath) {
  const blobServiceClient = BlobServiceClient.fromConnectionString(
    process.env.AZURE_STORAGE_CONNECTION_STRING
  );
  const containerClient = blobServiceClient.getContainerClient('your-container');
  const blobClient = containerClient.getBlobClient(blobPath);
  const downloadResponse = await blobClient.download();
  const csvContent = await streamToString(downloadResponse.readableStreamBody);
  return csvContent;
}

// Collect a Node.js readable stream into a string
function streamToString(readableStream) {
  return new Promise((resolve, reject) => {
    const chunks = [];
    readableStream.on('data', (chunk) => chunks.push(chunk));
    readableStream.on('end', () => resolve(Buffer.concat(chunks).toString('utf8')));
    readableStream.on('error', reject);
  });
}