Keyboard shortcuts

Press ← or → to navigate between chapters

Press S or / to search in the book

Press ? to show this help

Press Esc to hide this help

Metering & Usage

The metering system ingests high-volume usage events, aggregates them by meter definition, and feeds the rating engine. It’s designed for correctness first (deduplication, idempotency) and scale second (ClickHouse-ready batch queries, Redis hot-path accumulators).

Usage Events

Usage events are CloudEvents-compatible JSON objects. Every event must have:

| Field | Type | Description |
|---|---|---|
| `id` | string | Client-generated unique ID — used for deduplication |
| `event_type` | string | Event type for meter matching (e.g., `api.request`, `storage.byte-hour`) |
| `customer_id` | string (UUIDv7) | Customer who generated this usage |
| `subscription_id` | string? | Subscription to attribute to (optional) |
| `timestamp` | datetime | When the event occurred (ISO 8601) |
| `properties` | object | Arbitrary key-value pairs for filtering and aggregation |

Send a Usage Event

# [curl]
# POST one usage event; the Idempotency-Key makes retries safe.
curl --request POST https://api.bill.sh/v1/events \
  --header "Authorization: Bearer $TOKEN" \
  --header "Content-Type: application/json" \
  --header "Idempotency-Key: evt-$UUID" \
  --data '{
    "id": "evt-2026-02-28-abc123",
    "event_type": "api.request",
    "customer_id": "01944b1f-0000-7000-8000-000000000001",
    "subscription_id": "01944b1f-0000-7000-8000-000000000003",
    "timestamp": "2026-02-28T14:22:01Z",
    "properties": {
      "model": "gpt-4-turbo",
      "input_tokens": 512,
      "output_tokens": 256,
      "region": "us-east-1"
    }
  }'
# [Python]
import requests
import uuid

# The event id doubles as the Idempotency-Key, so retries are deduplicated.
event_id = f"evt-{uuid.uuid4()}"

headers = {
    "Authorization": f"Bearer {TOKEN}",
    "Idempotency-Key": event_id,
}
payload = {
    "id": event_id,
    "event_type": "api.request",
    "customer_id": customer_id,
    "subscription_id": subscription_id,
    "timestamp": "2026-02-28T14:22:01Z",
    "properties": {
        "model": "gpt-4-turbo",
        "input_tokens": 512,
        "output_tokens": 256,
        "region": "us-east-1",
    },
}

resp = requests.post("https://api.bill.sh/v1/events", headers=headers, json=payload)
result = resp.json()
print(f"Accepted: {result['accepted']}, ID: {result['event_id']}")
// [Node.js]
// Use the explicit node: scheme so the builtin module can never be
// shadowed by an npm package named "crypto".
import { randomUUID } from "node:crypto";

// The event id doubles as the Idempotency-Key, so retries are deduplicated.
const eventId = `evt-${randomUUID()}`;
const resp = await fetch("https://api.bill.sh/v1/events", {
  method: "POST",
  headers: {
    "Authorization": `Bearer ${TOKEN}`,
    "Content-Type": "application/json",
    "Idempotency-Key": eventId,
  },
  body: JSON.stringify({
    id: eventId,
    event_type: "api.request",
    customer_id: customerId,
    subscription_id: subscriptionId,
    timestamp: new Date().toISOString(),
    properties: {
      model: "gpt-4-turbo",
      input_tokens: 512,
      output_tokens: 256,
      region: "us-east-1",
    },
  }),
});
const result = await resp.json();
console.log("Accepted:", result.accepted, "ID:", result.event_id);
// [Go]
import (
    "bytes"
    "encoding/json"
    "net/http"
    "github.com/google/uuid"
)

eventID := "evt-" + uuid.New().String()
body, _ := json.Marshal(map[string]interface{}{
    "id":              eventID,
    "event_type":      "api.request",
    "customer_id":     customerID,
    "subscription_id": subscriptionID,
    "timestamp":       "2026-02-28T14:22:01Z",
    "properties": map[string]interface{}{
        "model":         "gpt-4-turbo",
        "input_tokens":  512,
        "output_tokens": 256,
        "region":        "us-east-1",
    },
})
req, _ := http.NewRequest("POST", "https://api.bill.sh/v1/events", bytes.NewReader(body))
req.Header.Set("Authorization", "Bearer "+token)
req.Header.Set("Content-Type", "application/json")
req.Header.Set("Idempotency-Key", eventID)
resp, _ := http.DefaultClient.Do(req)
defer resp.Body.Close()

Important: The id field is your deduplication key. Re-sending an event with the same id is safe — it will be deduplicated and only counted once.

Batch Event Ingestion

For high-throughput workloads, send events in batches:

# [curl]
# Batch ingest; ids use 4-digit zero padding (evt-batch-0001, …) to match
# the Python/Node/Go batch examples and their %04d-style formatting.
curl -X POST https://api.bill.sh/v1/events/batch \
  -H "Authorization: Bearer $TOKEN" \
  -H "Content-Type: application/json" \
  -d '{
    "events": [
      {
        "id": "evt-batch-0001",
        "event_type": "api.request",
        "customer_id": "01944b1f-0000-7000-8000-000000000001",
        "timestamp": "2026-02-28T14:22:01Z",
        "properties": { "model": "gpt-4-turbo", "input_tokens": 512 }
      },
      {
        "id": "evt-batch-0002",
        "event_type": "api.request",
        "customer_id": "01944b1f-0000-7000-8000-000000000001",
        "timestamp": "2026-02-28T14:22:05Z",
        "properties": { "model": "gpt-4-turbo", "input_tokens": 1024 }
      }
    ]
  }'
# [Python]
# Build 100 events in one batch; ids run evt-batch-0001 … evt-batch-0100.
events = []
for i in range(1, 101):
    events.append(
        {
            "id": f"evt-batch-{i:04d}",
            "event_type": "api.request",
            "customer_id": customer_id,
            "subscription_id": subscription_id,
            "timestamp": "2026-02-28T14:22:01Z",
            "properties": {"model": "gpt-4-turbo", "input_tokens": 512 * i},
        }
    )

resp = requests.post(
    "https://api.bill.sh/v1/events/batch",
    headers={"Authorization": f"Bearer {TOKEN}"},
    json={"events": events},
)
result = resp.json()
print(f"Accepted: {result['accepted_count']}, Duplicates: {result.get('duplicate_count', 0)}")
// [Node.js]
const events = Array.from({ length: 100 }, (_, i) => ({
  id: `evt-batch-${String(i).padStart(4, "0")}`,
  event_type: "api.request",
  customer_id: customerId,
  subscription_id: subscriptionId,
  timestamp: new Date().toISOString(),
  properties: { model: "gpt-4-turbo", input_tokens: 512 * (i + 1) },
}));

const resp = await fetch("https://api.bill.sh/v1/events/batch", {
  method: "POST",
  headers: {
    "Authorization": `Bearer ${TOKEN}`,
    "Content-Type": "application/json",
  },
  body: JSON.stringify({ events }),
});
const result = await resp.json();
console.log(`Accepted: ${result.accepted_count}, Duplicates: ${result.duplicate_count ?? 0}`);
// [Go]
type Event struct {
    ID             string                 `json:"id"`
    EventType      string                 `json:"event_type"`
    CustomerID     string                 `json:"customer_id"`
    SubscriptionID string                 `json:"subscription_id"`
    Timestamp      string                 `json:"timestamp"`
    Properties     map[string]interface{} `json:"properties"`
}

events := make([]Event, 100)
for i := range events {
    events[i] = Event{
        ID:             fmt.Sprintf("evt-batch-%04d", i),
        EventType:      "api.request",
        CustomerID:     customerID,
        SubscriptionID: subscriptionID,
        Timestamp:      "2026-02-28T14:22:01Z",
        Properties:     map[string]interface{}{"model": "gpt-4-turbo", "input_tokens": 512 * (i + 1)},
    }
}
body, _ := json.Marshal(map[string]interface{}{"events": events})
req, _ := http.NewRequest("POST", "https://api.bill.sh/v1/events/batch", bytes.NewReader(body))
req.Header.Set("Authorization", "Bearer "+token)
req.Header.Set("Content-Type", "application/json")
resp, _ := http.DefaultClient.Do(req)
defer resp.Body.Close()

Meter Definitions

A MeterDefinition maps an event_type to an aggregation function. Multiple meters can match the same event type (e.g., one meter for input_tokens and another for output_tokens from the same api.request event).

Aggregation Types

| Type | Description | Field |
|---|---|---|
| Sum | Sum of a numeric property across all events | `properties.tokens` |
| Count | Number of events (ignores properties) | — |
| Max | Maximum value of a property | `properties.response_time_ms` |
| UniqueCount | Count of distinct values for a property | `properties.user_id` |

Window Types

| Type | Description |
|---|---|
| BillingPeriod | Aggregate across the entire billing period (most common) |
| Sliding | Rolling window (e.g., last 30 days) |
| Tumbling | Fixed non-overlapping windows (e.g., daily buckets) |

Query Usage Summary

Get the current usage summary for a subscription — shows each meter’s current value for the billing period:

# [curl]
# GET is curl's default method, so only the auth header is needed.
curl --header "Authorization: Bearer $TOKEN" \
  https://api.bill.sh/v1/subscriptions/$SUB_ID/usage
# [Python]
# Fetch the per-meter usage summary for the current billing period.
url = f"https://api.bill.sh/v1/subscriptions/{subscription_id}/usage"
resp = requests.get(url, headers={"Authorization": f"Bearer {TOKEN}"})
usage = resp.json()
print(f"Period: {usage['period_start']} → {usage['period_end']}")
for meter in usage["meters"]:
    print(f"  {meter['meter_id']}: {meter['value']} {meter.get('unit', '')}")
// [Node.js]
const resp = await fetch(
  `https://api.bill.sh/v1/subscriptions/${subscriptionId}/usage`,
  { headers: { "Authorization": `Bearer ${TOKEN}` } }
);
const usage = await resp.json();
console.log(`Period: ${usage.period_start} → ${usage.period_end}`);
for (const meter of usage.meters) {
  console.log(`  ${meter.meter_id}: ${meter.value} ${meter.unit ?? ""}`);
}
// [Go]
req, _ := http.NewRequest("GET",
    "https://api.bill.sh/v1/subscriptions/"+subscriptionID+"/usage", nil)
req.Header.Set("Authorization", "Bearer "+token)
resp, _ := http.DefaultClient.Do(req)
defer resp.Body.Close()
var usage map[string]interface{}
json.NewDecoder(resp.Body).Decode(&usage)
meters := usage["meters"].([]interface{})
for _, m := range meters {
    meter := m.(map[string]interface{})
    fmt.Printf("  %v: %v\n", meter["meter_id"], meter["value"])
}

Response:

{
  "subscription_id": "01944b1f-0000-7000-8000-000000000003",
  "period_start": "2026-02-28T00:00:00Z",
  "period_end": "2026-03-28T00:00:00Z",
  "meters": [
    {
      "meter_id": "meter-input-tokens",
      "event_type": "api.request",
      "aggregation": "Sum",
      "value": "128450",
      "unit": "tokens"
    },
    {
      "meter_id": "meter-requests",
      "event_type": "api.request",
      "aggregation": "Count",
      "value": "842",
      "unit": "requests"
    }
  ]
}

Hot-Path Accumulator

For real-time spend controls, the platform maintains an in-memory accumulator (backed by Redis in production) that updates on every event ingestion. The spend alert service reads from this accumulator to enforce SoftLimit and HardLimit thresholds without round-tripping to ClickHouse.

ClickHouse Integration

For analytics queries (cohort analysis, revenue forecasting, usage breakdown by property), events are streamed to ClickHouse via Kafka. The schema uses a materialized view for efficient aggregation by (customer_id, event_type, billing_period, property_key).