@helix-agents/store-cloudflare

Cloudflare storage implementations using D1 and Durable Objects.

Installation

bash
npm install @helix-agents/store-cloudflare

createCloudflareStore

Factory function for creating stores from bindings.

typescript
import { createCloudflareStore } from '@helix-agents/store-cloudflare';

const { stateStore, streamManager } = createCloudflareStore({
  db: env.AGENT_DB, // D1Database binding
  streams: env.STREAMS, // DurableObjectNamespace binding
});

Options

typescript
interface CreateCloudflareStoreOptions {
  // State store options
  stateOptions?: {
    logger?: Logger;
  };

  // Stream manager options
  streamOptions?: {
    bufferSize?: number;
    flushInterval?: number;
    logger?: Logger;
  };
}
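
For example, both option groups can be passed alongside the bindings. A sketch (the values are illustrative, and it is assumed the factory accepts the options in the same object as the bindings):

typescript
const { stateStore, streamManager } = createCloudflareStore({
  db: env.AGENT_DB,
  streams: env.STREAMS,
  stateOptions: { logger: console },
  streamOptions: {
    bufferSize: 200, // buffer more chunks before each flush
    flushInterval: 50, // flush every 50 ms
    logger: console,
  },
});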

D1StateStore

D1-backed state storage.

typescript
import { D1StateStore } from '@helix-agents/store-cloudflare';

const stateStore = new D1StateStore({
  database: env.AGENT_DB,
  logger: console,
});

Methods

typescript
// Save state (runId is inside state object)
await stateStore.save(state);

// Load state
const state = await stateStore.load(runId);

// Check existence
const exists = await stateStore.exists(runId);

// Update status
await stateStore.updateStatus(runId, 'completed');

// Get messages with pagination
const { messages, hasMore } = await stateStore.getMessages(runId, {
  offset: 0,
  limit: 50,
});
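
Since getMessages reports hasMore, a full history can be pulled with a simple offset loop. A minimal sketch using the documented return shape:

typescript
import type { Message } from '@helix-agents/store-cloudflare';

// Page through the full message history, 50 messages at a time.
const allMessages: Message[] = [];
let offset = 0;
let hasMore = true;
while (hasMore) {
  const page = await stateStore.getMessages(runId, { offset, limit: 50 });
  allMessages.push(...page.messages);
  hasMore = page.hasMore;
  offset += page.messages.length;
}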

Database Schema

sql
CREATE TABLE agent_state (
  run_id TEXT PRIMARY KEY,
  agent_type TEXT NOT NULL,
  status TEXT NOT NULL DEFAULT 'running',
  step_count INTEGER NOT NULL DEFAULT 0,
  custom_state TEXT NOT NULL DEFAULT '{}',
  messages TEXT NOT NULL DEFAULT '[]',
  output TEXT,
  error TEXT,
  stream_id TEXT,
  parent_agent_id TEXT,
  sub_agents TEXT NOT NULL DEFAULT '[]',
  aborted INTEGER DEFAULT 0,
  abort_reason TEXT,
  created_at TEXT NOT NULL DEFAULT (datetime('now')),
  updated_at TEXT NOT NULL DEFAULT (datetime('now'))
);

DurableObjectStreamManager

Stream management using Durable Objects.

typescript
import { DurableObjectStreamManager } from '@helix-agents/store-cloudflare';

const streamManager = new DurableObjectStreamManager({
  streamNamespace: env.STREAMS,
  bufferSize: 100, // Chunks to buffer before flush
  flushInterval: 100, // Flush interval (ms)
  logger: console,
});

Methods

typescript
// Create writer (implicitly creates stream)
const writer = await streamManager.createWriter(streamId, agentId, agentType);
await writer.write(chunk);
await writer.close();

// Create reader
const reader = await streamManager.createReader(streamId);
for await (const chunk of reader) {
  // Process chunk
}

// End stream
await streamManager.endStream(streamId);
await streamManager.endStream(streamId, output); // With final output

// Fail stream
await streamManager.failStream(streamId, 'Error message');

// Get stream info
const info = await streamManager.getInfo(streamId);

// Resumable reader
const resumableReader = await streamManager.createResumableReader(streamId, {
  fromSequence: 100,
});
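
A resumable reader is typically used to pick up after a disconnect. The sketch below assumes each StreamChunk carries a numeric sequence field (implied by fromSequence, but not shown here) and that the resumable reader is async-iterable like the plain reader:

typescript
// Track the last sequence seen so a dropped connection can resume where it left off.
let lastSequence = 0;
const resumable = await streamManager.createResumableReader(streamId, {
  fromSequence: lastSequence,
});
for await (const chunk of resumable) {
  lastSequence = chunk.sequence; // assumption: chunks expose their sequence number
  // Process chunk...
}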

Events

typescript
interface BufferOverflowEvent {
  streamId: string;
  droppedCount: number;
  bufferSize: number;
}

streamManager.on('bufferOverflow', (event: BufferOverflowEvent) => {
  console.warn('Buffer overflow:', event);
});

StreamServer

PartyServer-based Durable Object class for stream coordination. Must be exported from your worker.

typescript
import { StreamServer } from '@helix-agents/store-cloudflare';

// In worker.ts - re-export the class
export { StreamServer };

Custom Logger

To use a custom logger, derive a StreamServer class bound to your logger and re-export it:

typescript
import { StreamServer } from '@helix-agents/store-cloudflare';

const CustomStreamServer = StreamServer.withLogger(myLogger);

export { CustomStreamServer as StreamServer };
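
The exact Logger interface is not documented on this page; the sketch below assumes it accepts the usual console-style methods (console itself is used as a logger in the examples above):

typescript
// Hypothetical logger that forwards to console with a prefix.
const myLogger = {
  debug: (...args: unknown[]) => console.debug('[streams]', ...args),
  info: (...args: unknown[]) => console.info('[streams]', ...args),
  warn: (...args: unknown[]) => console.warn('[streams]', ...args),
  error: (...args: unknown[]) => console.error('[streams]', ...args),
};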

Migrations

D1 schema migrations.

typescript
import {
  runMigration,
  isMigrated,
  dropAllTables,
  getAgentsMigrationVersion,
  getAgentsTableNames,
  SCHEMA_MIGRATION_V1,
  CURRENT_SCHEMA_VERSION,
  TABLE_NAMES,
} from '@helix-agents/store-cloudflare';

// Run migration
await runMigration(env.AGENT_DB);

// Check if migrated
const migrated = await isMigrated(env.AGENT_DB);

// Get current version
const version = await getAgentsMigrationVersion(env.AGENT_DB);

// Drop all tables (for testing)
await dropAllTables(env.AGENT_DB);

Migration SQL

typescript
// Get the SQL for manual migration
console.log(SCHEMA_MIGRATION_V1);
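
One way to produce the migrations/0001_init.sql file used in the Migration Script below is to dump the exported SQL with a small local Node script (a sketch, assuming SCHEMA_MIGRATION_V1 is a plain SQL string):

typescript
// scripts/write-migration.ts: run locally with Node, not inside the Worker.
import { mkdirSync, writeFileSync } from 'node:fs';
import { SCHEMA_MIGRATION_V1 } from '@helix-agents/store-cloudflare';

mkdirSync('./migrations', { recursive: true });
writeFileSync('./migrations/0001_init.sql', SCHEMA_MIGRATION_V1);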

Errors

typescript
import {
  D1StateError, // Base D1 error
  StateNotFoundError, // State not found
  SubAgentRefNotFoundError,
  StreamConnectionError, // DO connection error
  StreamNotFoundError, // Stream not found
  SequenceConflictError, // Sequence mismatch
} from '@helix-agents/store-cloudflare';

try {
  const state = await stateStore.load(runId);
} catch (error) {
  if (error instanceof StateNotFoundError) {
    // Handle not found
  }
}
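
The stream errors follow the same pattern. It is not specified here which calls throw which error; the sketch below assumes createReader rejects with StreamNotFoundError for an unknown stream:

typescript
try {
  const reader = await streamManager.createReader(streamId);
  for await (const chunk of reader) {
    // Process chunk
  }
} catch (error) {
  if (error instanceof StreamNotFoundError) {
    // Stream does not exist (yet): retry later or report to the caller
  }
}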

Complete Example

wrangler.toml

toml
name = "my-agent-worker"
main = "src/worker.ts"
compatibility_date = "2024-12-01"
compatibility_flags = ["nodejs_compat"]

[[d1_databases]]
binding = "AGENT_DB"
database_name = "my-agents-db"
database_id = "xxx-xxx-xxx"

[[durable_objects.bindings]]
name = "STREAMS"
class_name = "StreamServer"

[[migrations]]
tag = "v1"
new_sqlite_classes = ["StreamServer"]

worker.ts

typescript
import {
  createCloudflareStore,
  StreamServer,
  runMigration,
} from '@helix-agents/store-cloudflare';

// Re-export Durable Object
export { StreamServer };

interface Env {
  AGENT_DB: D1Database;
  STREAMS: DurableObjectNamespace;
}

export default {
  async fetch(request: Request, env: Env) {
    // Ensure the schema is migrated (runs on every request here; prefer a deploy-time or scheduled task)
    await runMigration(env.AGENT_DB);

    // Create stores
    const { stateStore, streamManager } = createCloudflareStore({
      db: env.AGENT_DB,
      streams: env.STREAMS,
    });

    // Use stores...
    const state = await stateStore.load('run-123');

    return Response.json({ state });
  },
};

Migration Script

bash
# Create database
npx wrangler d1 create my-agents-db

# Run migration (creates tables)
npx wrangler d1 execute my-agents-db --file=./migrations/0001_init.sql

D1UsageStore

D1-backed usage tracking storage. Track LLM tokens, tool executions, sub-agent calls, and custom metrics in Cloudflare Workers.

typescript
import { D1UsageStore } from '@helix-agents/store-cloudflare';

const usageStore = new D1UsageStore({
  database: env.AGENT_DB,
  tableName: 'usage_entries',  // Optional, default: 'usage_entries'
});

Basic Usage

Pass the store to the executor to enable usage tracking:

typescript
const handle = await executor.execute(agent, 'Do the task', { usageStore });
await handle.result();

// Get aggregated usage
const rollup = await handle.getUsageRollup();
console.log(`Total tokens: ${rollup?.tokens.total}`);

Methods

recordEntry

Record a usage entry (called internally by the framework).

typescript
await usageStore.recordEntry({
  kind: 'tokens',
  runId: 'run-123',
  stepCount: 1,
  timestamp: Date.now(),
  source: { type: 'agent', name: 'my-agent' },
  model: 'gpt-4o',
  tokens: { prompt: 100, completion: 50, total: 150 },
});

getEntries

Get usage entries for a run with optional filtering.

typescript
// All entries
const entries = await usageStore.getEntries('run-123');

// Filter by kind
const tokenEntries = await usageStore.getEntries('run-123', {
  kinds: ['tokens'],
});

// Filter by step range
const midRunEntries = await usageStore.getEntries('run-123', {
  stepRange: { min: 5, max: 10 },
});

// Pagination
const page = await usageStore.getEntries('run-123', {
  limit: 10,
  offset: 20,
});

getRollup

Get aggregated usage rollup.

typescript
// This agent's usage only
const rollup = await usageStore.getRollup('run-123');

// Include sub-agent usage (lazy aggregation)
const totalRollup = await usageStore.getRollup('run-123', {
  includeSubAgents: true,
});

exists

Check if usage data exists for a run.

typescript
const hasUsage = await usageStore.exists('run-123');

delete

Delete usage data for a run.

typescript
await usageStore.delete('run-123');

getEntryCount

Get entry count without fetching entries.

typescript
const count = await usageStore.getEntryCount('run-123');

findRunIds

Find all tracked run IDs with optional filters.

typescript
// All run IDs
const runIds = await usageStore.findRunIds();

// With filters
const filteredRunIds = await usageStore.findRunIds({
  agentType: 'researcher',
  limit: 100,
});
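
These can be combined with getRollup to build a small usage report. A sketch using only the methods shown above:

typescript
// Sum total tokens across recent runs of one agent type.
const researcherRuns = await usageStore.findRunIds({ agentType: 'researcher', limit: 100 });
let totalTokens = 0;
for (const runId of researcherRuns) {
  const rollup = await usageStore.getRollup(runId);
  totalTokens += rollup?.tokens.total ?? 0;
}
console.log(`researcher runs: ${researcherRuns.length}, tokens: ${totalTokens}`);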

deleteOldEntries

Delete entries older than a specified age.

typescript
// Delete entries older than 7 days
await usageStore.deleteOldEntries(7 * 24 * 60 * 60 * 1000);

Database Schema

The D1 migration creates this table:

sql
CREATE TABLE usage_entries (
  id TEXT PRIMARY KEY,
  run_id TEXT NOT NULL,
  kind TEXT NOT NULL,
  step_count INTEGER NOT NULL,
  timestamp INTEGER NOT NULL,
  source_type TEXT NOT NULL,
  source_name TEXT NOT NULL,
  data TEXT NOT NULL,
  created_at TEXT NOT NULL DEFAULT (datetime('now'))
);

CREATE INDEX idx_usage_entries_run_id ON usage_entries(run_id);
CREATE INDEX idx_usage_entries_kind ON usage_entries(kind);
CREATE INDEX idx_usage_entries_timestamp ON usage_entries(timestamp);
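
For ad-hoc inspection the table can also be queried directly with D1 prepared statements; an illustrative sketch (normally you would go through D1UsageStore):

typescript
// Count stored usage entries per kind for one run, straight against D1.
const { results } = await env.AGENT_DB.prepare(
  'SELECT kind, COUNT(*) AS entries FROM usage_entries WHERE run_id = ?1 GROUP BY kind'
)
  .bind('run-123')
  .all();
console.log(results); // e.g. [{ kind: 'tokens', entries: 12 }]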

Migration

Run migration to create the usage table:

typescript
import { runMigration } from '@helix-agents/store-cloudflare';

// Run on worker startup or via scheduled task
await runMigration(env.AGENT_DB);

Complete Example

typescript
import {
  createCloudflareStore,
  D1UsageStore,
  runMigration,
} from '@helix-agents/store-cloudflare';

interface Env {
  AGENT_DB: D1Database;
  STREAMS: DurableObjectNamespace;
}

export default {
  async fetch(request: Request, env: Env) {
    await runMigration(env.AGENT_DB);

    const { stateStore, streamManager } = createCloudflareStore({
      db: env.AGENT_DB,
      streams: env.STREAMS,
    });

    const usageStore = new D1UsageStore({ database: env.AGENT_DB });

    // Execute agent with usage tracking
    // (`executor` and `agent` are assumed to be configured elsewhere; not shown here)
    const handle = await executor.execute(agent, 'Task', { usageStore });
    await handle.result();

    // Get usage data
    const rollup = await handle.getUsageRollup();

    return Response.json({
      tokens: rollup?.tokens.total,
      toolCalls: rollup?.toolStats.totalCalls,
    });
  },

  // Scheduled cleanup
  async scheduled(event: ScheduledEvent, env: Env) {
    const usageStore = new D1UsageStore({ database: env.AGENT_DB });
    // Delete entries older than 30 days
    await usageStore.deleteOldEntries(30 * 24 * 60 * 60 * 1000);
  },
};

Re-exported Types

For convenience, core types are re-exported:

typescript
import type {
  StateStore,
  StreamManager,
  StreamWriter,
  StreamReader,
  StreamChunk,
  ResumableStreamReader,
  ResumableReaderOptions,
  StreamInfo,
  ResumableStreamStatus,
  AgentState,
  Message,
  AgentStatus,
  MergeChanges,
} from '@helix-agents/store-cloudflare';
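
These make it possible to write helpers that work against any StateStore implementation, not just the D1-backed one. A sketch (getStatus is a hypothetical helper; it assumes AgentState exposes a status field, as the agent_state schema suggests):

typescript
import type { AgentStatus, StateStore } from '@helix-agents/store-cloudflare';

// Hypothetical helper that works with any StateStore implementation.
async function getStatus(store: StateStore, runId: string): Promise<AgentStatus | undefined> {
  if (!(await store.exists(runId))) return undefined;
  const state = await store.load(runId);
  return state.status;
}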
