// Quick Start

Get your first tick in 3 steps:

| Step | Action |
| --- | --- |
| 1. Get an API key | Contact the Fermi team to receive your key (UUID format) |
| 2. Choose a protocol | WebSocket for browser/JSON clients, gRPC for backend/high-throughput |
| 3. Connect and consume | See examples below. Ticks arrive in batches every ~100ms |
GET /health

Public health check. No auth required. Use this to verify the proxy is up before connecting.

$ curl https://tickstream.fermi.trade/health
{"status":"ok","upstream":"connected","receivers":3}

// Authentication

All streaming endpoints require an API key. Each key has a per-key concurrent connection limit (default: 5). Opening a 6th connection with the same key returns 429 / RESOURCE_EXHAUSTED until an existing connection closes.

| Protocol | How to Authenticate |
| --- | --- |
| WebSocket | Query parameter: `?api_key=YOUR_KEY` |
| gRPC | Metadata header: `x-api-key: YOUR_KEY` |
Key Format: API keys are UUIDs (e.g. a1b2c3d4-e5f6-7890-abcd-ef1234567890). Keys can be enabled/disabled by the admin without regeneration. Disabled keys are rejected immediately.

// WebSocket

WS wss://tickstream.fermi.trade/ticks?api_key=YOUR_KEY

Stream ticks as JSON over WebSocket. Ticks are batched — each message contains up to 1,000 ticks.

Server Message Types

| type | Fields | Description |
| --- | --- | --- |
| "ticks" | count, data[] | Batch of ticks. count is the array length, data contains tick objects. |
| "status" | upstream | Upstream state changed (e.g. "reconnecting", "connected"). You stay connected — ticks resume when upstream recovers. |
| "lag" | dropped | Your client fell behind. dropped ticks were skipped. Process ticks faster or offload to a queue. |

Production Example

Node.js
Python
Go
Browser
// npm install ws
const WebSocket = require("ws");

const API_KEY = process.env.FERMI_API_KEY;
// Fail fast instead of connecting with "?api_key=undefined" and
// getting an opaque 401 from the server.
if (!API_KEY) {
  throw new Error("FERMI_API_KEY environment variable is not set");
}
const URL = `wss://tickstream.fermi.trade/ticks?api_key=${API_KEY}`;

// Connects and consumes the tick stream, reconnecting 3s after any close.
function connect() {
  const ws = new WebSocket(URL);
  let tickCount = 0;

  ws.on("message", (raw) => {
    let msg;
    try {
      msg = JSON.parse(raw);
    } catch (err) {
      // A single malformed frame should not crash the consumer.
      console.error("Malformed message:", err.message);
      return;
    }
    switch (msg.type) {
      case "ticks":
        for (const tick of msg.data) {
          tickCount++;
          // Process tick — offload heavy work to avoid lag
          processTick(tick);
        }
        break;
      case "lag":
        console.warn(`Dropped ${msg.dropped} ticks — consumer too slow`);
        break;
      case "status":
        console.log(`Upstream: ${msg.upstream}`);
        break;
      default:
        // Ignore unknown message types for forward compatibility.
        break;
    }
  });

  // "close" always follows "error" in ws, so reconnecting here covers
  // both failure paths with exactly one scheduled reconnect.
  ws.on("close", () => {
    console.log("Disconnected. Reconnecting in 3s...");
    setTimeout(connect, 3000);
  });

  ws.on("error", (err) => console.error("WS error:", err.message));
}

function processTick(tick) {
  // Your logic here. tick.tick_number, tick.transactions, etc.
}

connect();
# pip install websockets
import asyncio, json, os
import websockets

API_KEY = os.environ["FERMI_API_KEY"]
URL = f"wss://tickstream.fermi.trade/ticks?api_key={API_KEY}"

async def stream():
    """Consume the tick stream forever, reconnecting 3s after any disconnect."""
    while True:
        try:
            async with websockets.connect(URL) as ws:
                print("Connected")
                async for raw in ws:
                    msg = json.loads(raw)
                    if msg["type"] == "ticks":
                        for tick in msg["data"]:
                            process_tick(tick)
                    elif msg["type"] == "lag":
                        print(f"Warning: dropped {msg['dropped']} ticks")
                    elif msg["type"] == "status":
                        print(f"Upstream: {msg['upstream']}")
            # A clean server close exits the `async with` without raising.
            print("Stream ended. Reconnecting in 3s...")
        except (websockets.ConnectionClosed, OSError) as e:
            print(f"Disconnected: {e}. Reconnecting in 3s...")
        # Sleep on every path — the original slept only in the except
        # branch, so a clean close caused an immediate tight reconnect loop.
        await asyncio.sleep(3)

def process_tick(tick):
    # tick["tick_number"], tick["transactions"], etc.
    pass

asyncio.run(stream())
// go get github.com/gorilla/websocket
package main

import (
    "encoding/json"
    "fmt"
    "log"
    "os"
    "time"
    "github.com/gorilla/websocket"
)

type Message struct {
    Type    string          `json:"type"`
    Count   int             `json:"count"`
    Data    json.RawMessage `json:"data"`
    Dropped int             `json:"dropped"`
}

func main() {
    url := fmt.Sprintf("wss://tickstream.fermi.trade/ticks?api_key=%s",
        os.Getenv("FERMI_API_KEY"))

    for {
        c, _, err := websocket.DefaultDialer.Dial(url, nil)
        if err != nil {
            log.Printf("Connect failed: %v. Retrying in 3s...", err)
            time.Sleep(3 * time.Second)
            continue
        }
        log.Println("Connected")
        for {
            _, raw, err := c.ReadMessage()
            if err != nil {
                log.Printf("Read error: %v. Reconnecting...", err)
                break
            }
            var msg Message
            json.Unmarshal(raw, &msg)
            switch msg.Type {
            case "ticks":
                // Process msg.Data ([]Tick as JSON)
                fmt.Printf("Received %d ticks\n", msg.Count)
            case "lag":
                log.Printf("Dropped %d ticks", msg.Dropped)
            }
        }
        c.Close()
        time.Sleep(3 * time.Second)
    }
}
// Browser — no dependencies needed
// NOTE: any key embedded in browser code is visible to end users.
// Use a key you are comfortable exposing, or proxy via your backend.
const API_KEY = "your-api-key-here";

function connect() {
  const ws = new WebSocket(
    `wss://tickstream.fermi.trade/ticks?api_key=${API_KEY}`
  );

  ws.onmessage = (e) => {
    let msg;
    try {
      msg = JSON.parse(e.data);
    } catch {
      return; // ignore a malformed frame rather than killing the handler
    }
    if (msg.type === "ticks") {
      msg.data.forEach((tick) => {
        console.log(
          `Tick #${tick.tick_number} | ` +
          `${tick.transactions.length} txns`
        );
      });
    }
  };

  // "close" fires after "error" too, so this is the single reconnect path.
  ws.onclose = () => setTimeout(connect, 3000);
  ws.onerror = (e) => console.error("WS error", e);
}

connect();

// gRPC

GRPC tickstream.fermi.trade:443

Stream ticks via gRPC with TLS. Binary protobuf encoding — higher throughput than WebSocket JSON.

Proto File

Download the proto and generate client stubs for your language. Only StreamTicks is available through the proxy.

// Tick-stream schema. Only StreamTicks is available through the proxy.
syntax = "proto3";
package continuum.sequencer.v1;

// Streams sequencer ticks to authenticated clients (x-api-key metadata).
service SequencerService {
  // Server-streaming RPC: one request in, an unbounded stream of Ticks out.
  rpc StreamTicks(StreamTicksRequest) returns (stream Tick);
}

message StreamTicksRequest {
  uint64 start_tick = 1;  // Always pass 0 (stream from latest)
}

// One sequencer tick: its ordered transactions plus the VDF proof
// chaining it to the previous tick.
message Tick {
  uint64   tick_number           = 1;  // monotonically increasing sequence number
  VdfProof vdf_proof              = 2;  // verifiable delay function proof (below)
  repeated OrderedTransaction transactions = 3;  // ordered txns; may be empty
  string   transaction_batch_hash = 4;  // hash over all transactions in this tick
  uint64   timestamp              = 5;  // microseconds since Unix epoch
  string   previous_output        = 6;  // previous tick's VDF output (chain link)
}

// Proof that the VDF output was correctly computed from the input.
message VdfProof {
  string input      = 1;  // hex — VDF input seed
  string output     = 2;  // hex — VDF computed output
  string proof      = 3;  // hex — correctness proof for output
  uint64 iterations  = 4;  // number of VDF iterations performed
}

// A transaction with its assigned position inside the tick.
message OrderedTransaction {
  Transaction transaction       = 1;  // the original submitted transaction
  uint64      sequence_number   = 2;  // position within the tick (0-indexed)
  string      tx_hash           = 3;  // unique transaction hash (hex)
  uint64      ingestion_timestamp = 4;  // when the sequencer received it (microseconds)
}

// Client-submitted transaction as originally received.
message Transaction {
  string tx_id      = 1;  // client-provided transaction identifier
  bytes  payload    = 2;  // application-specific transaction data
  bytes  signature  = 3;  // client's cryptographic signature
  bytes  public_key = 4;  // client's public key
  uint64 nonce      = 5;  // replay-protection nonce
  uint64 timestamp  = 6;  // microseconds since epoch
}

Production Example

Rust
Python
Go
Node.js
// cargo add tonic tonic-build prost tokio
use tonic::transport::{Channel, ClientTlsConfig};
use tonic::{Request, Status};
use tokio::time::{sleep, Duration};

/// Connects over TLS and consumes the tick stream forever,
/// reconnecting 3 seconds after any connect error or stream end.
async fn stream_ticks() -> Result<(), Box<dyn std::error::Error>> {
    let tls = ClientTlsConfig::new()
        .domain_name("tickstream.fermi.trade");
    let channel = Channel::from_static("https://tickstream.fermi.trade")
        .tls_config(tls)?
        .connect().await?;

    let mut client = SequencerServiceClient::new(channel);

    loop {
        // start_tick = 0 means "stream from latest" (see proto).
        let mut req = Request::new(StreamTicksRequest { start_tick: 0 });
        req.metadata_mut().insert(
            "x-api-key",
            std::env::var("FERMI_API_KEY")?.parse()?,
        );

        match client.stream_ticks(req).await {
            Ok(response) => {
                let mut stream = response.into_inner();
                // Drain the stream and surface its terminating Status —
                // `while let Ok(Some(..))` would silently swallow errors.
                loop {
                    match stream.message().await {
                        Ok(Some(tick)) => process_tick(&tick),
                        Ok(None) => {
                            eprintln!("Stream ended. Reconnecting...");
                            break;
                        }
                        Err(status) => {
                            eprintln!("Stream error: {}. Reconnecting...", status);
                            break;
                        }
                    }
                }
            }
            Err(e) => eprintln!("Connect error: {}. Retrying...", e),
        }
        sleep(Duration::from_secs(3)).await;
    }
}
# pip install grpcio grpcio-tools
# python -m grpc_tools.protoc -I. --python_out=. --grpc_python_out=. sequencer.proto
import grpc, os, time
from sequencer_pb2 import StreamTicksRequest
from sequencer_pb2_grpc import SequencerServiceStub

def stream():
    """Consume the tick stream forever, reconnecting 3s after any failure."""
    api_key = os.environ["FERMI_API_KEY"]
    creds = grpc.ssl_channel_credentials()

    while True:
        try:
            # Context manager closes the channel on every exit path —
            # the original leaked one channel per reconnect attempt.
            with grpc.secure_channel("tickstream.fermi.trade:443", creds) as channel:
                stub = SequencerServiceStub(channel)
                metadata = [("x-api-key", api_key)]

                # start_tick defaults to 0 = stream from latest.
                for tick in stub.StreamTicks(StreamTicksRequest(), metadata=metadata):
                    process_tick(tick)

                print("Stream ended. Reconnecting...")
        except grpc.RpcError as e:
            print(f"gRPC error: {e.code()} {e.details()}. Retrying...")
        time.sleep(3)

def process_tick(tick):
    print(f"Tick #{tick.tick_number} — {len(tick.transactions)} txns")

stream()
// protoc --go_out=. --go-grpc_out=. sequencer.proto
package main

import (
    "context"
    "crypto/tls"
    "io"
    "log"
    "os"
    "time"
    "google.golang.org/grpc"
    "google.golang.org/grpc/credentials"
    "google.golang.org/grpc/metadata"
    pb "your/module/sequencer"
)

func main() {
    creds := credentials.NewTLS(&tls.Config{})
    conn, err := grpc.Dial("tickstream.fermi.trade:443",
        grpc.WithTransportCredentials(creds))
    if err != nil { log.Fatal(err) }
    defer conn.Close()

    client := pb.NewSequencerServiceClient(conn)

    for {
        ctx := metadata.AppendToOutgoingContext(
            context.Background(),
            "x-api-key", os.Getenv("FERMI_API_KEY"))

        stream, err := client.StreamTicks(ctx,
            &pb.StreamTicksRequest{StartTick: 0})
        if err != nil {
            log.Printf("Connect: %v. Retrying...", err)
            time.Sleep(3 * time.Second)
            continue
        }

        for {
            tick, err := stream.Recv()
            if err == io.EOF { break }
            if err != nil {
                log.Printf("Stream: %v", err)
                break
            }
            processTick(tick)
        }
        time.Sleep(3 * time.Second)
    }
}
// npm install @grpc/grpc-js @grpc/proto-loader
const grpc = require("@grpc/grpc-js");
const protoLoader = require("@grpc/proto-loader");

const pkgDef = protoLoader.loadSync("sequencer.proto");
const proto = grpc.loadPackageDefinition(pkgDef)
    .continuum.sequencer.v1;

// Connects and consumes the tick stream, reconnecting 3s after any failure.
function connect() {
  const client = new proto.SequencerService(
    "tickstream.fermi.trade:443",
    grpc.credentials.createSsl()
  );

  // "error" and "end" can both fire for one disconnect; guard so we
  // schedule exactly one reconnect and close the old client (the
  // original could double-reconnect and leak clients).
  let reconnectScheduled = false;
  const reconnect = (reason) => {
    if (reconnectScheduled) return;
    reconnectScheduled = true;
    console.error(`${reason}. Reconnecting in 3s...`);
    client.close();
    setTimeout(connect, 3000);
  };

  const meta = new grpc.Metadata();
  meta.add("x-api-key", process.env.FERMI_API_KEY);

  const stream = client.StreamTicks(
    { start_tick: 0 }, meta
  );

  stream.on("data", (tick) => {
    console.log(
      `Tick #${tick.tick_number} — ` +
      `${tick.transactions.length} txns`
    );
  });

  stream.on("error", (err) => reconnect(`gRPC error: ${err.message}`));
  stream.on("end", () => reconnect("Stream ended"));
}

connect();

// Batching Behavior

The proxy does not send ticks one-by-one. Ticks are buffered and flushed in batches for efficiency.

| Parameter | Value | Description |
| --- | --- | --- |
| Flush interval | 100ms | Buffered ticks are flushed every 100ms regardless of count |
| Max batch size | 1,000 ticks | WebSocket messages are chunked at 1,000 ticks to stay under frame limits |
| Tick rate | ~5,800/s | Expect ~580 ticks per batch at full throughput, delivered ~10 times per second |

WebSocket vs gRPC

| | WebSocket | gRPC |
| --- | --- | --- |
| Encoding | JSON (strings for uint64) | Protobuf (native types) |
| Batching | Server-side batched into JSON arrays | Individual Tick messages streamed (server batches internally) |
| Best for | Browsers, rapid prototyping, JS/TS | Backend services, high-throughput consumers |
| Lag handling | "lag" message with dropped count | Skipped ticks logged server-side, stream continues |
Tip: If you receive "lag" messages, your consumer is too slow. Offload tick processing to a background worker or queue instead of processing inline.

// Data Types

Tick

| Field | WS Type | gRPC Type | Description |
| --- | --- | --- | --- |
| tick_number | string | uint64 | Monotonically increasing tick sequence number |
| timestamp | string | uint64 | Microseconds since Unix epoch when tick was produced |
| previous_output | string (hex) | string | Previous tick's VDF output — used for chain verification |
| transaction_batch_hash | string (hex) | string | Merkle hash of all transactions in this tick |
| vdf_proof | object | VdfProof | Verifiable Delay Function proof (see below) |
| transactions | array | repeated | Ordered transactions included in this tick (may be empty) |

VdfProof

| Field | WS Type | gRPC Type | Description |
| --- | --- | --- | --- |
| input | string (hex) | string | VDF input seed |
| output | string (hex) | string | VDF computed output |
| proof | string (hex) | string | Proof that output was correctly computed from input |
| iterations | string | uint64 | Number of VDF iterations performed |

OrderedTransaction

| Field | WS Type | gRPC Type | Description |
| --- | --- | --- | --- |
| transaction | object | Transaction | The original submitted transaction |
| sequence_number | string | uint64 | Position within the tick (0-indexed) |
| tx_hash | string (hex) | string | Unique transaction hash |
| ingestion_timestamp | string | uint64 | When the sequencer received this transaction (microseconds) |

Transaction

| Field | WS Type | gRPC Type | Description |
| --- | --- | --- | --- |
| tx_id | string | string | Client-provided transaction identifier |
| payload | string (hex) | bytes | Application-specific transaction data |
| signature | string (hex) | bytes | Client's cryptographic signature |
| public_key | string (hex) | bytes | Client's public key |
| nonce | string | uint64 | Replay protection nonce |
| timestamp | string | uint64 | Client-provided timestamp (microseconds) |
WebSocket Encoding: All uint64 fields are JSON strings to avoid JavaScript precision loss (Numbers above 2^53 are unsafe). All bytes fields are hex-encoded strings. gRPC uses native protobuf types.

Example Tick Message (WebSocket)

{
  "type": "ticks",
  "count": 2,
  "data": [
    {
      "tick_number": "48291053",
      "timestamp": "1710523847000000",
      "previous_output": "a1b2c3d4...",
      "transaction_batch_hash": "e5f6a7b8...",
      "vdf_proof": {
        "input": "0a1b...", "output": "c3d4...",
        "proof": "e5f6...", "iterations": "1000"
      },
      "transactions": []
    },
    {
      "tick_number": "48291054",
      "timestamp": "1710523847172000",
      ...
    }
  ]
}

// Errors & Limits

| Code | Protocol | Meaning | What to Do |
| --- | --- | --- | --- |
| 401 / UNAUTHENTICATED | Both | Invalid, missing, or disabled API key | Check your key. Contact admin if disabled. |
| 429 / RESOURCE_EXHAUSTED | Both | Connection limit exceeded for this key | Close unused connections. Default limit: 5 per key. |
| UNAVAILABLE | gRPC | Proxy shutting down | Reconnect after a few seconds. |

Reconnection Strategy

The proxy auto-reconnects to the sequencer on failure. During reconnection, your WebSocket gets a "status" message — you stay connected and ticks resume automatically. For client-side disconnects (network issues, server restart), implement reconnection with backoff:

// Recommended: exponential backoff with jitter
delay = min(30, 3 * 2^attempt) + random(0, 1)  // seconds