Code Examples

Complete working examples for different platforms and languages. All examples use the same protocol and are compatible with OpenAI Realtime API clients.

JavaScript / Browser

Full-featured browser client with WebRTC, data channel, and function calling support.

Complete Browser Example
class VoiceClient {
  constructor(clientId, clientSecret, serverUrl = 'https://api.itannix.com') {
    this.clientId = clientId;
    this.clientSecret = clientSecret; // Device-generated secret
    this.serverUrl = serverUrl;
    this.peerConnection = null;
    this.dataChannel = null;
    this.session = null;
  }

  async connect() {
    // 1. Create session
    const sessionResponse = await fetch(`${this.serverUrl}/v1/realtime/sessions`, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        'X-Client-Id': this.clientId,
        'X-Client-Secret': this.clientSecret
      },
      body: JSON.stringify({
        modalities: ['text', 'audio']
      })
    });

    if (!sessionResponse.ok) {
      throw new Error(`Session creation failed: ${sessionResponse.status}`);
    }

    this.session = await sessionResponse.json();
    const { iceServers } = this.session;

    // 2. Create peer connection
    this.peerConnection = new RTCPeerConnection({
      iceServers: iceServers || [
        { urls: 'stun:stun.cloudflare.com:3478' }
      ]
    });

    // 3. Create data channel
    this.dataChannel = this.peerConnection.createDataChannel('messages', {
      ordered: true
    });

    this.dataChannel.onopen = () => {
      console.log('Data channel opened');
    };

    this.dataChannel.onmessage = (event) => {
      const message = JSON.parse(event.data);
      this.handleMessage(message);
    };

    // 4. Get user media
    const stream = await navigator.mediaDevices.getUserMedia({
      audio: {
        sampleRate: 48000,
        channelCount: 1,
        echoCancellation: true,
        noiseSuppression: true
      }
    });

    stream.getAudioTracks().forEach(track => {
      this.peerConnection.addTrack(track, stream);
    });

    // 5. Create and send offer
    const offer = await this.peerConnection.createOffer();
    await this.peerConnection.setLocalDescription(offer);

    // Wait for ICE gathering
    await new Promise((resolve) => {
      if (this.peerConnection.iceGatheringState === 'complete') {
        resolve();
      } else {
        this.peerConnection.onicegatheringstatechange = () => {
          if (this.peerConnection.iceGatheringState === 'complete') {
            resolve();
          }
        };
      }
    });

    // 6. Send SDP to server
    const sdpResponse = await fetch(`${this.serverUrl}/v1/realtime`, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/sdp',
        'X-Client-Id': this.clientId,
        'X-Client-Secret': this.clientSecret
      },
      body: this.peerConnection.localDescription.sdp
    });

    if (!sdpResponse.ok) {
      throw new Error(`SDP exchange failed: ${sdpResponse.status}`);
    }

    // 7. Handle remote audio (register before setting the remote
    // description so the track event is not missed)
    this.peerConnection.ontrack = (event) => {
      const remoteStream = event.streams[0];
      const audio = new Audio();
      audio.srcObject = remoteStream;
      audio.autoplay = true;
      // play() can be blocked by browser autoplay policies; ignore the rejection
      audio.play().catch(() => {});
    };

    // 8. Set remote description
    const answerSdp = await sdpResponse.text();
    await this.peerConnection.setRemoteDescription({
      type: 'answer',
      sdp: answerSdp
    });

    console.log('Connected!');
  }

  handleMessage(message) {
    // Handle user transcript
    if (message.type === 'conversation.item.input_audio_transcription.completed') {
      console.log('User:', message.transcript);
      return;
    }

    // Handle assistant transcript
    if (message.type === 'response.audio_transcript.delta') {
      console.log('Assistant (streaming):', message.delta);
      return;
    }

    if (message.type === 'response.audio_transcript.done') {
      console.log('Assistant (complete):', message.transcript);
      return;
    }

    // Handle function calls
    if (message.type === 'response.output_item.done' && message.item?.type === 'function_call') {
      const { call_id, name, arguments: args } = message.item;
      console.log('Function call:', name, args);
      
      // Execute function and send result
      this.sendFunctionResult(call_id, { result: 'success' });
    }
  }

  sendFunctionResult(callId, result) {
    if (!this.dataChannel || this.dataChannel.readyState !== 'open') {
      return;
    }

    this.dataChannel.send(JSON.stringify({
      type: 'conversation.item.create',
      item: {
        type: 'function_call_output',
        call_id: callId,
        output: JSON.stringify(result)
      }
    }));

    // Trigger response generation
    this.dataChannel.send(JSON.stringify({
      type: 'response.create'
    }));
  }

  disconnect() {
    if (this.dataChannel) {
      this.dataChannel.close();
    }
    if (this.peerConnection) {
      this.peerConnection.close();
    }
  }
}

// Usage
const client = new VoiceClient('YOUR_CLIENT_ID', 'YOUR_GENERATED_SECRET');
await client.connect();
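
The constructor expects a device-generated secret, mirroring the TOFU (trust-on-first-use) pattern shown in the ESP32 example below. A minimal sketch of generating and persisting one in the browser, assuming localStorage is acceptable for your threat model (the key name itannix_client_secret is illustrative):

function getOrCreateClientSecret(storageKey = 'itannix_client_secret') {
  // Reuse a previously generated secret if one exists (trust on first use)
  let secret = localStorage.getItem(storageKey);
  if (!secret) {
    // 16 random bytes -> 32 hex characters
    const bytes = crypto.getRandomValues(new Uint8Array(16));
    secret = Array.from(bytes, b => b.toString(16).padStart(2, '0')).join('');
    localStorage.setItem(storageKey, secret);
  }
  return secret;
}

const client = new VoiceClient('YOUR_CLIENT_ID', getOrCreateClientSecret());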

ESP32 / Arduino

ESP32 example covering credential storage (TOFU) and session creation over HTTP. Note: full WebRTC support on embedded devices requires additional libraries and significant resources.

ESP32 Example (Simplified)
#include <WiFi.h>
#include <HTTPClient.h>
#include <ArduinoJson.h>
#include <Preferences.h>

const char* ssid = "YOUR_WIFI_SSID";
const char* password = "YOUR_WIFI_PASSWORD";
const char* clientId = "YOUR_CLIENT_ID";
const char* serverUrl = "https://api.itannix.com";

Preferences preferences;
String clientSecret;

void setup() {
  Serial.begin(115200);
  
  // Load or generate client secret (TOFU)
  preferences.begin("credentials", false);
  clientSecret = preferences.getString("secret", "");
  if (clientSecret.isEmpty()) {
    // Generate and store a new secret
    clientSecret = generateSecret();
    preferences.putString("secret", clientSecret);
    Serial.println("Generated new client secret");
  }
  preferences.end();
  
  // Connect to WiFi
  WiFi.begin(ssid, password);
  while (WiFi.status() != WL_CONNECTED) {
    delay(500);
    Serial.print(".");
  }
  Serial.println("WiFi connected");
  
  // Create session
  createSession();
}

// Generate a 32-character hex secret (TOFU). For stronger entropy,
// consider deriving it from the hardware RNG (e.g. esp_random()).
String generateSecret() {
  String secret = "";
  for (int i = 0; i < 32; i++) {
    secret += String(random(0, 16), HEX);
  }
  return secret;
}

void createSession() {
  HTTPClient http;
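  // NOTE: depending on the ESP32 core version, HTTPS may require passing a
  // WiFiClientSecure (with a CA certificate or setInsecure()) to http.begin()
  // rather than a bare URL.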
  http.begin(String(serverUrl) + "/v1/realtime/sessions");
  http.addHeader("Content-Type", "application/json");
  http.addHeader("X-Client-Id", clientId);
  http.addHeader("X-Client-Secret", clientSecret);
  
  String payload = "{\"modalities\":[\"text\",\"audio\"]}";
  int httpResponseCode = http.POST(payload);
  
  if (httpResponseCode > 0) {
    String response = http.getString();
    Serial.println("Session created:");
    Serial.println(response);
    
    // Parse JSON response
    DynamicJsonDocument doc(2048);
    deserializeJson(doc, response);
    
    String sessionId = doc["id"].as<String>();
    
    Serial.print("Session ID: ");
    Serial.println(sessionId);
    
    // TODO: Implement WebRTC connection
  } else {
    Serial.print("Error creating session: ");
    Serial.println(httpResponseCode);
  }
  
  http.end();
}

void loop() {
  // WebRTC connection handling would go here
  delay(1000);
}

Note: Full WebRTC implementation on ESP32 is complex and may require additional hardware or a WebRTC gateway. Consider using a WebRTC-to-WebSocket bridge for embedded devices.
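
One possible shape for such a bridge, sketched in Node.js below: the device speaks a simple WebSocket protocol to the bridge, and the bridge holds the WebRTC and data-channel connection to the server on the device's behalf (using a Node WebRTC implementation, e.g. node-datachannel). The event framing here is illustrative only, not part of the ItanniX API.

WebSocket Bridge Sketch (Node.js)
import { WebSocketServer } from 'ws';

// Device-facing side of a hypothetical WebRTC-to-WebSocket bridge.
const wss = new WebSocketServer({ port: 8080 });

wss.on('connection', (deviceSocket) => {
  deviceSocket.on('message', (data) => {
    const message = JSON.parse(data);

    if (message.type === 'start') {
      // Create the upstream session and WebRTC connection using the
      // device's credentials (same flow as VoiceClient above), then
      // confirm back to the device. Omitted here for brevity.
      deviceSocket.send(JSON.stringify({ type: 'started' }));
      return;
    }

    // Forward other events (e.g. function_call_output) to the upstream
    // data channel, and relay server events back with deviceSocket.send().
  });

  deviceSocket.on('close', () => {
    // Tear down the upstream peer connection for this device.
  });
});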

Using Existing OpenAI Realtime Clients

If you already have code using OpenAI's Realtime API, you can adapt it with minimal changes:

OpenAI Client → ItanniX
// Before (OpenAI)
const openaiClient = new OpenAI({
  apiKey: 'sk-...',
  baseURL: 'https://api.openai.com/v1'
});

// After (ItanniX) - Change domain and add client credentials
const itannixClient = {
  baseURL: 'https://api.itannix.com',
  clientId: 'YOUR_CLIENT_ID',
  clientSecret: 'YOUR_GENERATED_SECRET', // Device-generated secret
  
  async createSession() {
    const response = await fetch(`${this.baseURL}/v1/realtime/sessions`, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        'X-Client-Id': this.clientId,
        'X-Client-Secret': this.clientSecret
      },
      body: JSON.stringify({
        modalities: ['text', 'audio']
        // No need for model, voice, instructions - auto-configured!
      })
    });
    return response.json();
  },
  
  async connectWebRTC(sdpOffer) {
    // Exchange the SDP offer using your device credentials
    const response = await fetch(`${this.baseURL}/v1/realtime`, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/sdp',
        'X-Client-Id': this.clientId,
        'X-Client-Secret': this.clientSecret
      },
      body: sdpOffer
    });
    return response.text();
  }
};
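
Usage then follows the same flow as the browser example: create the session, build an SDP offer with RTCPeerConnection, exchange it, and apply the returned answer. A sketch, assuming the session response exposes iceServers as shown above:

const session = await itannixClient.createSession();

const pc = new RTCPeerConnection({ iceServers: session.iceServers });
// ... add microphone tracks and a data channel as in VoiceClient.connect() ...
const offer = await pc.createOffer();
await pc.setLocalDescription(offer);

const answerSdp = await itannixClient.connectWebRTC(pc.localDescription.sdp);
await pc.setRemoteDescription({ type: 'answer', sdp: answerSdp });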