The voice chat packages (@zooly/voice-chat-client and @zooly/voice-chat-srv) are consumed by zooly-app through API routes and a voiceCallLogApi adapter.
To integrate voice chat into an app:
1. Add @zooly/voice-chat-client and @zooly/voice-chat-srv to package.json.
2. Add both packages to transpilePackages in next.config.ts and add path mappings in tsconfig.json.
3. Create the API routes below and a voiceCallLogApi adapter.
4. Use the useVoiceConversation hook in a component or context provider.

POST /api/voice-call-log/create:

import { NextRequest, NextResponse } from "next/server";
import { createVoiceCallLogHandler } from "@zooly/voice-chat-srv";
/**
 * POST /api/voice-call-log/create
 *
 * Creates a voice call log row for the authenticated user by forwarding the
 * request's cookie header (used for auth) and the agent id to
 * createVoiceCallLogHandler from @zooly/voice-chat-srv.
 *
 * @returns 200 with { voiceCallLog: { id } } on success,
 *          401 when no auth-token cookie is present,
 *          error.status (or 500) with { error } on failure.
 */
export async function POST(request: NextRequest) {
  try {
    const cookieHeader = request.headers.get("cookie") || "";
    // Lightweight auth gate: require an auth-token cookie. An empty header
    // never contains "auth-token", so no separate emptiness check is needed.
    if (!cookieHeader.includes("auth-token")) {
      return NextResponse.json({ error: "Not authenticated" }, { status: 401 });
    }
    const { agentId } = await request.json();
    const voiceCallLog = await createVoiceCallLogHandler(cookieHeader, agentId);
    // Only expose the id to the client; the full log stays server-side.
    return NextResponse.json({ voiceCallLog: { id: voiceCallLog.id } });
  } catch (error: unknown) {
    // Strict-mode catch variables are `unknown`; read the optional
    // message/status fields defensively, matching the original fallbacks.
    const err = error as { message?: string; status?: number };
    return NextResponse.json(
      { error: err.message || "Failed to create voice call log" },
      { status: err.status || 500 }
    );
  }
}
PUT /api/voice-call-log/update:

import { NextRequest, NextResponse } from "next/server";
import { updateVoiceCallLogHandler } from "@zooly/voice-chat-srv";
/**
 * PUT /api/voice-call-log/update
 *
 * Replaces the messages of an existing voice call log via
 * updateVoiceCallLogHandler from @zooly/voice-chat-srv.
 *
 * @returns 200 with { success: true } on success,
 *          401 when no auth-token cookie is present,
 *          error.status (or 500) with { error } on failure.
 */
export async function PUT(request: NextRequest) {
  try {
    const cookieHeader = request.headers.get("cookie") || "";
    // Lightweight auth gate: require an auth-token cookie. An empty header
    // never contains "auth-token", so no separate emptiness check is needed.
    if (!cookieHeader.includes("auth-token")) {
      return NextResponse.json({ error: "Not authenticated" }, { status: 401 });
    }
    const { id, voiceCallMessages } = await request.json();
    await updateVoiceCallLogHandler(id, voiceCallMessages);
    return NextResponse.json({ success: true });
  } catch (error: unknown) {
    // Strict-mode catch variables are `unknown`; read the optional
    // message/status fields defensively, matching the original fallbacks.
    const err = error as { message?: string; status?: number };
    return NextResponse.json(
      { error: err.message || "Failed to update voice call log" },
      { status: err.status || 500 }
    );
  }
}
The consuming app creates a voiceCallLogApi adapter that bridges the hook to its API routes:
import { useVoiceConversation } from "@zooly/voice-chat-client";
import { zFetch } from "@/fetching/fetchWrapper";
import type { VoiceCallMessage } from "@zooly/types";
/**
 * Adapter bridging the useVoiceConversation hook to this app's API routes.
 * `create` opens a new call log and returns its id; `update` persists the
 * accumulated transcript messages for an existing log.
 */
const voiceCallLogApi = {
  async create(params: { accountId: string | null; agentId: string }) {
    const response = await zFetch("/api/voice-call-log/create", {
      method: "POST",
      body: params,
    });
    return { id: response.voiceCallLog.id };
  },
  async update(params: { id: string; voiceCallMessages: VoiceCallMessage[] }) {
    await zFetch("/api/voice-call-log/update", {
      method: "PUT",
      body: params,
    });
  },
};
// Wire the hook to the adapter above plus app-level lifecycle callbacks.
// NOTE(review): currentAccount, handleError, handleNewMessage,
// logUserJourneyEventApi and UserJourneyEvents come from the surrounding
// component/context — confirm they are in scope where this snippet is pasted.
const voiceChat = useVoiceConversation({
  // Null accountId is allowed; the create route forwards it as-is.
  accountId: currentAccount?.id ?? null,
  voiceCallLogApi,
  onConnect: () => console.log("Connected!"),
  onDisconnect: () => console.log("Disconnected!"),
  onError: (err) => handleError(err),
  onMessage: (msg) => handleNewMessage(msg),
  // Analytics hooks: record call start/end in the user journey log.
  onCallStarted: ({ agentId }) => {
    logUserJourneyEventApi(UserJourneyEvents.VOICE_CALL_STARTED, { agentId });
  },
  onCallEnded: ({ agentId }) => {
    logUserJourneyEventApi(UserJourneyEvents.VOICE_CALL_ENDED, { agentId });
  },
});
Typical usage flow:
- Call requestMicrophonePermission(), or let startConversation handle it.
- Call startConversation(agentId) with the ElevenLabs agent ID.
- Read voiceCallMessages to render the transcript.
- Use isAiSpeaking / isUserSpeaking / conversationMode / getStateLabel() for visual feedback.
- Call endConversation() when the user ends the call.