Custom UI Guide

Build a chat interface from scratch using headless hooks.

This guide shows a complete headless composition with:

  1. ChatProvider for backend configuration
  2. useThreadList() for the sidebar
  3. useThread() for messages and the composer

The goal is to show how those pieces fit together in one working example, not to prescribe a specific visual design.

import { useState } from "react";
import {
  ChatProvider,
  openAIMessageFormat,
  openAIReadableStreamAdapter,
  useThread,
  useThreadList,
} from "@openuidev/react-headless";

function ThreadSidebar() {
  const { threads, selectedThreadId, isLoadingThreads, selectThread, switchToNewThread } =
    useThreadList();

  return (
    <aside>
      <button onClick={switchToNewThread}>New chat</button>
      {isLoadingThreads ? <p>Loading threads...</p> : null}
      {threads.map((thread) => (
        <button
          key={thread.id}
          onClick={() => selectThread(thread.id)}
          aria-pressed={thread.id === selectedThreadId}
        >
          {thread.title}
        </button>
      ))}
    </aside>
  );
}

function MessageList() {
  const { messages, isRunning } = useThread();

  return (
    <div>
      {messages.map((message) => (
        <div key={message.id}>
          <strong>{message.role}:</strong> {String(message.content ?? "")}
        </div>
      ))}
      {isRunning ? <p>Thinking...</p> : null}
    </div>
  );
}

function Composer() {
  const { processMessage, cancelMessage, isRunning } = useThread();
  const [input, setInput] = useState("");

  return (
    <form
      onSubmit={(event) => {
        event.preventDefault();
        if (!input.trim() || isRunning) return;
        processMessage({ role: "user", content: input });
        setInput("");
      }}
    >
      <input
        value={input}
        onChange={(event) => setInput(event.target.value)}
        placeholder="Ask anything..."
      />
      {isRunning ? (
        <button type="button" onClick={cancelMessage}>
          Stop
        </button>
      ) : (
        <button type="submit">Send</button>
      )}
    </form>
  );
}

function CustomChat() {
  return (
    <div className="chat-app">
      <ThreadSidebar />
      <main>
        <MessageList />
        <Composer />
      </main>
    </div>
  );
}

export default function App() {
  return (
    <ChatProvider
      processMessage={async ({ messages, abortController }) => {
        return fetch("/api/chat", {
          method: "POST",
          headers: { "Content-Type": "application/json" },
          body: JSON.stringify({
            messages: openAIMessageFormat.toApi(messages),
          }),
          signal: abortController.signal,
        });
      }}
      threadApiUrl="/api/threads"
      streamProtocol={openAIReadableStreamAdapter()}
      messageFormat={openAIMessageFormat}
    >
      <CustomChat />
    </ChatProvider>
  );
}

This example uses the same backend assumptions as the built-in layouts:

  • openAIMessageFormat.toApi(messages) is called explicitly in processMessage to convert messages to OpenAI format — the messageFormat prop does not transform messages for processMessage
  • messageFormat={openAIMessageFormat} is still needed here because threadApiUrl is set — it tells the UI how to convert messages when loading saved thread history
  • openAIReadableStreamAdapter() matches the stream format produced by the OpenAI SDK's response.toReadableStream() on the server
  • threadApiUrl enables saved thread history

If you want Generative UI in a headless build, you also need to render structured assistant content yourself instead of relying on the built-in componentLibrary behavior from the layout components.

On this page