Reasoning

A collapsible component that displays AI reasoning content, automatically opening during streaming and closing when finished.

"use client";import {  Reasoning,  ReasoningContent,  ReasoningTrigger,} from "@/components/ai-elements/elements/reasoning";import { useCallback, useEffect, useState } from "react";const reasoningSteps = [  "Let me think about this problem step by step.",  "\n\nFirst, I need to understand what the user is asking for.",  "\n\nThey want a reasoning component that opens automatically when streaming begins and closes when streaming finishes. The component should be composable and follow existing patterns in the codebase.",  "\n\nThis seems like a collapsible component with state management would be the right approach.",].join("");const Example = () => {  const [content, setContent] = useState("");  const [isStreaming, setIsStreaming] = useState(false);  const [currentTokenIndex, setCurrentTokenIndex] = useState(0);  const [tokens, setTokens] = useState<string[]>([]);  // Function to chunk text into fake tokens of 3-4 characters  const chunkIntoTokens = useCallback((text: string): string[] => {    const tokens: string[] = [];    let i = 0;    while (i < text.length) {      const chunkSize = Math.floor(Math.random() * 2) + 3; // Random size between 3-4      tokens.push(text.slice(i, i + chunkSize));      i += chunkSize;    }    return tokens;  }, []);  useEffect(() => {    const tokenizedSteps = chunkIntoTokens(reasoningSteps);    setTokens(tokenizedSteps);    setContent("");    setCurrentTokenIndex(0);    setIsStreaming(true);  }, [chunkIntoTokens]);  useEffect(() => {    if (!isStreaming || currentTokenIndex >= tokens.length) {      if (isStreaming) {        setIsStreaming(false);      }      return;    }    const timer = setTimeout(() => {      setContent((prev) => prev + tokens[currentTokenIndex]);      setCurrentTokenIndex((prev) => prev + 1);    }, 25); // Faster interval since we're streaming smaller chunks    return () => clearTimeout(timer);  }, [isStreaming, currentTokenIndex, tokens]);  return (    <div className="w-full p-4" style={{ height: "300px" }}>      <Reasoning className="w-full" isStreaming={isStreaming}>        <ReasoningTrigger />        <ReasoningContent>{content}</ReasoningContent>      </Reasoning>    </div>  );};export default Example;

Installation

npx ai-elements@latest add reasoning
npx shadcn@latest add @ai-elements/reasoning
"use client";import { useControllableState } from "@radix-ui/react-use-controllable-state";import {  Collapsible,  CollapsibleContent,  CollapsibleTrigger,} from "@repo/shadcn-ui/components/ui/collapsible";import { cn } from "@repo/shadcn-ui/lib/utils";import { BrainIcon, ChevronDownIcon } from "lucide-react";import type { ComponentProps } from "react";import { createContext, memo, useContext, useEffect, useState } from "react";import { Response } from "./response";import { Shimmer } from "./shimmer";type ReasoningContextValue = {  isStreaming: boolean;  isOpen: boolean;  setIsOpen: (open: boolean) => void;  duration: number;};const ReasoningContext = createContext<ReasoningContextValue | null>(null);const useReasoning = () => {  const context = useContext(ReasoningContext);  if (!context) {    throw new Error("Reasoning components must be used within Reasoning");  }  return context;};export type ReasoningProps = ComponentProps<typeof Collapsible> & {  isStreaming?: boolean;  open?: boolean;  defaultOpen?: boolean;  onOpenChange?: (open: boolean) => void;  duration?: number;};const AUTO_CLOSE_DELAY = 1000;const MS_IN_S = 1000;export const Reasoning = memo(  ({    className,    isStreaming = false,    open,    defaultOpen = true,    onOpenChange,    duration: durationProp,    children,    ...props  }: ReasoningProps) => {    const [isOpen, setIsOpen] = useControllableState({      prop: open,      defaultProp: defaultOpen,      onChange: onOpenChange,    });    const [duration, setDuration] = useControllableState({      prop: durationProp,      defaultProp: 0,    });    const [hasAutoClosed, setHasAutoClosed] = useState(false);    const [startTime, setStartTime] = useState<number | null>(null);    // Track duration when streaming starts and ends    useEffect(() => {      if (isStreaming) {        if (startTime === null) {          setStartTime(Date.now());        }      } else if (startTime !== null) {        setDuration(Math.ceil((Date.now() - startTime) / MS_IN_S));        setStartTime(null);      }    }, [isStreaming, startTime, setDuration]);    // Auto-open when streaming starts, auto-close when streaming ends (once only)    useEffect(() => {      if (defaultOpen && !isStreaming && isOpen && !hasAutoClosed) {        // Add a small delay before closing to allow user to see the content        const timer = setTimeout(() => {          setIsOpen(false);          setHasAutoClosed(true);        }, AUTO_CLOSE_DELAY);        return () => clearTimeout(timer);      }    }, [isStreaming, isOpen, defaultOpen, setIsOpen, hasAutoClosed]);    const handleOpenChange = (newOpen: boolean) => {      setIsOpen(newOpen);    };    return (      <ReasoningContext.Provider        value={{ isStreaming, isOpen, setIsOpen, duration }}      >        <Collapsible          className={cn("not-prose mb-4", className)}          onOpenChange={handleOpenChange}          open={isOpen}          {...props}        >          {children}        </Collapsible>      </ReasoningContext.Provider>    );  });export type ReasoningTriggerProps = ComponentProps<typeof CollapsibleTrigger>;const getThinkingMessage = (isStreaming: boolean, duration?: number) => {  if (isStreaming || duration === 0) {    return <Shimmer duration={1}>Thinking...</Shimmer>;  }  if (duration === undefined) {    return <p>Thought for a few seconds</p>;  }  return <p>Thought for {duration} seconds</p>;};export const ReasoningTrigger = memo(  ({ className, children, ...props }: ReasoningTriggerProps) => {    const { isStreaming, isOpen, duration } = 
useReasoning();    return (      <CollapsibleTrigger        className={cn(          "flex w-full items-center gap-2 text-muted-foreground text-sm transition-colors hover:text-foreground",          className        )}        {...props}      >        {children ?? (          <>            <BrainIcon className="size-4" />            {getThinkingMessage(isStreaming, duration)}            <ChevronDownIcon              className={cn(                "size-4 transition-transform",                isOpen ? "rotate-180" : "rotate-0"              )}            />          </>        )}      </CollapsibleTrigger>    );  });export type ReasoningContentProps = ComponentProps<  typeof CollapsibleContent> & {  children: string;};export const ReasoningContent = memo(  ({ className, children, ...props }: ReasoningContentProps) => (    <CollapsibleContent      className={cn(        "mt-4 text-sm",        "data-[state=closed]:fade-out-0 data-[state=closed]:slide-out-to-top-2 data-[state=open]:slide-in-from-top-2 text-muted-foreground outline-none data-[state=closed]:animate-out data-[state=open]:animate-in",        className      )}      {...props}    >      <Response className="grid gap-2">{children}</Response>    </CollapsibleContent>  ));Reasoning.displayName = "Reasoning";ReasoningTrigger.displayName = "ReasoningTrigger";ReasoningContent.displayName = "ReasoningContent";

Usage

import {
  Reasoning,
  ReasoningContent,
  ReasoningTrigger,
} from '@/components/ai-elements/reasoning';
<Reasoning className="w-full" isStreaming={false}>
  <ReasoningTrigger />
  <ReasoningContent>I need to compute the square of 2.</ReasoningContent>
</Reasoning>

Usage with AI SDK

Build a chatbot with reasoning using DeepSeek R1.

Add the following component to your frontend:

app/page.tsx
'use client';

import {
  Reasoning,
  ReasoningContent,
  ReasoningTrigger,
} from '@/components/ai-elements/reasoning';
import {
  Conversation,
  ConversationContent,
  ConversationScrollButton,
} from '@/components/ai-elements/conversation';
import {
  PromptInput,
  PromptInputTextarea,
  PromptInputSubmit,
} from '@/components/ai-elements/prompt-input';
import { Loader } from '@/components/ai-elements/loader';
import { Message, MessageContent } from '@/components/ai-elements/message';
import { useState } from 'react';
import { useChat } from '@ai-sdk/react';
import { Response } from '@/components/ai-elements/response';

const ReasoningDemo = () => {
  const [input, setInput] = useState('');

  const { messages, sendMessage, status } = useChat();

  const handleSubmit = (e: React.FormEvent) => {
    e.preventDefault();
    sendMessage({ text: input });
    setInput('');
  };

  return (
    <div className="max-w-4xl mx-auto p-6 relative size-full rounded-lg border h-[600px]">
      <div className="flex flex-col h-full">
        <Conversation>
          <ConversationContent>
            {messages.map((message) => (
              <Message from={message.role} key={message.id}>
                <MessageContent>
                  {message.parts.map((part, i) => {
                    switch (part.type) {
                      case 'text':
                        return (
                          <Response key={`${message.id}-${i}`}>
                            {part.text}
                          </Response>
                        );
                      case 'reasoning':
                        return (
                          <Reasoning
                            key={`${message.id}-${i}`}
                            className="w-full"
                            isStreaming={
                              status === 'streaming' &&
                              i === message.parts.length - 1 &&
                              message.id === messages.at(-1)?.id
                            }
                          >
                            <ReasoningTrigger />
                            <ReasoningContent>{part.text}</ReasoningContent>
                          </Reasoning>
                        );
                    }
                  })}
                </MessageContent>
              </Message>
            ))}
            {status === 'submitted' && <Loader />}
          </ConversationContent>
          <ConversationScrollButton />
        </Conversation>

        <PromptInput
          onSubmit={handleSubmit}
          className="mt-4 w-full max-w-2xl mx-auto relative"
        >
          <PromptInputTextarea
            value={input}
            placeholder="Say something..."
            onChange={(e) => setInput(e.currentTarget.value)}
            className="pr-12"
          />
          <PromptInputSubmit
            status={status === 'streaming' ? 'streaming' : 'ready'}
            disabled={!input.trim()}
            className="absolute bottom-1 right-1"
          />
        </PromptInput>
      </div>
    </div>
  );
};

export default ReasoningDemo;

Add the following route to your backend:

app/api/chat/route.ts
import { streamText, UIMessage, convertToModelMessages } from 'ai';

// Allow streaming responses up to 30 seconds
export const maxDuration = 30;

export async function POST(req: Request) {
  const { messages }: { messages: UIMessage[] } = await req.json();

  const result = streamText({
    model: 'deepseek/deepseek-r1',
    messages: convertToModelMessages(messages),
  });

  return result.toUIMessageStreamResponse({
    sendReasoning: true,
  });
}

Features

  • Automatically opens when streaming content and closes when finished
  • Manual toggle control for user interaction (see the controlled-state sketch after this list)
  • Smooth animations and transitions powered by Radix UI
  • Visual streaming indicator with pulsing animation
  • Composable architecture with separate trigger and content components
  • Built with accessibility in mind including keyboard navigation
  • Responsive design that works across different screen sizes
  • Seamlessly integrates with both light and dark themes
  • Built on top of shadcn/ui Collapsible primitives
  • TypeScript support with proper type definitions
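
The open state can also be controlled from the parent through the open and onOpenChange props (backed by useControllableState in the source above). The sketch below is a minimal controlled wrapper; ControlledReasoning and its text / isStreaming props are illustrative names, and the import path assumes the component was installed as shown in Usage.

'use client';

import {
  Reasoning,
  ReasoningContent,
  ReasoningTrigger,
} from '@/components/ai-elements/reasoning';
import { useState } from 'react';

// Illustrative wrapper: the parent owns the open state; Reasoning reports
// toggle (and post-streaming auto-close) requests through onOpenChange.
const ControlledReasoning = ({
  text,
  isStreaming,
}: {
  text: string;
  isStreaming: boolean;
}) => {
  const [open, setOpen] = useState(true);

  return (
    <Reasoning
      className="w-full"
      isStreaming={isStreaming}
      open={open}
      onOpenChange={setOpen}
    >
      <ReasoningTrigger />
      <ReasoningContent>{text}</ReasoningContent>
    </Reasoning>
  );
};

export default ControlledReasoning;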

Props

<Reasoning />

Prop            Type
isStreaming?    boolean
open?           boolean
defaultOpen?    boolean
onOpenChange?   (open: boolean) => void
duration?       number
...props        ComponentProps<typeof Collapsible>

<ReasoningTrigger />

Prop        Type
...props    ComponentProps<typeof CollapsibleTrigger>
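
ReasoningTrigger falls back to the default brain icon, thinking message, and chevron only when it receives no children, so you can pass your own trigger content. A minimal sketch, with an illustrative label and a hard-coded duration:

import {
  Reasoning,
  ReasoningContent,
  ReasoningTrigger,
} from '@/components/ai-elements/reasoning';

// Illustrative example: custom trigger children replace the default
// icon + "Thinking..." / "Thought for N seconds" label.
const CustomTriggerExample = ({ reasoningText }: { reasoningText: string }) => (
  <Reasoning className="w-full" isStreaming={false} duration={4}>
    <ReasoningTrigger>
      <span className="font-medium">View model reasoning</span>
    </ReasoningTrigger>
    <ReasoningContent>{reasoningText}</ReasoningContent>
  </Reasoning>
);

export default CustomTriggerExample;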

<ReasoningContent />

Prop        Type
children    string
...props    ComponentProps<typeof CollapsibleContent>