Hi @anshumanb, thanks for your help again
I have tried it many times, and the original streaming problem is solved with your method, but my front end still takes a long time to receive the first chunk of the stream. Could you help me take a look? I will post the code below.
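To be concrete about the delay: what I mean by "takes a long time" is the gap between sending the request and the first reader.read() resolving on the client. This is roughly how I think about measuring it (a simplified sketch, not my exact code; timeFirstChunk is just a made-up helper name and the request body is abbreviated):

// Sketch: time from sending the request to the first streamed chunk arriving.
// timeFirstChunk is a hypothetical helper, not part of my real component.
async function timeFirstChunk(body: unknown) {
  const start = performance.now();
  const response = await fetch('/api/chat', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(body),
  });
  const reader = response.body?.getReader();
  if (!reader) return;
  const { value } = await reader.read(); // resolves when the first chunk arrives
  console.log('time to first chunk (ms):', performance.now() - start, value?.length);
  await reader.cancel();
}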
My back-end code:
  try {
    const coreMessages = convertToCoreMessages(messages);
    const encoder = new TextEncoder();
    const stream = new TransformStream();
    const writer = stream.writable.getWriter();

    streamText({
      model: vertexProvider(model.apiIdentifier),
      system: modelId === 'claude-3-5-coder' ? CodePrompt : regularPrompt,
      messages: coreMessages,
      maxSteps: 5,
      onFinish: async ({ responseMessages }) => {
        if (session.user?.id) {
          try {
            const sanitizedMessages = sanitizeResponseMessages(responseMessages);
            await saveChat({
              id,
              messages: [...coreMessages, ...sanitizedMessages],
              userId: session.user.id,
            });
          } catch (error) {
            console.error('Failed to save chat:', error);
          }
        }
        await writer.close();
      }
    }).then(async (result) => {
      for await (const chunk of result.textStream) {
        let messageContent = chunk;
        try {
          const parsedChunk = JSON.parse(chunk);
          if (parsedChunk.type === 'error') {
            console.error('AI model error:', parsedChunk.error.message);
            const errorMessage = {
              id: generateUUID(),
              role: 'assistant',
              content: messageContent
            };
            await writer.write(
              encoder.encode(`data: ${JSON.stringify(errorMessage)}\n\n`)
            );
            await writer.close();
            break;
          }
        } catch (e) {
          // chunk was not JSON; treat it as plain text
        }
        const message = {
          id: generateUUID(),
          role: 'assistant',
          content: messageContent
        };
        console.log('stream output:', messageContent);
        if (messageContent.trim()) {
          await writer.write(
            encoder.encode(`data: ${JSON.stringify(message)}\n\n`)
          );
        }
      }
      console.log('Finished sending all chunks.');
    });

    return new Response(stream.readable, {
      headers: {
        'Content-Type': 'text/event-stream',
        'Cache-Control': 'no-cache',
        'Connection': 'keep-alive',
      },
    });
  } catch (error) {
    console.error('Request error:', error);
    return new Response('Error processing request', { status: 500 });
  }
}
My front-end code:
export function Chat({
  id,
  initialMessages,
  selectedModelId,
}: {
  id: string;
  initialMessages: Array<Message>;
  selectedModelId: string;
}) {
  const [isLoading, setIsLoading] = useState(false);
  const [messages, setMessages] = useState(initialMessages);
  const [input, setInput] = useState('');
  const [canvas, setCanvas] = useState<UICanvas | null>(null);
  const [attachments, setAttachments] = useState();

  const handleSubmit = async (e: React.FormEvent | { preventDefault?: () => void } | undefined) => {
    if (e?.preventDefault) {
      e.preventDefault();
    }
    if (!input.trim() || isLoading) return;

    setIsLoading(true);
    const userMessage: Message = {
      id: Date.now().toString(),
      role: 'user' as const,
      content: input
    };
    setMessages(prev => [...prev, userMessage]);
    const currentInput = input;
    setInput('');

    try {
      const response = await fetch('/api/chat', {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
        },
        body: JSON.stringify({
          id,
          messages: [...messages, { role: 'user', content: currentInput }],
          modelId: selectedModelId
        }),
      });
      if (!response.ok) {
        throw new Error(`HTTP error! status: ${response.status}`);
      }

      const assistantMessage: Message = {
        id: (Date.now() + 1).toString(),
        role: 'assistant' as const,
        content: ''
      };
      setMessages(prev => [...prev, assistantMessage]);

      const reader = response.body?.getReader();
      if (!reader) {
        throw new Error('No reader available');
      }
      const decoder = new TextDecoder();
      let buffer = '';

      try {
        while (true) {
          const { done, value } = await reader.read();
          if (done) break;
          const chunk = decoder.decode(value);
          buffer += chunk;

          while (buffer.includes('\n\n')) {
            const lineEnd = buffer.indexOf('\n\n');
            const line = buffer.slice(0, lineEnd);
            buffer = buffer.slice(lineEnd + 2);

            if (line.startsWith('data: ')) {
              try {
                const jsonData = JSON.parse(line.slice(6)); // slice(6) to remove 'data: '
                const chars = jsonData.content.split('');
                const batchSize = 2;
                for (let i = 0; i < chars.length; i += batchSize) {
                  const batch = chars.slice(i, i + batchSize).join('');
                  await new Promise(resolve => setTimeout(resolve, 10));
                  setMessages(prev => {
                    const newMessages = [...prev];
                    const lastMessage = newMessages[newMessages.length - 1];
                    if (lastMessage.role === 'assistant') {
                      return [
                        ...newMessages.slice(0, -1),
                        {
                          ...lastMessage,
                          content: lastMessage.content + batch
                        }
                      ];
                    }
                    return newMessages;
                  });
                }
              } catch (e) {
                console.error('Failed to parse line:', line);
              }
            }
          }
        }
      } finally {
        reader.releaseLock();
      }
    } catch (error) {
      console.error('Error:', error);
    } finally {
      setIsLoading(false);
      window.history.replaceState({}, '', `/chat/${id}`);
    }
  };

  const stop = () => {
    setIsLoading(false);
  };

  const append = async (message: Message) => {
    setMessages(prev => [...prev, message]);
  };

  useEffect(() => {
    console.log('Messages updated:', messages);
  }, [messages]);

  const [messagesContainerRef, messagesEndRef] = useScrollToBottom();

  const handleSubmitWrapper = (event?: { preventDefault?: () => void } | undefined) => {
    handleSubmit(event);
  };

  const appendWrapper = async (message: Message | CreateMessage) => {
    if ('id' in message) {
      await append(message as Message);
    }
    return null;
  };
  return (
    <>
      <div ref={messagesContainerRef}>
        {messages.length === 0 && }
        {messages.map((message) => (
        ))}
        <div
          ref={messagesEndRef}
          className="shrink-0 min-w-[24px] min-h-[24px]"
        />
      </div>
      <form className="flex mx-auto px-4 bg-background pb-4 md:pb-6 gap-2 w-full md:max-w-3xl">
        <MultimodalInput
          input={input}
          setInput={setInput}
          handleSubmit={handleSubmitWrapper}
          isLoading={isLoading}
          stop={stop}
          attachments={attachments}
          setAttachments={setAttachments}
          messages={messages}
          setMessages={setMessages}
          append={appendWrapper}
        />
      </form>
    </>
  );
}
I did not use the useChat hook because it has always given me problems and I could not get streaming output to work with it.
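For reference, this is roughly the useChat wiring I had tried before switching to the manual fetch above (a simplified sketch, not my real component; ChatWithHook is just an illustrative name, I am assuming the 'ai/react' entry point and the default /api/chat route, and the exact options depend on the AI SDK version):

// Rough sketch of the standard useChat wiring (simplified, version-dependent).
import { useChat } from 'ai/react';

export function ChatWithHook({ id, modelId }: { id: string; modelId: string }) {
  const { messages, input, handleInputChange, handleSubmit, isLoading } = useChat({
    api: '/api/chat',
    id,
    body: { id, modelId },
  });

  return (
    <form onSubmit={handleSubmit}>
      {messages.map((m) => (
        <div key={m.id}>
          {m.role}: {m.content}
        </div>
      ))}
      <input value={input} onChange={handleInputChange} disabled={isLoading} />
    </form>
  );
}

With wiring like this the assistant text never streamed for me, which is why I fell back to reading the response body myself.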
Thank you very much for helping me.