Hello, I have tried to set up GitHub - google-gemini/example-chat-app (Get up and running with the Gemini API using Node.js and Python) and have the chat route working, but the /stream route keeps failing with the following error: Error during /stream: TypeError: stream is not async iterable.
I would be very grateful for any help. Here is my server code:
require('dotenv').config({ path: './.env' });
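/*
 * The .env file in the project root is expected to provide the API key and
 * port, roughly like this (placeholder values):
 *
 *   GEMINI_API_KEY=your-api-key-here
 *   PORT=9000
 */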
/** Package to enable CORS to handle requests from all domains. */
const cors = require('cors');
/** Framework for building RESTful APIs. */
const express = require('express');
/** Initialize the Express app. */
const app = express();
app.use(express.json());
/** Apply the CORS middleware. */
app.use(cors());
/** Enable and listen to port 9000. */
const PORT = process.env.PORT || 9000;
app.listen(PORT, () => {
  console.log('Server Listening on PORT:', PORT);
});
/** Read the Gemini API key from the .env file (previously hard-coded here). */
const GEMINI_API_KEY = process.env.GEMINI_API_KEY;
/** Package to use the Gemini API. */
const { GoogleGenerativeAI } = require('@google/generative-ai');
/** The SDK constructor takes the API key string directly, not an options object. */
const genAI = new GoogleGenerativeAI(GEMINI_API_KEY);
/**
 * Initialize the Gemini model that will generate responses based on the
 * user's queries.
 */
const model = genAI.getGenerativeModel({ model: "gemini-2.0-flash" });
/**
 * POST method route for normal chat (complete response, no streaming).
 *
 * A chat message and the history of the conversation are sent to the Gemini
 * model. The complete response generated by the model to the posted message
 * will be returned in the API's response.
 *
 * Expects a JSON payload in the request with the following format:
 *
 * Request:
 *   chat: string,
 *   history: Array
 *
 * Returns a JSON payload containing the model response with the
 * following format:
 *
 * Response:
 *   text: string
 */
app.post("/chat", async (req, res) => {
  /** Read the request data. */
  const chatHistory = req.body.history || [];
  const msg = req.body.chat;

  /** Initialize the chat with the given history. */
  const chat = model.startChat({
    history: chatHistory
  });

  /**
   * Send the message posted by the user to the Gemini model and read the
   * response generated by the model.
   */
  const result = await chat.sendMessage(msg);
  const response = await result.response;
  const text = response.text();

  /** Send the response returned by the model as the API's response. */
  res.send({ "text": text });
});
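/*
 * Example request to the /chat route above, e.g. from a separate script or
 * the browser console (payload values here are just placeholders):
 *
 *   const res = await fetch("http://localhost:9000/chat", {
 *     method: "POST",
 *     headers: { "Content-Type": "application/json" },
 *     body: JSON.stringify({ chat: "Hello", history: [] }),
 *   });
 *   const data = await res.json();
 *   console.log(data.text);
 */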
/**
 * POST method route for streaming responses.
 *
 * A chat message and the history of the conversation are sent to the Gemini
 * model. The response generated by the model will be streamed to handle
 * partial results.
 *
 * Expects a JSON payload in the request with the following format:
 *
 * Request:
 *   chat: string,
 *   history: Array
 *
 * Returns partial results of the model response as a stream of plain text
 * chunks.
 */
app.post("/stream", async (req, res) => {
  /** Read the request data. */
  const chatHistory = req.body.history || [];
  const msg = req.body.chat;

  /** Initialize the chat with history. */
  const chat = model.startChat({
    history: chatHistory
  });

  /**
   * Send a new user message and read the response.
   * Send each chunk of text back to the client as soon as it is received.
   */
  const result = await chat.sendMessageStream(msg);
  for await (const chunk of result.stream) {
    const chunkText = chunk.text();
    res.write(chunkText);
  }
  res.end();
});
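For reference, this is a minimal sketch of how the streamed response can be read on the client side (it assumes the server above is running on localhost:9000 and uses a placeholder prompt; it is not the actual app client):

(async () => {
  const response = await fetch("http://localhost:9000/stream", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ chat: "Tell me a short story", history: [] }),
  });

  /** Read the raw text chunks as the server writes them. */
  const reader = response.body.getReader();
  const decoder = new TextDecoder();
  let fullText = "";
  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    fullText += decoder.decode(value, { stream: true });
    console.log(fullText);
  }
})();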