Audio input transcription getting stuck in the middle of the conversation

I am using gemini-2.5-flash-native-audio-preview-12-2025 for my realtime conversation app. When I give a long answer to a question, I am not able to get the full transcription of it. It starts well, but after some time it gets stuck, and I stop receiving any transcription for the rest of the long answer. Is there any limit on it?

Attaching the code below:
```ts
// Live session config for the Gemini realtime interview session.
{
  responseModalities: [Modality.AUDIO],
  systemInstruction,
  // Resume a previous session when we have a handle; otherwise start fresh.
  sessionResumption: sessionHandle ? { handle: sessionHandle } : {},
  contextWindowCompression: {
    slidingWindow: {},
  },
  outputAudioTranscription: {},
  inputAudioTranscription: {},
  realtimeInputConfig: {
    automaticActivityDetection: {
      disabled: false,
      startOfSpeechSensitivity: StartSensitivity.START_SENSITIVITY_LOW,
      endOfSpeechSensitivity: EndSensitivity.END_SENSITIVITY_LOW,
      prefixPaddingMs: 20,
      // ~3s of silence ends the user's turn.
      silenceDurationMs: 3000,
    },
    turnCoverage: TurnCoverage.TURN_INCLUDES_ALL_INPUT,
  },
  speechConfig: {
    voiceConfig: {
      prebuiltVoiceConfig: {
        voiceName: "Zephyr",
      },
    },
  },
  thinkingConfig: {
    includeThoughts: false,
  },
  tools: [
    {
      functionDeclarations: [
        {
          name: "end_interview",
          description: "End the interview session when the interview is complete or the candidate requests to end it.",
          parameters: {
            type: Type.OBJECT,
            properties: {},
          },
        },
      ],
    },
  ],
}
//handler for incoming messages
const handleServerMessage = useCallback(
async (message: any) => {
const currentHandle = sessionHandleRef.current;
// console.log(“:rocket: ~ handleServerMessage ~ message:”, message);

  // Handle setup completion - trigger initial kickoff
  if (message?.setupComplete && !initialKickoffSentRef.current) {
    console.log("🚀 ~ handleServerMessage ~ message:", "setup complete");
    if (sessionRef.current) {
      try {
        sessionRef.current.sendClientContent({
          turns: "Please start the interview now with your introduction and first question.",
          turnComplete: true,
        });
        initialKickoffSentRef.current = true;
      } catch (error) {
        console.error("Failed to trigger interviewer start:", error);
      }
    }
  }

  // Handle GoAway message - connection will close soon
  if (message?.goAway) {
    goAwayCountRef.current += 1;
    const goAwayCount = goAwayCountRef.current;
    const timeLeft = message.goAway.timeLeft || "50s";
    
    console.warn("⚠️ GoAway received. Time left:", timeLeft);
    setWarning(`Connection will refresh in ${timeLeft}...`);
    goAwayReceivedRef.current = true;
    await persistConversation(goAwayCount === 1 ? "snapshot" : "final");
    
    // Stop audio stream immediately
    if (isMicOn) {
      console.log("🔇 Stopping audio stream immediately due to GoAway...");
      audioStream.stopMicStream();
      setIsMicOn(false);
    }

    // If second GoAway, finalize interview
    if (goAwayCount >= MAX_GOAWAY_COUNT) {
      console.log("⏹️ Second GoAway received. Finalizing session and redirecting to report...");
      await finalizeInterview("system");
      return;
    }

    // Reconnect if we have a handle
    if (currentHandle && !isReconnecting && sessionRef.current && status === "connected") {
      console.log("🔄 Reconnecting immediately with existing session handle...");
      setTimeout(() => {
        if (!isReconnecting && reconnectSessionRef.current) {
          reconnectSessionRef.current();
        }
      }, RECONNECT_CHECK_DELAY_MS);
    } else {
      // Parse time and reconnect proactively
      const timeLeftMs = parseTimeLeft(timeLeft);
      const reconnectDelay = Math.max(timeLeftMs - RECONNECT_DELAY_BEFORE_CLOSE_MS, MIN_RECONNECT_DELAY_MS);
      
      if (!isReconnecting && sessionRef.current && status === "connected") {
        console.log(`🔄 Will reconnect in ${reconnectDelay}ms (before connection closes)...`);
        setTimeout(() => {
          if (
            sessionRef.current &&
            status === "connected" &&
            !isReconnecting &&
            sessionHandleRef.current &&
            reconnectSessionRef.current
          ) {
            console.log("🔄 Proactively reconnecting before connection closes...");
            reconnectSessionRef.current();
          }
        }, reconnectDelay);
      }
    }
    return;
  }

  // Handle session resumption updates
  if (message?.sessionResumptionUpdate) {
    const update = message.sessionResumptionUpdate;
    console.log("📝 Session resumption update:", update);
    
    if (update.resumable && update.newHandle) {
      console.log("✅ New session handle received:", update.newHandle);
      updateSessionHandle(update.newHandle);
      
      // If we received GoAway earlier and now have a handle, reconnect immediately
      if (goAwayReceivedRef.current && !isReconnecting && sessionRef.current && status === "connected") {
        console.log("🔄 GoAway was received earlier, reconnecting immediately with new handle...");
        setTimeout(() => {
          if (!isReconnecting && sessionHandleRef.current && reconnectSessionRef.current) {
            reconnectSessionRef.current();
          }
        }, RECONNECT_CHECK_DELAY_MS);
      }
    }
  }

  // Handle user transcription
  if (message?.serverContent?.inputTranscription?.text) {
    const transcriptionText = message.serverContent.inputTranscription.text;
    console.info("outputTranscription?",message?.serverContent)
    if (transcriptionText) {
      handleAddChatMessage("user", transcriptionText, "audio", { merge: true });
    }
  }

  // Handle AI transcription
  if (message?.serverContent?.outputTranscription?.text) {
    handleAddChatMessage(
      "assistant",
      message.serverContent.outputTranscription.text,
      "audio",
      { merge: true },
    );
  }

  // Handle model turn
  if (message?.serverContent?.modelTurn?.parts) {
    audioStream.resetAudioTurnFlag();

    for (const part of message.serverContent.modelTurn.parts) {
      if (part.text) {
        handleAddChatMessage("assistant", part.text, "text", { merge: true });
      }
      if (part.inlineData?.data) {
        const audioBuffer = base64ToArrayBuffer(part.inlineData.data);
        audioPlayback.enqueuePlayback(audioBuffer);
      }
    }
  }

  // Handle interruption
  if (message?.serverContent?.interrupted) {
    audioPlayback.clearPlaybackQueue();
    if (!audioStream.getHasSentAudio()) {
      setWarning("AI playback interrupted.");
      setTimeout(() => setWarning(null), 3000);
    }
  }

  // Handle tool calls (e.g., end_interview)
  if (message?.toolCall?.functionCalls) {
    for (const functionCall of message.toolCall.functionCalls) {
      console.log(`🔧 Tool call received: ${functionCall.name}`, functionCall.args);
      
      if (functionCall.name === "end_interview") {
        // End the interview when user requests it via tool call
        console.log("✅ Ending interview as requested by user via tool call");
        handleAddChatMessage(
          "system",
          "Interview ending as requested. Thank you for your time!",
          "text",
        );
        
        // Send tool response before finalizing
        if (sessionRef.current) {
          try {
            sessionRef.current.sendToolResponse({
              functionResponses: [
                {
                  id: functionCall.id,
                  name: functionCall.name,
                  response: { success: true, message: "Interview ended successfully." },
                },
              ],
            });
          } catch (error) {
            console.error("Failed to send tool response:", error);
          }
        }
        
        // Finalize interview after a short delay to ensure response is sent
        setTimeout(() => {
          finalizeInterview("system");
        }, 500);
        return;
      }
      
      // Handle other tool calls if needed in the future
      // Send a default response for unknown tool calls
      if (sessionRef.current) {
        try {
          sessionRef.current.sendToolResponse({
            functionResponses: [
              {
                id: functionCall.id,
                name: functionCall.name,
                response: { error: "Unknown function call" },
              },
            ],
          });
        } catch (error) {
          console.error("Failed to send tool response:", error);
        }
      }
    }
  }
},
[
  handleAddChatMessage,
  audioStream,
  audioPlayback,
  isMicOn,
  isReconnecting,
  status,
  updateSessionHandle,
  persistConversation,
  finalizeInterview,
],

); ``

Hi @prathamesh_mungekar,

Yes, there is an output limit of 8192 tokens. Refer to the documentation here.

As I went through the log, I can see `turnComplete: true`, which confirms the server is cutting the stream.

Also, maxOutputTokens still applies here, so set it to the maximum value, i.e., 8192, and see if that works.
Thank you!