I keep getting this error (printed from my exception handler) when the script reaches the chat step:

Error during chat session: 500 An internal error has occurred

Here is my code -

# gemini_processor.py

import os
import time
import google.generativeai as genai
from dotenv import load_dotenv
import uuid
import tempfile

# Load environment variables
load_dotenv()

# Configure Gemini API
genai.configure(api_key=os.getenv("GEMINI_API_KEY"))

def upload_to_gemini(path, mime_type=None):
    """
    Upload a file to Gemini API.
    """
    try:
        file = genai.upload_file(path, mime_type=mime_type)
        print(f"Uploaded file '{file.display_name}' as: {file.uri}")
        return file
    except Exception as e:
        print(f"Error uploading file to Gemini API: {e}")
        return None

def wait_for_files_active(files):
    """
    Wait until all uploaded files are in ACTIVE state.
    """
    print("Waiting for file processing...")
    for name in (file.name for file in files):
        file = genai.get_file(name)
        while file.state.name == "PROCESSING":
            print(".", end="", flush=True)
            time.sleep(10)
            file = genai.get_file(name)
        if file.state.name != "ACTIVE":
            raise Exception(f"File {file.name} failed to process")
    print("...all files ready\n")

def process_content(username, content, task_id, tasks):
    """
    Process the scraped content through Gemini API and update the tasks dict with progress.
    """
    try:
        generation_config = {
            "temperature": 1,
            "top_p": 0.95,
            "top_k": 64,
            "max_output_tokens": 8192,
            "response_mime_type": "text/plain",
        }

        tasks[task_id]['progress'] = 'Initializing Gemini model...'
        model = genai.GenerativeModel(
            model_name="gemini-exp-1206",  # Replace with actual model name if different
            generation_config=generation_config,
        )

        # Create a unique temporary file
        temp_dir = tempfile.gettempdir()
        unique_id = uuid.uuid4().hex
        temp_input_file = os.path.join(temp_dir, f"{username}_{unique_id}_reddit_full_data.md")

        # Write content to the temporary input file
        with open(temp_input_file, "w", encoding="utf-8") as f:
            f.write(content)

        tasks[task_id]['progress'] = 'Uploading file to Gemini API...'
        # Upload the file
        uploaded_file = upload_to_gemini(temp_input_file, mime_type="text/markdown")
        if not uploaded_file:
            tasks[task_id]['status'] = 'Failed'
            tasks[task_id]['progress'] = 'Failed to upload file to Gemini API.'
            os.remove(temp_input_file)  # Clean up
            return None

        tasks[task_id]['progress'] = 'Waiting for Gemini to process the file...'
        # Wait for the file to be active
        wait_for_files_active([uploaded_file])

        tasks[task_id]['progress'] = 'Generating analysis report...'
        # Start chat session with the designed prompt
        chat_session = model.start_chat(
            history=[
                {
                    "role": "user",
                    "parts": [
                        uploaded_file,
                        "[My Detailed Prompt]",
                    ],
                },
                {
                    "role": "model",
                    "parts": [
                        "Yes, I will do it.",
                    ],
                },
            ]
        )

        try:
            response = chat_session.send_message("Yes Do IT!!!!")
        except Exception as e:
            print(f"Error during chat session: {e}")
            os.remove(temp_input_file)  # Clean up
            tasks[task_id]['progress'] = 'Failed during Gemini processing.'
            tasks[task_id]['status'] = 'Failed'
            return None

        # Clean up the temporary input file now that Gemini has its own copy
        os.remove(temp_input_file)

        # Save the response to a unique .md file
        unique_id = uuid.uuid4().hex
        output_filename = f"response_output_{username}_{unique_id}.md"
        output_path = os.path.join(temp_dir, output_filename)
        with open(output_path, "w", encoding="utf-8") as f:
            f.write(response.text)

        print(f"Response saved to {output_path}")
        tasks[task_id]['progress'] = 'Report generated successfully.'
        tasks[task_id]['status'] = 'Completed'
        tasks[task_id]['report_path'] = output_path
        return output_path

    except Exception as e:
        # Don't swallow failures silently; surface them in the task status
        print(f"Error during content processing: {e}")
        tasks[task_id]['status'] = 'Failed'
        tasks[task_id]['progress'] = f'Failed: {e}'
        return None

As you can see, the file even uploads successfully and reaches the ACTIVE state, but then this 500 error hits as soon as send_message is called.
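In case it is just a transient server error, this is a minimal retry sketch I could wrap around the send_message call (the attempt count and delays are arbitrary placeholders I made up, not anything from the docs), but I would rather understand the actual cause:

import time

def send_with_retry(chat_session, message, attempts=3, delay=5):
    """Retry send_message a few times in case the 500 is transient."""
    last_error = None
    for attempt in range(1, attempts + 1):
        try:
            return chat_session.send_message(message)
        except Exception as e:
            last_error = e
            print(f"Attempt {attempt} failed: {e}")
            time.sleep(delay * attempt)  # simple linear backoff before the next try
    raise last_error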

Please help, I really need to get this working.