Status 500 when attempting vertex context caching

I’m receiving status 500 response codes from the vertexai API. Can someone verify whether the service is down, or whether the issue is on my end?

import datetime
import logging

import vertexai
from vertexai.generative_models import Part
from vertexai.preview import caching
from google.oauth2.service_account import Credentials

def create_new_gemini_cache(content, time_to_live, display_name=None,
                            model_name='gemini-1.5-pro-001'):
    """Create a Vertex AI context cache for the given content.

    Args:
        content: List of ``Part``/content objects to cache (must exceed the
            model's minimum cacheable token count).
        time_to_live: ``datetime.timedelta`` after which the cache expires.
        display_name: Optional human-readable name for the cache entry.
        model_name: Model the cache is bound to; defaults to the original
            hard-coded 'gemini-1.5-pro-001'.

    Returns:
        The created ``CachedContent`` object, or ``None`` if creation failed
        (the exception is delegated to ``handle_exception``).
    """
    # NOTE(review): the original signature was (display_name, content,
    # time_to_live), but the call site only passes (content, time_to_live) —
    # a guaranteed TypeError. display_name is now optional and trailing so
    # the existing call works, and it is actually forwarded to the API.
    try:
        cached_content = caching.CachedContent.create(
            model_name=model_name,
            contents=content,
            ttl=time_to_live,
            display_name=display_name,
        )
        logging.info('#### Content cached')
        return cached_content

    except Exception as e:
        # Best-effort: delegate to the project-wide handler; returns None.
        handle_exception(e)

# Build service-account credentials with the cloud-platform scope.
# NOTE(review): `gemini_cred_json` must be defined elsewhere (a dict of
# service-account key fields) — not shown in this excerpt.
gcp_credentials = Credentials.from_service_account_info(gemini_cred_json, scopes=['https://www.googleapis.com/auth/cloud-platform'])

# Initialize the Vertex AI SDK for this project/region.
# `GCP_PROJECT_ID` and `GCP_REGION` are assumed to be defined elsewhere.
vertexai.init(project=GCP_PROJECT_ID, location=GCP_REGION, credentials=gcp_credentials)  

# Content to cache — a single large text Part (placeholder, not valid Python
# as written; substitute the real ~1.8M-token string here).
content = [Part.from_text(<1.8M token text>)] # Assume large text here.

# Cache entries expire after this TTL (requires `import datetime`).
time_to_live = datetime.timedelta(minutes=60)

# NOTE(review): this call passes two positional args, but the function as
# posted is defined with three required parameters
# (display_name, content, time_to_live) — this raises TypeError before any
# API call is made, independent of the server-side 500s.
cache_content = create_new_gemini_cache(content, time_to_live)

Thank you

Update: upgrading the `google-cloud-aiplatform` SDK to v1.65.0 resolved the 500 errors.