Source code for agentscope.embedding._gemini_embedding
# -*- coding: utf-8 -*-
"""The gemini text embedding model class."""
from datetime import datetime
from typing import Any, List

from ._embedding_response import EmbeddingResponse
from ._embedding_usage import EmbeddingUsage
from ._cache_base import EmbeddingCacheBase
from ._embedding_base import EmbeddingModelBase


class GeminiTextEmbedding(EmbeddingModelBase):
    """The Gemini text embedding model."""

    def __init__(
        self,
        api_key: str,
        model_name: str,
        embedding_cache: EmbeddingCacheBase | None = None,
        **kwargs: Any,
    ) -> None:
        """Initialize the Gemini text embedding model class.

        Args:
            api_key (`str`):
                The Gemini API key.
            model_name (`str`):
                The name of the embedding model.
            embedding_cache (`EmbeddingCacheBase | None`, defaults to `None`):
                The embedding cache class instance, used to cache the
                embedding results to avoid repeated API calls.
        """
        from google import genai

        super().__init__(model_name)

        self.client = genai.Client(api_key=api_key, **kwargs)
        self.embedding_cache = embedding_cache

    async def __call__(
        self,
        text: List[str],
        **kwargs: Any,
    ) -> EmbeddingResponse:
        """The Gemini embedding API call.

        Args:
            text (`List[str]`):
                The input text to be embedded. It can be a list of strings.
        """
        kwargs = {
            "model": self.model_name,
            "contents": text,
            "config": kwargs,
        }

        if self.embedding_cache:
            cached_embeddings = await self.embedding_cache.retrieve(
                identifier=kwargs,
            )
            if cached_embeddings:
                return EmbeddingResponse(
                    embeddings=cached_embeddings,
                    usage=EmbeddingUsage(
                        tokens=0,
                        time=0,
                    ),
                    source="cache",
                )

        start_time = datetime.now()
        response = self.client.models.embed_content(**kwargs)
        time = (datetime.now() - start_time).total_seconds()

        if self.embedding_cache:
            await self.embedding_cache.store(
                identifier=kwargs,
                embeddings=[_.values for _ in response.embeddings],
            )

        return EmbeddingResponse(
            embeddings=[_.values for _ in response.embeddings],
            usage=EmbeddingUsage(
                time=time,
            ),
        )
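For reference, a minimal usage sketch of the class above. It assumes GeminiTextEmbedding is re-exported from agentscope.embedding (otherwise import it from agentscope.embedding._gemini_embedding), and the API key and model name shown are placeholders to replace with your own values.

import asyncio

from agentscope.embedding import GeminiTextEmbedding


async def main() -> None:
    # Placeholder API key and model name: substitute your own values.
    embedder = GeminiTextEmbedding(
        api_key="YOUR_GEMINI_API_KEY",
        model_name="gemini-embedding-001",
    )

    # __call__ is async and returns an EmbeddingResponse whose
    # `embeddings` field holds one vector per input string.
    response = await embedder(["Hello, world!", "Goodbye, world!"])
    print(len(response.embeddings))


if __name__ == "__main__":
    asyncio.run(main())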