diff --git a/llama-index-integrations/llms/llama-index-llms-gemini/llama_index/llms/gemini/base.py b/llama-index-integrations/llms/llama-index-llms-gemini/llama_index/llms/gemini/base.py index 119b252c0d37c..8779fcc4f5516 100644 --- a/llama-index-integrations/llms/llama-index-llms-gemini/llama_index/llms/gemini/base.py +++ b/llama-index-integrations/llms/llama-index-llms-gemini/llama_index/llms/gemini/base.py @@ -96,6 +96,7 @@ def __init__( api_base: Optional[str] = None, transport: Optional[str] = None, model_name: Optional[str] = None, + default_headers: Optional[Dict[str, str]] = None, **generate_kwargs: Any, ): """Creates a new Gemini model interface.""" @@ -123,6 +124,13 @@ def __init__( config_params["client_options"] = {"api_endpoint": api_base} if transport: config_params["transport"] = transport + if default_headers: + default_metadata: List[Tuple[str, str]] = [] + for key, value in default_headers.items(): + default_metadata.append((key, value)) + # `default_metadata` contains (key, value) pairs that will be sent with every request. + # When using `transport="rest"`, these will be sent as HTTP headers. + config_params["default_metadata"] = default_metadata # transport: A string, one of: [`rest`, `grpc`, `grpc_asyncio`]. genai.configure(**config_params) diff --git a/llama-index-integrations/llms/llama-index-llms-gemini/pyproject.toml b/llama-index-integrations/llms/llama-index-llms-gemini/pyproject.toml index fa783c1ecfdca..db29a199b57e8 100644 --- a/llama-index-integrations/llms/llama-index-llms-gemini/pyproject.toml +++ b/llama-index-integrations/llms/llama-index-llms-gemini/pyproject.toml @@ -27,7 +27,7 @@ exclude = ["**/BUILD"] license = "MIT" name = "llama-index-llms-gemini" readme = "README.md" -version = "0.1.12" +version = "0.2.0" [tool.poetry.dependencies] python = ">=3.9,<4.0"