From a978267fa20e7ab20fcaa12911e536ba2d67d3d5 Mon Sep 17 00:00:00 2001
From: Mark McDonald
Date: Tue, 25 Nov 2025 03:49:29 +0800
Subject: [PATCH] feat: Add gemini-3-pro-preview (#3950)

* Add gemini-3-pro-preview

Also refactors the tool support check for better forward compatibility.

* Add cassette for Gemini 3 Pro

---------

Co-authored-by: Greyson LaLonde
---
 lib/crewai/src/crewai/cli/constants.py        |  1 +
 lib/crewai/src/crewai/llm.py                  |  1 +
 lib/crewai/src/crewai/llms/constants.py       |  2 +
 .../llms/providers/gemini/completion.py       |  7 +-
 ...i_models[gemini-gemini-3-pro-preview].yaml | 69 +++++++++++++++++++
 lib/crewai/tests/llms/google/test_google.py   |  2 -
 lib/crewai/tests/test_llm.py                  |  1 +
 7 files changed, 78 insertions(+), 5 deletions(-)
 create mode 100644 lib/crewai/tests/cassettes/test_gemini_models[gemini-gemini-3-pro-preview].yaml

diff --git a/lib/crewai/src/crewai/cli/constants.py b/lib/crewai/src/crewai/cli/constants.py
index ec0bd2ac8..a3755b1a6 100644
--- a/lib/crewai/src/crewai/cli/constants.py
+++ b/lib/crewai/src/crewai/cli/constants.py
@@ -145,6 +145,7 @@ MODELS = {
         "claude-3-haiku-20240307",
     ],
     "gemini": [
+        "gemini/gemini-3-pro-preview",
        "gemini/gemini-1.5-flash",
        "gemini/gemini-1.5-pro",
        "gemini/gemini-2.0-flash-lite-001",
diff --git a/lib/crewai/src/crewai/llm.py b/lib/crewai/src/crewai/llm.py
index b0cf42091..5818fdac9 100644
--- a/lib/crewai/src/crewai/llm.py
+++ b/lib/crewai/src/crewai/llm.py
@@ -179,6 +179,7 @@ LLM_CONTEXT_WINDOW_SIZES: Final[dict[str, int]] = {
     "o3-mini": 200000,
     "o4-mini": 200000,
     # gemini
+    "gemini-3-pro-preview": 1048576,
     "gemini-2.0-flash": 1048576,
     "gemini-2.0-flash-thinking-exp-01-21": 32768,
     "gemini-2.0-flash-lite-001": 1048576,
diff --git a/lib/crewai/src/crewai/llms/constants.py b/lib/crewai/src/crewai/llms/constants.py
index 2765a9458..fc4656455 100644
--- a/lib/crewai/src/crewai/llms/constants.py
+++ b/lib/crewai/src/crewai/llms/constants.py
@@ -235,6 +235,7 @@ ANTHROPIC_MODELS: list[AnthropicModels] = [
 ]
 
 GeminiModels: TypeAlias = Literal[
+    "gemini-3-pro-preview",
     "gemini-2.5-pro",
     "gemini-2.5-pro-preview-03-25",
     "gemini-2.5-pro-preview-05-06",
@@ -287,6 +288,7 @@ GeminiModels: TypeAlias = Literal[
     "learnlm-2.0-flash-experimental",
 ]
 GEMINI_MODELS: list[GeminiModels] = [
+    "gemini-3-pro-preview",
     "gemini-2.5-pro",
     "gemini-2.5-pro-preview-03-25",
     "gemini-2.5-pro-preview-05-06",
diff --git a/lib/crewai/src/crewai/llms/providers/gemini/completion.py b/lib/crewai/src/crewai/llms/providers/gemini/completion.py
index 8668a8f58..027262865 100644
--- a/lib/crewai/src/crewai/llms/providers/gemini/completion.py
+++ b/lib/crewai/src/crewai/llms/providers/gemini/completion.py
@@ -1,5 +1,6 @@
 import logging
 import os
+import re
 from typing import Any, cast
 
 from pydantic import BaseModel
@@ -100,9 +101,8 @@ class GeminiCompletion(BaseLLM):
         self.stop_sequences = stop_sequences or []
 
         # Model-specific settings
-        self.is_gemini_2 = "gemini-2" in model.lower()
-        self.is_gemini_1_5 = "gemini-1.5" in model.lower()
-        self.supports_tools = self.is_gemini_1_5 or self.is_gemini_2
+        version_match = re.search(r"gemini-(\d+(?:\.\d+)?)", model.lower())
+        self.supports_tools = bool(version_match and float(version_match.group(1)) >= 1.5)
 
     @property
     def stop(self) -> list[str]:
@@ -559,6 +559,7 @@
         )
 
         context_windows = {
+            "gemini-3-pro-preview": 1048576,  # 1M tokens
             "gemini-2.0-flash": 1048576,  # 1M tokens
             "gemini-2.0-flash-thinking": 32768,
             "gemini-2.0-flash-lite": 1048576,
diff --git a/lib/crewai/tests/cassettes/test_gemini_models[gemini-gemini-3-pro-preview].yaml b/lib/crewai/tests/cassettes/test_gemini_models[gemini-gemini-3-pro-preview].yaml
new file mode 100644
index 000000000..eb47f39b7
--- /dev/null
+++ b/lib/crewai/tests/cassettes/test_gemini_models[gemini-gemini-3-pro-preview].yaml
@@ -0,0 +1,69 @@
+interactions:
+- request:
+    body: '{"contents":[{"role":"user","parts":[{"text":"What is the capital of France?"}]}],"generationConfig":{"stop_sequences":[]}}'
+    headers:
+      accept:
+      - '*/*'
+      accept-encoding:
+      - gzip, deflate
+      connection:
+      - keep-alive
+      content-length:
+      - '123'
+      content-type:
+      - application/json
+      host:
+      - generativelanguage.googleapis.com
+      user-agent:
+      - litellm/1.78.5
+    method: POST
+    uri: https://generativelanguage.googleapis.com/v1beta/models/gemini-3-pro-preview:generateContent
+  response:
+    body:
+      string: !!binary |
+        H4sIAAAAAAAC/21UW4+iSBh9719heGxmBgFvbDIPgKAgNwUV3OxDCSWU3KFApdP/fWl77XF2l6RI
+        5ftOnVN1ks+8vQwGhA+yAAUAw5r4Y/BnXxkM3u7/j16eYZjhvvEo9cUCVPgX9vN7e9r3EAyvH4cI
+        J4IDHxQIg2SQnwZyBTIfDlA9eH21QIXq19cfxLd/HY3yJoywjcIM4KaCHzRSvZbEWpL4YIlRytG8
+        a3eoGiukHPHm3jH2FNvMTC1qLlgS05RL42PVyPMdz1uFHpQuytZSBqcHf7PexMHK3mjJQjWKIbM+
+        MxFL6cvWMMfQFsOJ3UQk5j1hWmoxK1DrLqncyrpcQ+UY0uZog2oqkTmXiQ2f27ZBpS58MXBTxRbX
+        qdfsl25Vn5tswrUHeVhVxenW7kaG0cKdt2hjjxPUBYY26BAUvbqqw30AoG0eTMmzdImnIrI51+VY
+        xeqUl/HKs8ZgfBPF0bbtMDjMzxZSkv3KNuJgwTlYMkw9YEyKMcfkRvUmkiPpBqL486niJEuQKtE7
+        XibhpJy1AltrXSrjq+iEucKfK5z43Ci6bTu+VIVuRNecmwRN2gnbqQHH6lQ06eNM5ttpwEjZVOI3
+        umesM9qbcxMySprtbDYXaboQdioPMpuEy3U4VZrM6njN0rAk8Fh3/ON+E58FJPDtxD8upIWTbI/D
+        MrqM7RWj7VWo6kMFUgaj5Dpzsg8bE6GoIc+rJEcnau8qGNnZygGNcRO61nD5sXgyWbUQ+Z4XQhrX
+        3C6UyS2OTHAp2cUJVp0eSZqtyTuTy48XjmW0xLJVYRqYYmSZhatQ45ROKPZiXTZTxiq2ceDPIhii
+        7tBurqtSL7ylp5NRw5FUzJXsLkiRJs1BIi05Oxit51ToBF2oTGOvYTXjfJptR62SVdTB7W5aaJzq
+        nb9adAVFIii3gZE5Qz87C+ViVKa3eJ2f4pyiSzasywoHJA2klNL01IIYX6o55V8n3BUc8vKagLIp
+        d/pRZoatSfor/yx4bAYp/udP4mlc3r/2f/2aIqLKk/vUpHkAkwf8/QEgTihDdbSBoM6zD5jtmNbX
+        EBIoC+C1Lw9fHgJ3aqKpQQh1iEGfFOArD4iiytMCO3kMMzFv7kkx++R6ypX/beO8D4XfOvSI/vYf
+        1nrea6LkOW+eoqh/IkgQvt2zRnKdpzDpBZ5VHza8PLn1yJrfL0gz45d//Pq0cAerGn16FcK0d+87
+        +72/Yb9gi+DlrknUsC7yrIZK8IHbeV4/2Sy/LL9r50a3aquVZ2uPeHl/+RvdmjG6dAUAAA==
+    headers:
+      Alt-Svc:
+      - h3=":443"; ma=2592000,h3-29=":443"; ma=2592000
+      Content-Encoding:
+      - gzip
+      Content-Type:
+      - application/json; charset=UTF-8
+      Date:
+      - Wed, 19 Nov 2025 08:56:53 GMT
+      Server:
+      - scaffolding on HTTPServer2
+      Server-Timing:
+      - gfet4t7; dur=2508
+      Transfer-Encoding:
+      - chunked
+      Vary:
+      - Origin
+      - X-Origin
+      - Referer
+      X-Content-Type-Options:
+      - nosniff
+      X-Frame-Options:
+      - SAMEORIGIN
+      X-XSS-Protection:
+      - '0'
+    status:
+      code: 200
+      message: OK
+version: 1
diff --git a/lib/crewai/tests/llms/google/test_google.py b/lib/crewai/tests/llms/google/test_google.py
index c6f271b0a..1dd585729 100644
--- a/lib/crewai/tests/llms/google/test_google.py
+++ b/lib/crewai/tests/llms/google/test_google.py
@@ -455,13 +455,11 @@ def test_gemini_model_capabilities():
     llm_2_0 = LLM(model="google/gemini-2.0-flash-001")
     from crewai.llms.providers.gemini.completion import GeminiCompletion
     assert isinstance(llm_2_0, GeminiCompletion)
-    assert llm_2_0.is_gemini_2 == True
     assert llm_2_0.supports_tools == True
 
     # Test Gemini 1.5 model
     llm_1_5 = LLM(model="google/gemini-1.5-pro")
     assert isinstance(llm_1_5, GeminiCompletion)
-    assert llm_1_5.is_gemini_1_5 == True
     assert llm_1_5.supports_tools == True
 
 
diff --git a/lib/crewai/tests/test_llm.py b/lib/crewai/tests/test_llm.py
index ad3dd9963..50df854d4 100644
--- a/lib/crewai/tests/test_llm.py
+++ b/lib/crewai/tests/test_llm.py
@@ -259,6 +259,7 @@ def test_validate_call_params_no_response_format():
 @pytest.mark.parametrize(
     "model",
     [
+        "gemini/gemini-3-pro-preview",
         "gemini/gemini-2.0-flash-thinking-exp-01-21",
         "gemini/gemini-2.0-flash-001",
         "gemini/gemini-2.0-flash-lite-001",
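
Below, for context, is a minimal standalone sketch of the refactored tool-support check from completion.py, showing why newly released Gemini models such as gemini-3-pro-preview no longer need a code change to enable tool use. The regex and the 1.5 threshold are taken from the patch; the helper name supports_tools and the spot-check model strings are illustrative assumptions, not part of the change.

import re

def supports_tools(model: str) -> bool:
    # Same logic as the refactored GeminiCompletion check: any model name
    # containing "gemini-<version>" with version >= 1.5 is treated as tool-capable.
    version_match = re.search(r"gemini-(\d+(?:\.\d+)?)", model.lower())
    return bool(version_match and float(version_match.group(1)) >= 1.5)

# Hypothetical spot checks (not part of the patch):
assert supports_tools("gemini/gemini-3-pro-preview") is True    # new model works without a mapping
assert supports_tools("google/gemini-1.5-pro") is True
assert supports_tools("gemini-1.0-pro") is False                # pre-1.5 models stay excluded
assert supports_tools("learnlm-2.0-flash-experimental") is False  # names without "gemini-" never match

Parsing the version out of the model name keeps the check forward compatible: any future gemini-N release clears the 1.5 threshold automatically, while non-Gemini and pre-1.5 names continue to return False.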