Compare commits


1 Commit

Author: Devin AI
SHA1: e2c12a0487
Date: 2026-01-30 00:09:37 +00:00
Message: feat: add support for Gemini 3.0 Flash Preview model
- Add gemini-3-flash-preview to GEMINI_MODELS constant in llms/constants.py
- Add context window size (1048576) for gemini-3-flash-preview in llm.py
- Add gemini-3-flash-preview to CLI constants for discoverability
- Add test case and VCR cassette for gemini-3-flash-preview model

Fixes #4308

Co-Authored-By: João <joao@crewai.com>
5 changed files with 65 additions and 0 deletions
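Before the per-file diffs, a minimal usage sketch of how the newly listed model can be selected through crewAI's LLM wrapper. The environment-variable handling and agent fields below follow the usual crewAI/LiteLLM conventions and are not part of this diff:

```python
import os

from crewai import LLM, Agent

# The Gemini provider is assumed to read its key from the environment,
# as with the other gemini/* models routed through LiteLLM.
os.environ.setdefault("GEMINI_API_KEY", "<your-key>")

# Same provider-prefixed id that this PR adds to the constants.
llm = LLM(model="gemini/gemini-3-flash-preview", temperature=0.2)

agent = Agent(
    role="Research assistant",
    goal="Answer short factual questions",
    backstory="Lightweight agent used to smoke-test the new model entry.",
    llm=llm,
)
```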

View File

@@ -145,6 +145,7 @@ MODELS = {
"claude-3-haiku-20240307",
],
"gemini": [
"gemini/gemini-3-flash-preview",
"gemini/gemini-3-pro-preview",
"gemini/gemini-1.5-flash",
"gemini/gemini-1.5-pro",

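The hunk above is the CLI-facing model listing mentioned in the commit message. Purely as an illustration (the helper name and the dict excerpt below are not from the repository), a nested provider-to-models mapping like this can be flattened for display:

```python
# Excerpt of the provider -> model-id mapping shown in the hunk above;
# list_models is a hypothetical helper, not code from this PR.
MODELS = {
    "gemini": [
        "gemini/gemini-3-flash-preview",
        "gemini/gemini-3-pro-preview",
        "gemini/gemini-1.5-flash",
        "gemini/gemini-1.5-pro",
    ],
}


def list_models(provider: str | None = None) -> list[str]:
    """Return every selectable model id, optionally filtered to one provider."""
    if provider is not None:
        return list(MODELS.get(provider, []))
    return [model for models in MODELS.values() for model in models]


assert "gemini/gemini-3-flash-preview" in list_models("gemini")
```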
View File

@@ -200,6 +200,7 @@ LLM_CONTEXT_WINDOW_SIZES: Final[dict[str, int]] = {
"o3-mini": 200000,
"o4-mini": 200000,
# gemini
"gemini-3-flash-preview": 1048576,
"gemini-3-pro-preview": 1048576,
"gemini-2.0-flash": 1048576,
"gemini-2.0-flash-thinking-exp-01-21": 32768,

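This second hunk registers the 1,048,576-token window for the new model in llm.py. A sketch of the lookup pattern such a table enables, with an assumed default fallback (only the new dictionary entries are taken from the diff):

```python
# Subset of the table added above; the fallback value and the helper below
# are illustrative assumptions, not code from llm.py.
LLM_CONTEXT_WINDOW_SIZES = {
    "gemini-3-flash-preview": 1048576,
    "gemini-3-pro-preview": 1048576,
}
DEFAULT_CONTEXT_WINDOW_SIZE = 8192  # assumed fallback for unknown models


def context_window_for(model: str) -> int:
    """Resolve a context window from the bare model id, after any provider prefix."""
    bare_model = model.split("/", 1)[-1]  # "gemini/gemini-3-flash-preview" -> "gemini-3-flash-preview"
    return LLM_CONTEXT_WINDOW_SIZES.get(bare_model, DEFAULT_CONTEXT_WINDOW_SIZE)


assert context_window_for("gemini/gemini-3-flash-preview") == 1048576
```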
View File

@@ -239,6 +239,7 @@ ANTHROPIC_MODELS: list[AnthropicModels] = [
]
GeminiModels: TypeAlias = Literal[
"gemini-3-flash-preview",
"gemini-3-pro-preview",
"gemini-2.5-pro",
"gemini-2.5-pro-preview-03-25",
@@ -293,6 +294,7 @@ GeminiModels: TypeAlias = Literal[
"learnlm-2.0-flash-experimental",
]
GEMINI_MODELS: list[GeminiModels] = [
"gemini-3-flash-preview",
"gemini-3-pro-preview",
"gemini-2.5-pro",
"gemini-2.5-pro-preview-03-25",

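This hunk touches both the GeminiModels Literal alias and the GEMINI_MODELS runtime list, which must stay in sync. One hedged way to enforce that (not how the repository does it) is to derive the list from the Literal with typing.get_args:

```python
from typing import Literal, TypeAlias, get_args

# Trimmed-down version of the alias from the hunk above.
GeminiModels: TypeAlias = Literal[
    "gemini-3-flash-preview",
    "gemini-3-pro-preview",
    "gemini-2.5-pro",
]

# Deriving the runtime list from the Literal keeps the two declarations in
# lockstep, so a new model id only needs to be added in one place.
GEMINI_MODELS: list[GeminiModels] = list(get_args(GeminiModels))

assert "gemini-3-flash-preview" in GEMINI_MODELS
```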
View File

@@ -0,0 +1,60 @@
interactions:
- request:
body: '{"contents": [{"parts": [{"text": "What is the capital of France?"}], "role":
"user"}], "generationConfig": {}}'
headers:
Accept:
- '*/*'
Accept-Encoding:
- gzip, deflate, zstd
Connection:
- keep-alive
Content-Length:
- '111'
Content-Type:
- application/json
user-agent:
- google-genai-sdk/1.2.0 gl-python/3.12.9
x-goog-api-client:
- google-genai-sdk/1.2.0 gl-python/3.12.9
x-goog-api-key:
- X-GOOG-API-KEY-XXX
method: POST
uri: https://generativelanguage.googleapis.com/v1beta/models/gemini-3-flash-preview:generateContent
response:
body:
string: "{\n \"candidates\": [\n {\n \"content\": {\n \"parts\":
[\n {\n \"text\": \"The capital of France is **Paris**.\"\n
\ }\n ],\n \"role\": \"model\"\n },\n \"finishReason\":
\"STOP\",\n \"index\": 0\n }\n ],\n \"usageMetadata\": {\n \"promptTokenCount\":
8,\n \"candidatesTokenCount\": 8,\n \"totalTokenCount\": 16,\n \"promptTokensDetails\":
[\n {\n \"modality\": \"TEXT\",\n \"tokenCount\": 8\n }\n
\ ]\n },\n \"modelVersion\": \"gemini-3-flash-preview\",\n
\ \"responseId\": \"xFIrafrxEOag_uMP_ayUwA1\"\n}\n"
headers:
Alt-Svc:
- h3=":443"; ma=2592000,h3-29=":443"; ma=2592000
Content-Type:
- application/json; charset=UTF-8
Date:
- Thu, 30 Jan 2026 00:08:00 GMT
Server:
- scaffolding on HTTPServer2
Server-Timing:
- gfet4t7; dur=500
Transfer-Encoding:
- chunked
Vary:
- Origin
- X-Origin
- Referer
X-Content-Type-Options:
- X-CONTENT-TYPE-XXX
X-Frame-Options:
- X-FRAME-OPTIONS-XXX
X-XSS-Protection:
- '0'
status:
code: 200
message: OK
version: 1

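The cassette above is the recorded generateContent exchange that lets the new test run without network access. A minimal sketch of how such a cassette is typically replayed with vcrpy and the google-genai SDK; the cassette path, test name, and key handling are illustrative, and the project may wire this up through a pytest plugin instead:

```python
import vcr
from google import genai

# Keep real credentials out of any freshly recorded cassette; the stored
# request above already shows the key redacted to X-GOOG-API-KEY-XXX.
my_vcr = vcr.VCR(filter_headers=["x-goog-api-key"])


@my_vcr.use_cassette("cassettes/gemini_3_flash_preview.yaml")
def test_gemini_3_flash_preview_generate_content():
    # Any key works during replay; no live request is made.
    client = genai.Client(api_key="fake-key-for-replay")
    response = client.models.generate_content(
        model="gemini-3-flash-preview",
        contents="What is the capital of France?",
    )
    assert "Paris" in response.text
```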
View File

@@ -264,6 +264,7 @@ def test_validate_call_params_no_response_format():
@pytest.mark.parametrize(
"model",
[
"gemini/gemini-3-flash-preview",
"gemini/gemini-3-pro-preview",
"gemini/gemini-2.0-flash-thinking-exp-01-21",
"gemini/gemini-2.0-flash-001",