diff --git a/src/crewai/cli/constants.py b/src/crewai/cli/constants.py
index 306f1108b..9ec641dd8 100644
--- a/src/crewai/cli/constants.py
+++ b/src/crewai/cli/constants.py
@@ -103,6 +103,12 @@ ENV_VARS = {
             "key_name": "SAMBANOVA_API_KEY",
         }
     ],
+    "deepseek": [
+        {
+            "prompt": "Enter your DeepSeek API key (press Enter to skip)",
+            "key_name": "DEEPSEEK_API_KEY",
+        }
+    ],
 }
 
 
@@ -119,6 +125,7 @@ PROVIDERS = [
     "azure",
     "cerebras",
     "sambanova",
+    "deepseek",
 ]
 
 MODELS = {
@@ -314,6 +321,13 @@ MODELS = {
         "sambanova/Meta-Llama-3.2-3B-Instruct",
         "sambanova/Meta-Llama-3.2-1B-Instruct",
     ],
+    "deepseek": [
+        "deepseek/deepseek-chat",
+        "deepseek/deepseek-coder",
+        "deepseek/deepseek-r1",
+        "deepseek/deepseek-v3",
+        "deepseek/deepseek-reasoner",
+    ],
 }
 
 DEFAULT_LLM_MODEL = "gpt-4o-mini"
diff --git a/src/crewai/llm.py b/src/crewai/llm.py
index 88edb5ec5..6b069a215 100644
--- a/src/crewai/llm.py
+++ b/src/crewai/llm.py
@@ -154,6 +154,11 @@ LLM_CONTEXT_WINDOW_SIZES = {
     "gemini/gemma-3-27b-it": 128000,
     # deepseek
     "deepseek-chat": 128000,
+    "deepseek/deepseek-chat": 128000,
+    "deepseek/deepseek-coder": 128000,
+    "deepseek/deepseek-r1": 128000,
+    "deepseek/deepseek-v3": 128000,
+    "deepseek/deepseek-reasoner": 128000,
     # groq
     "gemma2-9b-it": 8192,
     "gemma-7b-it": 8192,
diff --git a/tests/test_deepseek_integration.py b/tests/test_deepseek_integration.py
new file mode 100644
index 000000000..0c3b2d06d
--- /dev/null
+++ b/tests/test_deepseek_integration.py
@@ -0,0 +1,115 @@
+"""Tests for DeepSeek integration in CrewAI."""
+
+import pytest
+from unittest.mock import patch, MagicMock
+
+from crewai.llm import LLM
+from crewai.cli.constants import ENV_VARS, PROVIDERS, MODELS
+
+
+class TestDeepSeekIntegration:
+    """Test DeepSeek integration in CrewAI."""
+
+    def test_deepseek_in_providers(self):
+        """Test that DeepSeek is included in the providers list."""
+        assert "deepseek" in PROVIDERS
+
+    def test_deepseek_in_env_vars(self):
+        """Test that DeepSeek API key configuration is in ENV_VARS."""
+        assert "deepseek" in ENV_VARS
+        deepseek_config = ENV_VARS["deepseek"]
+        assert len(deepseek_config) == 1
+        assert deepseek_config[0]["key_name"] == "DEEPSEEK_API_KEY"
+        assert "DeepSeek API key" in deepseek_config[0]["prompt"]
+
+    def test_deepseek_in_models(self):
+        """Test that DeepSeek models are included in the models dictionary."""
+        assert "deepseek" in MODELS
+        deepseek_models = MODELS["deepseek"]
+        expected_models = [
+            "deepseek/deepseek-chat",
+            "deepseek/deepseek-coder",
+            "deepseek/deepseek-r1",
+            "deepseek/deepseek-v3",
+            "deepseek/deepseek-reasoner",
+        ]
+        for model in expected_models:
+            assert model in deepseek_models
+
+    def test_llm_creation_with_deepseek_chat(self):
+        """Test creating LLM instance with deepseek-chat model."""
+        llm = LLM(model="deepseek-chat")
+        assert llm.model == "deepseek-chat"
+        assert llm.get_context_window_size() > 0
+
+    def test_llm_creation_with_deepseek_prefix(self):
+        """Test creating LLM instance with deepseek/ prefix."""
+        llm = LLM(model="deepseek/deepseek-chat")
+        assert llm.model == "deepseek/deepseek-chat"
+        assert llm._get_custom_llm_provider() == "deepseek"
+        assert llm.get_context_window_size() > 0
+
+    def test_deepseek_context_window_sizes(self):
+        """Test that all DeepSeek models have context window sizes defined."""
+        from crewai.llm import LLM_CONTEXT_WINDOW_SIZES
+
+        deepseek_models = [
+            "deepseek-chat",
+            "deepseek/deepseek-chat",
+            "deepseek/deepseek-coder",
+            "deepseek/deepseek-r1",
+            "deepseek/deepseek-v3",
+            "deepseek/deepseek-reasoner",
+        ]
+
+        for model in deepseek_models:
+            assert model in LLM_CONTEXT_WINDOW_SIZES
+            assert LLM_CONTEXT_WINDOW_SIZES[model] > 0
+
+    def test_deepseek_models_context_window_consistency(self):
+        """Test that DeepSeek models have consistent context window sizes."""
+        from crewai.llm import LLM_CONTEXT_WINDOW_SIZES
+
+        expected_size = 128000
+        deepseek_models = [
+            "deepseek-chat",
+            "deepseek/deepseek-chat",
+            "deepseek/deepseek-coder",
+            "deepseek/deepseek-r1",
+            "deepseek/deepseek-v3",
+            "deepseek/deepseek-reasoner",
+        ]
+
+        for model in deepseek_models:
+            assert LLM_CONTEXT_WINDOW_SIZES[model] == expected_size
+
+    @patch.dict("os.environ", {"DEEPSEEK_API_KEY": "test-key"})
+    def test_llm_with_deepseek_api_key(self):
+        """Test LLM creation with DeepSeek API key in environment."""
+        llm = LLM(model="deepseek/deepseek-chat")
+        assert llm.model == "deepseek/deepseek-chat"
+        assert llm._get_custom_llm_provider() == "deepseek"
+
+    def test_deepseek_provider_detection(self):
+        """Test that DeepSeek provider is correctly detected from model name."""
+        llm = LLM(model="deepseek/deepseek-chat")
+        provider = llm._get_custom_llm_provider()
+        assert provider == "deepseek"
+
+    def test_deepseek_vs_openrouter_provider_detection(self):
+        """Test provider detection for DeepSeek vs OpenRouter DeepSeek models."""
+        deepseek_llm = LLM(model="deepseek/deepseek-chat")
+        openrouter_llm = LLM(model="openrouter/deepseek/deepseek-chat")
+
+        assert deepseek_llm._get_custom_llm_provider() == "deepseek"
+        assert openrouter_llm._get_custom_llm_provider() == "openrouter"
+
+    def test_all_deepseek_models_can_be_instantiated(self):
+        """Test that all DeepSeek models in MODELS can be instantiated."""
+        deepseek_models = MODELS["deepseek"]
+
+        for model in deepseek_models:
+            llm = LLM(model=model)
+            assert llm.model == model
+            assert llm._get_custom_llm_provider() == "deepseek"
+            assert llm.get_context_window_size() > 0