diff --git a/docs/concepts/llms.mdx b/docs/concepts/llms.mdx
index 2aada5fac..f2f027746 100644
--- a/docs/concepts/llms.mdx
+++ b/docs/concepts/llms.mdx
@@ -535,14 +535,13 @@ In this section, you'll find detailed examples that help you select, configure,
 Set the following environment variables in your `.env` file:

 ```toml Code
-HUGGINGFACE_API_KEY=
+HF_TOKEN=
 ```

 Example usage in your CrewAI project:

 ```python Code
 llm = LLM(
-    model="huggingface/meta-llama/Meta-Llama-3.1-8B-Instruct",
-    base_url="your_api_endpoint"
+    model="huggingface/meta-llama/Meta-Llama-3.1-8B-Instruct"
 )
 ```
diff --git a/src/crewai/cli/constants.py b/src/crewai/cli/constants.py
index fec0b6384..2e7ed3728 100644
--- a/src/crewai/cli/constants.py
+++ b/src/crewai/cli/constants.py
@@ -91,6 +91,12 @@ ENV_VARS = {
             "key_name": "CEREBRAS_API_KEY",
         },
     ],
+    "huggingface": [
+        {
+            "prompt": "Enter your Huggingface API key (HF_TOKEN) (press Enter to skip)",
+            "key_name": "HF_TOKEN",
+        },
+    ],
     "sambanova": [
         {
             "prompt": "Enter your SambaNovaCloud API key (press Enter to skip)",
@@ -106,6 +112,7 @@ PROVIDERS = [
     "gemini",
     "nvidia_nim",
     "groq",
+    "huggingface",
     "ollama",
     "watson",
     "bedrock",
@@ -270,6 +277,12 @@ MODELS = {
         "bedrock/mistral.mistral-7b-instruct-v0:2",
         "bedrock/mistral.mixtral-8x7b-instruct-v0:1",
     ],
+    "huggingface": [
+        "huggingface/meta-llama/Meta-Llama-3.1-8B-Instruct",
+        "huggingface/mistralai/Mixtral-8x7B-Instruct-v0.1",
+        "huggingface/tiiuae/falcon-180B-chat",
+        "huggingface/google/gemma-7b-it",
+    ],
     "sambanova": [
         "sambanova/Meta-Llama-3.3-70B-Instruct",
         "sambanova/QwQ-32B-Preview",
diff --git a/tests/cli/test_constants.py b/tests/cli/test_constants.py
new file mode 100644
index 000000000..61d8e069b
--- /dev/null
+++ b/tests/cli/test_constants.py
@@ -0,0 +1,23 @@
+import pytest
+
+from crewai.cli.constants import ENV_VARS, MODELS, PROVIDERS
+
+
+def test_huggingface_in_providers():
+    """Test that Huggingface is in the PROVIDERS list."""
+    assert "huggingface" in PROVIDERS
+
+
+def test_huggingface_env_vars():
+    """Test that Huggingface environment variables are properly configured."""
+    assert "huggingface" in ENV_VARS
+    assert any(
+        detail.get("key_name") == "HF_TOKEN"
+        for detail in ENV_VARS["huggingface"]
+    )
+
+
+def test_huggingface_models():
+    """Test that Huggingface models are properly configured."""
+    assert "huggingface" in MODELS
+    assert len(MODELS["huggingface"]) > 0
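
For reviewers, a minimal usage sketch (not part of the diff) showing how the new `huggingface` provider entry is exercised once `HF_TOKEN` is set, mirroring the updated `docs/concepts/llms.mdx` snippet. The runtime guard and its error message are illustrative additions, not code from this PR:

```python
import os

from crewai import LLM

# The updated docs assume HF_TOKEN is supplied via the environment / .env file;
# this guard (illustrative only) just makes that assumption explicit.
if "HF_TOKEN" not in os.environ:
    raise RuntimeError("Set HF_TOKEN before creating a Hugging Face-backed LLM")

# Model ID taken from the new MODELS["huggingface"] list in constants.py;
# note that no base_url is passed, matching the docs change above.
llm = LLM(model="huggingface/meta-llama/Meta-Llama-3.1-8B-Instruct")
```

The resulting `llm` can be passed to an `Agent` as usual; the only PR-specific parts are the `HF_TOKEN` variable name and the `huggingface/...` model prefix.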