Fix: Add LLM configuration examples and .env.example to crew templates

- Add .env.example template file with LLM configuration examples
- Update crew.py template to import LLM and os for configuration
- Update README with comprehensive LLM configuration documentation
- Update create_crew.py to include .env.example in template files
- Add test to verify .env.example is included in generated templates

Fixes #3816

Co-Authored-By: João <joao@crewai.com>
This commit is contained in:
Devin AI
2025-10-31 03:56:16 +00:00
parent 2e9eb8c32d
commit 888a1a9c77
5 changed files with 95 additions and 16 deletions

View File

@@ -121,6 +121,7 @@ def copy_template_files(folder_path, name, class_name, parent_folder):
root_template_files = (
[
".gitignore",
".env.example",
"pyproject.toml",
"README.md",
"knowledge/user_preference.txt",
@@ -242,7 +243,7 @@ def create_crew(name, provider=None, skip_provider=False, parent_folder=None):
templates_dir = package_dir / "templates" / "crew"
root_template_files = (
[".gitignore", "pyproject.toml", "README.md", "knowledge/user_preference.txt"]
[".gitignore", ".env.example", "pyproject.toml", "README.md", "knowledge/user_preference.txt"]
if not parent_folder
else []
)

View File

@@ -0,0 +1,21 @@
# LLM Configuration
# The MODEL variable is set during project creation based on your provider selection
# You can change it to any supported model from your chosen provider
MODEL=gpt-4o-mini
# API Keys
# Add your API key for the provider you're using
OPENAI_API_KEY=your_openai_api_key_here
# ANTHROPIC_API_KEY=your_anthropic_api_key_here
# GEMINI_API_KEY=your_gemini_api_key_here
# GROQ_API_KEY=your_groq_api_key_here
# Custom Provider Configuration (optional)
# For custom providers like DeepSeek, you may need to set a base URL
# Example for DeepSeek:
# MODEL=deepseek-chat
# OPENAI_API_KEY=your_deepseek_api_key
# OPENAI_API_BASE=https://api.deepseek.com
# For other custom providers, refer to the LiteLLM documentation:
# https://docs.litellm.ai/docs/providers

View File

@@ -20,7 +20,37 @@ crewai install
```
### Customizing
**Add your `OPENAI_API_KEY` into the `.env` file**
**Configure your LLM**
1. Copy `.env.example` to `.env` and add your API keys
2. The `MODEL` variable in `.env` was set during project creation
3. You can change the model or add custom LLM configuration in your agents
Example LLM configurations:
```python
from crewai import LLM
# Using environment variables (recommended)
llm = LLM(model=os.getenv("MODEL"))
# OpenAI
llm = LLM(model="gpt-4o-mini", temperature=0.7)
# Anthropic
llm = LLM(model="claude-3-5-sonnet-20240620")
# Custom provider (e.g., DeepSeek)
llm = LLM(
model="deepseek-chat",
api_key=os.getenv("OPENAI_API_KEY"),
base_url="https://api.deepseek.com"
)
```
For more LLM configuration options, see: https://docs.crewai.com/concepts/llms
**Customize your crew**
- Modify `src/{{folder_name}}/config/agents.yaml` to define your agents
- Modify `src/{{folder_name}}/config/tasks.yaml` to define your tasks

View File

@@ -1,10 +1,8 @@
from crewai import Agent, Crew, Process, Task
from crewai import Agent, Crew, Process, Task, LLM
from crewai.project import CrewBase, agent, crew, task
from crewai.agents.agent_builder.base_agent import BaseAgent
from typing import List
# If you want to run a snippet of code before or after the crew starts,
# you can use the @before_kickoff and @after_kickoff decorators
# https://docs.crewai.com/concepts/crews#example-crew-class-with-decorators
import os
@CrewBase
class {{crew_name}}():
@@ -13,24 +11,18 @@ class {{crew_name}}():
agents: List[BaseAgent]
tasks: List[Task]
# Learn more about YAML configuration files here:
# Agents: https://docs.crewai.com/concepts/agents#yaml-configuration-recommended
# Tasks: https://docs.crewai.com/concepts/tasks#yaml-configuration-recommended
# If you would like to add tools to your agents, you can learn more about it here:
# https://docs.crewai.com/concepts/agents#agent-tools
@agent
def researcher(self) -> Agent:
return Agent(
config=self.agents_config['researcher'], # type: ignore[index]
verbose=True
config=self.agents_config['researcher'],
verbose=True,
)
@agent
def reporting_analyst(self) -> Agent:
return Agent(
config=self.agents_config['reporting_analyst'], # type: ignore[index]
verbose=True
config=self.agents_config['reporting_analyst'],
verbose=True,
)
# To learn more about structured task outputs,

View File

@@ -328,3 +328,38 @@ def test_env_vars_are_uppercased_in_env_file(
env_file_path = crew_path / ".env"
content = env_file_path.read_text()
assert "MODEL=" in content
@mock.patch("crewai.cli.create_crew.copy_template")
@mock.patch("crewai.cli.create_crew.write_env_file")
@mock.patch("crewai.cli.create_crew.load_env_vars")
def test_create_crew_includes_env_example_file(
mock_load_env, mock_write_env, mock_copy_template, temp_dir
):
mock_load_env.return_value = {}
with tempfile.TemporaryDirectory() as work_dir:
with mock.patch(
"crewai.cli.create_crew.create_folder_structure"
) as mock_create_folder:
mock_folder_path = Path(work_dir) / "test_project"
mock_create_folder.return_value = (
mock_folder_path,
"test_project",
"TestProject",
)
create_crew("test-project", skip_provider=True)
copy_calls = mock_copy_template.call_args_list
env_example_copied = False
for call in copy_calls:
args = call[0]
if len(args) >= 1:
src_file = args[0]
if ".env.example" in str(src_file):
env_example_copied = True
break
assert env_example_copied, ".env.example should be copied to the project"