mirror of https://github.com/crewAIInc/crewAI.git
synced 2026-02-06 05:58:15 +00:00

Compare commits: add-agents...devin/1769 (1 commit)

Commit f34110fbd1
@@ -25,6 +25,35 @@ For file-based Knowledge Sources, make sure to place your files in a `knowledge`

Also, use relative paths from the `knowledge` directory when creating the source.
</Tip>

### Skills.md for Crews (CrewBase)

When using the `@CrewBase` decorator and project layout (e.g. from `crewai create crew`), you can add **Skills.md** files as crew-level knowledge sources. Place one markdown file per skill under:

```
src/<project>/.agents/<skill_name>/Skills.md
```

Each `Skills.md` file is loaded as a knowledge source so the crew can query it via RAG at runtime. Use `get_skills_knowledge_sources()` when building your crew:

```python
from crewai import Crew, Process
from crewai.project import CrewBase, agent, crew, task


@CrewBase
class MyCrew:
    # ...

    @crew
    def crew(self) -> Crew:
        return Crew(
            agents=self.agents,
            tasks=self.tasks,
            process=Process.sequential,
            knowledge_sources=self.get_skills_knowledge_sources(),
        )
```

You can combine skills with other knowledge sources: `knowledge_sources=[*self.get_skills_knowledge_sources(), other_source]`. If the `.agents` directory is missing or contains no `Skills.md` files, `get_skills_knowledge_sources()` returns an empty list. To use a directory other than `.agents`, set the class attribute `skills_directory = "my_skills"`. Skills.md support requires the `docling` package (`uv add docling`).
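For instance, a crew that mixes discovered skills with an ad-hoc string source might look like the sketch below (the `@agent`/`@task` definitions are elided; `StringKnowledgeSource` is a standard crewAI source, though its exact import path here is stated from memory, not from this diff):

```python
from crewai import Crew, Process
from crewai.knowledge.source.string_knowledge_source import StringKnowledgeSource
from crewai.project import CrewBase, crew


@CrewBase
class MySkilledCrew:
    skills_directory = "my_skills"  # optional override of the default ".agents"

    # ... @agent and @task definitions elided for brevity ...

    @crew
    def crew(self) -> Crew:
        other_source = StringKnowledgeSource(content="House style: cite all sources.")
        return Crew(
            agents=self.agents,
            tasks=self.tasks,
            process=Process.sequential,
            # Skills first; get_skills_knowledge_sources() returns [] harmlessly
            # if my_skills/ is absent or docling is not installed.
            knowledge_sources=[*self.get_skills_knowledge_sources(), other_source],
        )
```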
### Vector store (RAG) client configuration

CrewAI exposes a provider-neutral RAG client abstraction for vector stores. The default provider is ChromaDB, and Qdrant is supported as well. You can switch providers using configuration utilities.
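The configuration utilities themselves are not part of this diff, so the following is a hypothetical sketch only; the module paths and class names are assumptions for illustration:

```python
# Hypothetical sketch -- these imports and names are assumed, not taken from this diff.
from crewai.rag.config.utils import set_rag_config  # assumed helper
from crewai.rag.qdrant.config import QdrantConfig   # assumed config class

# Point CrewAI's RAG client at Qdrant instead of the ChromaDB default.
set_rag_config(QdrantConfig())
```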
@@ -129,20 +129,25 @@ PROVIDERS = [

MODELS = {
    "openai": [
        "gpt-4",
        "gpt-4.1",
        "gpt-4.1-mini-2025-04-14",
        "gpt-4.1-nano-2025-04-14",
        "gpt-4o",
        "gpt-4o-mini",
        "gpt-4-turbo",
        "gpt-4.1",
        "gpt-4.1-mini",
        "gpt-4.1-nano",
        "o1",
        "o1-mini",
        "o1-preview",
        "o3",
        "o3-mini",
        "o4-mini",
    ],
    "anthropic": [
        "claude-3-5-sonnet-20240620",
        "claude-3-sonnet-20240229",
        "claude-sonnet-4-5-20250514",
        "claude-3-7-sonnet-20250219",
        "claude-3-5-sonnet-20241022",
        "claude-3-5-haiku-20241022",
        "claude-3-opus-20240229",
        "claude-3-haiku-20240307",
    ],
    "gemini": [
        "gemini/gemini-3-pro-preview",

@@ -230,13 +235,15 @@ MODELS = {
        "nvidia_nim/baichuan-inc/baichuan2-13b-chat",
    ],
    "groq": [
        "groq/llama-3.3-70b-versatile",
        "groq/llama-3.3-70b-specdec",
        "groq/llama-3.1-8b-instant",
        "groq/llama-3.1-70b-versatile",
        "groq/llama-3.1-405b-reasoning",
        "groq/llama-3.2-3b-preview",
        "groq/llama-3.2-1b-preview",
        "groq/mixtral-8x7b-32768",
        "groq/gemma2-9b-it",
        "groq/gemma-7b-it",
    ],
    "ollama": ["ollama/llama3.1", "ollama/mixtral"],
    "ollama": ["ollama/llama3.2", "ollama/llama3.3", "ollama/mixtral", "ollama/deepseek-r1"],
    "watson": [
        "watsonx/meta-llama/llama-3-1-70b-instruct",
        "watsonx/meta-llama/llama-3-1-8b-instruct",
@@ -110,6 +110,9 @@ def create_folder_structure(name, parent_folder=None):
    (folder_path / "src" / folder_name).mkdir(parents=True)
    (folder_path / "src" / folder_name / "tools").mkdir(parents=True)
    (folder_path / "src" / folder_name / "config").mkdir(parents=True)
    (folder_path / "src" / folder_name / ".agents" / "research").mkdir(
        parents=True
    )

    return folder_path, folder_name, class_name
@@ -154,6 +157,13 @@ def copy_template_files(folder_path, name, class_name, parent_folder):
    dst_file = src_folder / file_name
    copy_template(src_file, dst_file, name, class_name, folder_path.name)

    # Copy Skills.md from .agents/research template
    skills_src = templates_dir / ".agents" / "research" / "Skills.md"
    skills_dst = src_folder / ".agents" / "research" / "Skills.md"
    if skills_src.exists():
        skills_dst.parent.mkdir(parents=True, exist_ok=True)
        copy_template(skills_src, skills_dst, name, class_name, folder_path.name)


def create_crew(name, provider=None, skip_provider=False, parent_folder=None):
    folder_path, folder_name, class_name = create_folder_structure(name, parent_folder)
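Taken together, these two CLI changes give a freshly scaffolded crew project a layout roughly like the sketch below (only the paths touched in these hunks are shown):

```
src/<folder_name>/
├── tools/
├── config/
└── .agents/
    └── research/
        └── Skills.md   # rendered from the template, if present
```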
@@ -54,11 +54,18 @@ class {{crew_name}}():
    """Creates the {{crew_name}} crew"""
    # To learn how to add knowledge sources to your crew, check out the documentation:
    # https://docs.crewai.com/concepts/knowledge#what-is-knowledge
    # Skills.md files under .agents/<skill_name>/ are loaded via get_skills_knowledge_sources()

    skills_sources = (
        self.get_skills_knowledge_sources()
        if hasattr(self, "get_skills_knowledge_sources")
        else []
    )
    return Crew(
        agents=self.agents,  # Automatically created by the @agent decorator
        tasks=self.tasks,  # Automatically created by the @task decorator
        process=Process.sequential,
        verbose=True,
        knowledge_sources=skills_sources,
        # process=Process.hierarchical, # In case you wanna use that instead https://docs.crewai.com/how-to/Hierarchical/
    )
@@ -178,6 +178,57 @@ def _set_mcp_params(cls: type[CrewClass]) -> None:
    cls.mcp_connect_timeout = getattr(cls, "mcp_connect_timeout", 30)


def _set_skills_params(cls: type[CrewClass]) -> None:
    """Set the skills directory path for the crew class.

    Args:
        cls: Crew class to configure.
    """
    cls.skills_directory = getattr(cls, "skills_directory", ".agents")


def get_skills_knowledge_sources(self: CrewInstance) -> list[Any]:
    """Discover Skills.md files under .agents/<skill_name>/ and return them as knowledge sources.

    Looks for src/<project>/.agents/<skill_name>/Skills.md (relative to the crew class
    base_directory). Each found file is wrapped in a CrewDoclingSource so the crew can
    query it via RAG. Requires the docling package; if not installed, returns an empty list.

    Returns:
        List of knowledge sources (CrewDoclingSource instances), or empty list if
        .agents is missing, has no Skills.md files, or docling is not installed.
    """
    skills_dir_name = getattr(self, "skills_directory", ".agents")
    skills_dir = self.base_directory / skills_dir_name
    if not skills_dir.exists() or not skills_dir.is_dir():
        return []

    try:
        from crewai.knowledge.source.base_knowledge_source import BaseKnowledgeSource
        from crewai.knowledge.source.crew_docling_source import CrewDoclingSource
    except ImportError:
        logging.warning(
            "Skills.md support requires the docling package. "
            "Install it with: uv add docling"
        )
        return []

    sources: list[Any] = []
    for subdir in sorted(skills_dir.iterdir()):
        if not subdir.is_dir():
            continue
        skills_md = subdir / "Skills.md"
        if skills_md.exists():
            try:
                source = CrewDoclingSource(file_paths=[skills_md])
                sources.append(source)
            except Exception as e:
                logging.warning(
                    f"Could not create knowledge source for {skills_md}: {e}"
                )
    return sources


def _is_string_list(value: list[str] | list[BaseTool]) -> TypeGuard[list[str]]:
    """Type guard to check if list contains strings rather than BaseTool instances.
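As a quick illustration of the helper above (`MyCrew` and the paths here are examples, not from this diff), the injected method can be exercised directly, assuming `docling` is installed:

```python
from pathlib import Path

crew_instance = MyCrew()  # any @CrewBase-decorated class
crew_instance.base_directory = Path("src/my_project")

# One CrewDoclingSource per src/my_project/.agents/<skill_name>/Skills.md;
# an empty list if the directory is missing or docling is not installed.
sources = crew_instance.get_skills_knowledge_sources()
```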
@@ -731,6 +782,7 @@ _CLASS_SETUP_FUNCTIONS: tuple[Callable[[type[CrewClass]], None], ...] = (
    _set_base_directory,
    _set_config_paths,
    _set_mcp_params,
    _set_skills_params,
)

_METHODS_TO_INJECT = (
@@ -739,6 +791,7 @@ _METHODS_TO_INJECT = (
    _load_config,
    load_configurations,
    staticmethod(load_yaml),
    get_skills_knowledge_sources,
    map_all_agent_variables,
    _map_agent_variables,
    map_all_task_variables,
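The decorator applies these tuples when wiring a class; the application loop itself is not shown in this diff, so the sketch below is an assumption about how the new entries take effect:

```python
# Assumed wiring (not part of this diff): @CrewBase runs each setup function
# against the class, then attaches each helper as a method/attribute.
def _apply_crewbase(cls):
    for setup in _CLASS_SETUP_FUNCTIONS:  # now includes _set_skills_params
        setup(cls)
    for method in _METHODS_TO_INJECT:     # now includes get_skills_knowledge_sources
        name = method.__func__.__name__ if isinstance(method, staticmethod) else method.__name__
        setattr(cls, name, method)
    return cls
```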
@@ -81,8 +81,10 @@ class CrewInstance(Protocol):
    tasks_config: dict[str, Any]
    mcp_server_params: Any
    mcp_connect_timeout: int
    skills_directory: str

    def load_configurations(self) -> None: ...
    def get_skills_knowledge_sources(self) -> list[Any]: ...
    def map_all_agent_variables(self) -> None: ...
    def map_all_task_variables(self) -> None: ...
    def close_mcp_server(self, instance: Self, outputs: CrewOutput) -> CrewOutput: ...

@@ -122,8 +124,10 @@ class CrewClass(Protocol):
    original_tasks_config_path: str
    mcp_server_params: Any
    mcp_connect_timeout: int
    skills_directory: str
    close_mcp_server: Callable[..., Any]
    get_mcp_tools: Callable[..., list[BaseTool]]
    get_skills_knowledge_sources: Callable[..., list[Any]]
    _load_config: Callable[..., dict[str, Any]]
    load_configurations: Callable[..., None]
    load_yaml: Callable[..., dict[str, Any]]
@@ -148,10 +148,7 @@ def _llm_via_environment_or_fallback() -> LLM | None:
    "AWS_SECRET_ACCESS_KEY",
    "AWS_REGION_NAME",
]
if "/" in model_name:
    set_provider = model_name.partition("/")[0]
else:
    set_provider = LLM._infer_provider_from_model(model_name)
set_provider = model_name.partition("/")[0] if "/" in model_name else "openai"

if set_provider in ENV_VARS:
    env_vars_for_provider = ENV_VARS[set_provider]
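Note the behavior change in the replacement one-liner: a model name without a provider prefix now falls back to "openai" instead of being inferred via `LLM._infer_provider_from_model`. A quick sketch of the two branches:

```python
# Behavior of the new fallback (plain Python, no crewAI imports needed).
model_name = "groq/llama-3.3-70b-versatile"
provider = model_name.partition("/")[0] if "/" in model_name else "openai"
print(provider)  # "groq"

model_name = "claude-3-5-haiku-20241022"  # no "/" prefix
provider = model_name.partition("/")[0] if "/" in model_name else "openai"
print(provider)  # "openai" -- previously inference from the model name would run
```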
@@ -18,3 +18,68 @@ def test_huggingface_models():
    """Test that Huggingface models are properly configured."""
    assert "huggingface" in MODELS
    assert len(MODELS["huggingface"]) > 0


def test_openai_models_include_latest():
    """Test that OpenAI models include the latest models."""
    assert "openai" in MODELS
    openai_models = MODELS["openai"]
    assert len(openai_models) > 0
    assert "gpt-4o" in openai_models
    assert "gpt-4o-mini" in openai_models
    assert "o1" in openai_models
    assert "o3" in openai_models
    assert "o3-mini" in openai_models


def test_anthropic_models_include_latest():
    """Test that Anthropic models include the latest Claude models."""
    assert "anthropic" in MODELS
    anthropic_models = MODELS["anthropic"]
    assert len(anthropic_models) > 0
    assert "claude-3-7-sonnet-20250219" in anthropic_models
    assert "claude-3-5-sonnet-20241022" in anthropic_models
    assert "claude-3-5-haiku-20241022" in anthropic_models


def test_groq_models_include_latest():
    """Test that Groq models include the latest Llama models."""
    assert "groq" in MODELS
    groq_models = MODELS["groq"]
    assert len(groq_models) > 0
    assert "groq/llama-3.3-70b-versatile" in groq_models


def test_ollama_models_include_latest():
    """Test that Ollama models include the latest models."""
    assert "ollama" in MODELS
    ollama_models = MODELS["ollama"]
    assert len(ollama_models) > 0
    assert "ollama/llama3.2" in ollama_models
    assert "ollama/llama3.3" in ollama_models


def test_all_providers_have_models():
    """Test that all providers in PROVIDERS have corresponding models in MODELS."""
    providers_with_models = [
        "openai",
        "anthropic",
        "gemini",
        "nvidia_nim",
        "groq",
        "ollama",
        "watson",
        "bedrock",
        "huggingface",
        "sambanova",
    ]
    for provider in providers_with_models:
        assert provider in MODELS, f"Provider {provider} should have models defined"
        assert len(MODELS[provider]) > 0, f"Provider {provider} should have at least one model"


def test_all_providers_have_env_vars_or_defaults():
    """Test that all providers have environment variable configurations."""
    for provider in PROVIDERS:
        if provider in ENV_VARS:
            assert len(ENV_VARS[provider]) > 0, f"Provider {provider} should have env var config"
@@ -1,3 +1,5 @@
import tempfile
from pathlib import Path
from typing import Any, ClassVar
from unittest.mock import Mock, patch
@@ -382,3 +384,93 @@ def test_internal_crew_with_mcp():
    adapter_mock.assert_called_once_with(
        {"host": "localhost", "port": 8000}, connect_timeout=120
    )


def test_get_skills_knowledge_sources_discovery():
    """get_skills_knowledge_sources discovers .agents/<skill_name>/Skills.md and returns sources."""

    @CrewBase
    class SkillsCrew:
        agents_config = "nonexistent/agents.yaml"
        tasks_config = "nonexistent/tasks.yaml"
        agents: list[BaseAgent]
        tasks: list[Task]

        @agent
        def researcher(self):
            return Agent(
                role="Researcher",
                goal="Research",
                backstory="Expert researcher",
            )

        @task
        def research_task(self):
            return Task(
                description="Research", expected_output="Report", agent=self.researcher()
            )

        @crew
        def crew(self):
            return Crew(agents=self.agents, tasks=self.tasks, verbose=True)

    with tempfile.TemporaryDirectory() as tmp:
        base = Path(tmp)
        (base / ".agents" / "skill_a").mkdir(parents=True)
        (base / ".agents" / "skill_b").mkdir(parents=True)
        (base / ".agents" / "skill_a" / "Skills.md").write_text("# Skill A")
        (base / ".agents" / "skill_b" / "Skills.md").write_text("# Skill B")

        crew_instance = SkillsCrew()
        crew_instance.base_directory = base
        sources = crew_instance.get_skills_knowledge_sources()

        # With docling installed we get 2 sources; without it we get []
        if len(sources) == 2:
            paths = []
            for s in sources:
                paths.extend(getattr(s, "file_paths", []) or getattr(s, "safe_file_paths", []))
            path_strs = {str(Path(p).resolve()) for p in paths}
            expected_a = str((base / ".agents" / "skill_a" / "Skills.md").resolve())
            expected_b = str((base / ".agents" / "skill_b" / "Skills.md").resolve())
            assert expected_a in path_strs
            assert expected_b in path_strs
        else:
            assert len(sources) == 0


def test_get_skills_knowledge_sources_missing_dir_returns_empty():
    """get_skills_knowledge_sources returns [] when .agents does not exist."""

    @CrewBase
    class NoSkillsCrew:
        agents_config = "nonexistent/agents.yaml"
        tasks_config = "nonexistent/tasks.yaml"
        agents: list[BaseAgent]
        tasks: list[Task]

        @agent
        def researcher(self):
            return Agent(
                role="Researcher",
                goal="Research",
                backstory="Expert researcher",
            )

        @task
        def research_task(self):
            return Task(
                description="Research", expected_output="Report", agent=self.researcher()
            )

        @crew
        def crew(self):
            return Crew(agents=self.agents, tasks=self.tasks, verbose=True)

    with tempfile.TemporaryDirectory() as tmp:
        base = Path(tmp)
        crew_instance = NoSkillsCrew()
        crew_instance.base_directory = base
        sources = crew_instance.get_skills_knowledge_sources()

        assert sources == []
@@ -149,6 +149,7 @@ members = [
    "lib/crewai-tools",
    "lib/devtools",
    "lib/crewai-files",
    "testing_agents",
]