Mirror of https://github.com/crewAIInc/crewAI.git, synced 2026-01-07 15:18:29 +00:00
Compare commits
1 commit
devin/1746 ... devin/1744
| Author | SHA1 | Date |
|---|---|---|
|  | 59044b6512 |  |
```diff
@@ -219,7 +219,11 @@ class Flow(Generic[T], metaclass=FlowMeta):
         """Returns the list of all outputs from executed methods."""
         return self._method_outputs
 
-    def _initialize_state(self, inputs: Dict[str, Any]) -> None:
+    def _initialize_state(self, inputs: Optional[Dict[str, Any]] = None) -> None:
+        """Initialize the state of the flow."""
+        if inputs is None:
+            return
+
         if isinstance(self._state, BaseModel):
             # Structured state
             try:
@@ -245,6 +249,8 @@ class Flow(Generic[T], metaclass=FlowMeta):
             self._state.update(inputs)
         else:
             raise TypeError("State must be a BaseModel instance or a dictionary.")
 
+        self._interpolate_inputs_in_crew(inputs)
+
     def kickoff(self, inputs: Optional[Dict[str, Any]] = None) -> Any:
         self.event_emitter.send(
@@ -406,6 +412,11 @@ class Flow(Generic[T], metaclass=FlowMeta):
 
             traceback.print_exc()
 
+    def _interpolate_inputs_in_crew(self, inputs: Dict[str, Any]) -> None:
+        """Interpolate inputs in the crew's tasks and agents if a crew is present."""
+        if hasattr(self, 'crew') and self.crew:
+            self.crew._interpolate_inputs(inputs)
+
     def plot(self, filename: str = "crewai_flow") -> None:
         self._telemetry.flow_plotting_span(
             self.__class__.__name__, list(self._methods.keys())
```
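Read together, the three Flow hunks above make `_initialize_state` tolerate `inputs=None` and add an `_interpolate_inputs_in_crew` hook that `kickoff` uses to push its `inputs` into an attached crew. Below is a minimal sketch of how that looks from the caller's side; it closely mirrors the test added later in this diff, and the class name, agent/task strings, and model are illustrative only.

```python
from crewai import Agent, Crew, Task
from crewai.flow.flow import Flow, start
from crewai.llm import LLM


class ReportFlow(Flow):
    def __init__(self):
        super().__init__()
        agent = Agent(
            role="Test Agent",
            goal="Test Goal",
            backstory="Test Backstory",
            llm=LLM(model="gpt-4o-mini"),
        )
        task = Task(
            description="Process data about {topic}",
            expected_output="Information about {topic}",
            agent=agent,
        )
        # _interpolate_inputs_in_crew only acts when a `crew` attribute is present
        self.crew = Crew(agents=[agent], tasks=[task])

    @start()
    def begin(self):
        pass


ReportFlow().kickoff()  # inputs=None: _initialize_state returns early, nothing is interpolated

flow = ReportFlow()
flow.kickoff(inputs={"topic": "artificial intelligence"})
# The {topic} placeholders in the crew's task fields have been filled in
assert flow.crew.tasks[0].description == "Process data about artificial intelligence"
```

Since the crew is only interpolated and never kicked off here, the sketch should not trigger any LLM calls.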
```diff
@@ -135,42 +135,13 @@ class EmbeddingConfigurator:
         )
 
     @staticmethod
-    def _normalize_api_url(api_url: str) -> str:
-        """
-        Normalize API URL by ensuring it has a protocol.
-
-        Args:
-            api_url: The API URL to normalize
-
-        Returns:
-            Normalized URL with protocol (defaults to http:// if missing)
-        """
-        if not (api_url.startswith("http://") or api_url.startswith("https://")):
-            return f"http://{api_url}"
-        return api_url
-
-    @staticmethod
-    def _configure_huggingface(config: dict, model_name: str):
-        """
-        Configure Huggingface embedding function with the provided config.
-
-        Args:
-            config: Configuration dictionary for the Huggingface embedder
-            model_name: Name of the model to use
-
-        Returns:
-            Configured HuggingFaceEmbeddingServer instance
-        """
+    def _configure_huggingface(config, model_name):
         from chromadb.utils.embedding_functions.huggingface_embedding_function import (
             HuggingFaceEmbeddingServer,
         )
 
-        api_url = config.get("api_url")
-        if api_url:
-            api_url = EmbeddingConfigurator._normalize_api_url(api_url)
-
         return HuggingFaceEmbeddingServer(
-            url=api_url,
+            url=config.get("api_url"),
         )
 
     @staticmethod
```
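The hunk above reverts the earlier URL-handling change to `EmbeddingConfigurator`: the `_normalize_api_url` helper, the typed signature, and the added docstrings are removed, and `_configure_huggingface` goes back to passing `config.get("api_url")` through untouched. For reference, here is a standalone sketch of the behavior being dropped, reconstructed from the deleted lines; the free-function name is illustrative, since in the removed code this logic lived on the class as a static method.

```python
def normalize_api_url(api_url: str) -> str:
    """Prepend http:// when the URL carries no protocol (logic of the removed helper)."""
    if not (api_url.startswith("http://") or api_url.startswith("https://")):
        return f"http://{api_url}"
    return api_url


# The behavior the (also removed) tests asserted:
assert normalize_api_url("localhost:8080/embed") == "http://localhost:8080/embed"
assert normalize_api_url("https://localhost:8080/embed") == "https://localhost:8080/embed"
assert normalize_api_url("http://localhost:8080/embed") == "http://localhost:8080/embed"
```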
```diff
@@ -322,3 +322,43 @@ def test_router_with_multiple_conditions():
 
     # final_step should run after router_and
     assert execution_order.index("log_final_step") > execution_order.index("router_and")
+
+
+def test_flow_inputs_passed_to_tasks():
+    """Test that inputs passed to Flow's kickoff method are correctly interpolated in task descriptions."""
+    from crewai import Agent, Crew, Task
+    from crewai.llm import LLM
+
+    agent = Agent(
+        role="Test Agent",
+        goal="Test Goal",
+        backstory="Test Backstory",
+        llm=LLM(model="gpt-4o-mini")
+    )
+
+    task = Task(
+        description="Process data about {topic}",
+        expected_output="Information about {topic}",
+        agent=agent
+    )
+
+    crew = Crew(
+        agents=[agent],
+        tasks=[task]
+    )
+
+    class TestFlow(Flow):
+        def __init__(self):
+            super().__init__()
+            self.crew = crew
+
+        @start()
+        def start_process(self):
+            pass
+
+    flow = TestFlow()
+    inputs = {"topic": "artificial intelligence"}
+    flow.kickoff(inputs=inputs)
+
+    assert task.description == "Process data about artificial intelligence"
+    assert task.expected_output == "Information about artificial intelligence"
```
```diff
@@ -584,84 +584,3 @@ def test_docling_source_with_local_file():
     docling_source = CrewDoclingSource(file_paths=[pdf_path])
     assert docling_source.file_paths == [pdf_path]
     assert docling_source.content is not None
-
-
-def test_huggingface_url_validation():
-    """Test that Huggingface embedder properly handles URLs without protocol."""
-    from crewai.utilities.embedding_configurator import EmbeddingConfigurator
-
-    config_missing_protocol = {
-        "api_url": "localhost:8080/embed"
-    }
-    embedding_function = EmbeddingConfigurator()._configure_huggingface(
-        config_missing_protocol, "test-model"
-    )
-    # Verify that the URL now has a protocol
-    assert embedding_function._api_url.startswith("http://")
-
-    config_with_protocol = {
-        "api_url": "https://localhost:8080/embed"
-    }
-    embedding_function = EmbeddingConfigurator()._configure_huggingface(
-        config_with_protocol, "test-model"
-    )
-    # Verify that the URL remains unchanged
-    assert embedding_function._api_url == "https://localhost:8080/embed"
-
-    config_with_other_protocol = {
-        "api_url": "http://localhost:8080/embed"
-    }
-    embedding_function = EmbeddingConfigurator()._configure_huggingface(
-        config_with_other_protocol, "test-model"
-    )
-    # Verify that the URL remains unchanged
-    assert embedding_function._api_url == "http://localhost:8080/embed"
-
-    config_no_url = {}
-    embedding_function = EmbeddingConfigurator()._configure_huggingface(
-        config_no_url, "test-model"
-    )
-    # Verify that no exception is raised when URL is None
-    assert embedding_function._api_url == 'None'
-
-
-def test_huggingface_missing_protocol_with_json_source():
-    """Test that JSONKnowledgeSource works with Huggingface embedder without URL protocol."""
-    import os
-    import json
-    import tempfile
-    from crewai.knowledge.source.json_knowledge_source import JSONKnowledgeSource
-    from crewai.utilities.embedding_configurator import EmbeddingConfigurator
-
-    # Create a temporary JSON file
-    with tempfile.NamedTemporaryFile(mode='w', suffix='.json', delete=False) as temp:
-        json.dump({"test": "data", "nested": {"value": 123}}, temp)
-        json_path = temp.name
-
-    # Test that the URL validation works in the embedder configurator
-    config = {
-        "api_url": "localhost:8080/embed" # Missing protocol
-    }
-    embedding_function = EmbeddingConfigurator()._configure_huggingface(
-        config, "test-model"
-    )
-    # Verify that the URL now has a protocol
-    assert embedding_function._api_url.startswith("http://")
-
-    os.unlink(json_path)
-
-
-def test_huggingface_missing_protocol_with_string_source():
-    """Test that StringKnowledgeSource works with Huggingface embedder without URL protocol."""
-    from crewai.knowledge.source.string_knowledge_source import StringKnowledgeSource
-    from crewai.utilities.embedding_configurator import EmbeddingConfigurator
-
-    # Test that the URL validation works in the embedder configurator
-    config = {
-        "api_url": "localhost:8080/embed" # Missing protocol
-    }
-    embedding_function = EmbeddingConfigurator()._configure_huggingface(
-        config, "test-model"
-    )
-    # Verify that the URL now has a protocol
-    assert embedding_function._api_url.startswith("http://")
```
```diff
@@ -1,6 +0,0 @@
-{
-    "test": "data",
-    "nested": {
-        "value": 123
-    }
-}
```