Compare commits

..

4 Commits

Author SHA1 Message Date
Devin AI
bde394024f Fix: Format imports in test file to fix lint error
Co-Authored-By: Joe Moura <joao@crewai.com>
2025-04-16 12:09:46 +00:00
Devin AI
878bbef0e2 Fix: Update import order in test file to fix lint error
Co-Authored-By: Joe Moura <joao@crewai.com>
2025-04-16 12:08:20 +00:00
Devin AI
71569a20da Fix: Sort imports in test file to fix lint error
Co-Authored-By: Joe Moura <joao@crewai.com>
2025-04-16 12:07:23 +00:00
Devin AI
205e778d74 Fix: Implement add() method in BaseFileKnowledgeSource to solve PDFKnowledgeSource instantiation error
Co-Authored-By: Joe Moura <joao@crewai.com>
2025-04-16 12:05:39 +00:00
4 changed files with 30 additions and 54 deletions

View File

@@ -64,6 +64,16 @@ class BaseFileKnowledgeSource(BaseKnowledgeSource, ABC):
"""Save the documents to the storage."""
self.storage.save(self.chunks)
def add(self) -> None:
    """
    Process content from files, chunk it, and save the chunks to storage.

    Called after content has been loaded from files: each file's text is
    split into chunks via ``self._chunk_text``, accumulated on
    ``self.chunks``, and then persisted with ``self._save_documents()``.
    """
    # Only the text bodies are needed here, so iterate .values() directly
    # instead of unpacking (and discarding) the file keys (ruff PERF102).
    for text in self.content.values():
        self.chunks.extend(self._chunk_text(text))
    self._save_documents()
def convert_to_path(self, path: Union[Path, str]) -> Path:
    """Normalize *path* into a ``Path`` object.

    A string is interpreted as relative to ``KNOWLEDGE_DIRECTORY`` and
    wrapped in a ``Path``; an existing ``Path`` is returned unchanged.
    """
    if isinstance(path, str):
        return Path(KNOWLEDGE_DIRECTORY + "/" + path)
    return path

View File

@@ -92,8 +92,6 @@ def suppress_warnings():
class LLM:
MODELS_WITHOUT_STOP_SUPPORT = ["o3", "o3-mini", "o4-mini"]
def __init__(
self,
model: str,
@@ -157,7 +155,7 @@ class LLM:
"temperature": self.temperature,
"top_p": self.top_p,
"n": self.n,
"stop": self.stop if self.supports_stop_words() else None,
"stop": self.stop,
"max_tokens": self.max_tokens or self.max_completion_tokens,
"presence_penalty": self.presence_penalty,
"frequency_penalty": self.frequency_penalty,
@@ -195,19 +193,6 @@ class LLM:
return False
def supports_stop_words(self) -> bool:
"""
Determines whether the current model supports the 'stop' parameter.
This method checks if the model is in the list of models known not to support
stop words, and if not, it queries the litellm library to determine if the
model supports the 'stop' parameter.
Returns:
bool: True if the model supports stop words, False otherwise.
"""
if any(self.model.startswith(model) for model in self.MODELS_WITHOUT_STOP_SUPPORT):
return False
try:
params = get_supported_openai_params(model=self.model)
return "stop" in params

View File

@@ -0,0 +1,19 @@
from pathlib import Path
from unittest.mock import patch
import pytest
from crewai.knowledge.source.pdf_knowledge_source import PDFKnowledgeSource
# Decorators apply bottom-up, so the innermost patch (load_content) maps to
# the first mock argument and the outer patch (validate_content) to the second.
@patch('crewai.knowledge.source.base_file_knowledge_source.BaseFileKnowledgeSource.validate_content')
@patch('crewai.knowledge.source.pdf_knowledge_source.PDFKnowledgeSource.load_content')
def test_pdf_knowledge_source_instantiation(mock_load_content, mock_validate_content, tmp_path):
    """Test that PDFKnowledgeSource can be instantiated without errors."""
    # Empty mapping: no real PDF parsing should be attempted during the test.
    mock_load_content.return_value = {}
    pdf_path = tmp_path / "test.pdf"
    pdf_path.touch()  # Create the file
    # Instantiation itself is the behavior under test (see the add() fix in
    # BaseFileKnowledgeSource); the content of the PDF is irrelevant.
    pdf_source = PDFKnowledgeSource(file_paths=[pdf_path])
    assert isinstance(pdf_source, PDFKnowledgeSource)

View File

@@ -28,41 +28,3 @@ def test_llm_callback_replacement():
assert usage_metrics_1.successful_requests == 1
assert usage_metrics_2.successful_requests == 1
assert usage_metrics_1 == calc_handler_1.token_cost_process.get_summary()
class TestLLMStopWords:
    """Tests for LLM stop words functionality."""

    def test_supports_stop_words_for_o3_model(self):
        """Test that supports_stop_words returns False for o3 model."""
        # "o3" is one of the entries in LLM.MODELS_WITHOUT_STOP_SUPPORT.
        llm = LLM(model="o3")
        assert not llm.supports_stop_words()

    def test_supports_stop_words_for_o4_mini_model(self):
        """Test that supports_stop_words returns False for o4-mini model."""
        # "o4-mini" is also listed in LLM.MODELS_WITHOUT_STOP_SUPPORT.
        llm = LLM(model="o4-mini")
        assert not llm.supports_stop_words()

    def test_supports_stop_words_for_supported_model(self):
        """Test that supports_stop_words returns True for models that support stop words."""
        # gpt-4 is not in the unsupported list; the method is expected to
        # report stop-word support (via the litellm capability lookup).
        llm = LLM(model="gpt-4")
        assert llm.supports_stop_words()

    @pytest.mark.vcr(filter_headers=["authorization"])
    def test_llm_call_excludes_stop_parameter_for_unsupported_models(self, monkeypatch):
        """Test that the LLM.call method excludes the stop parameter for models that don't support it."""
        def mock_completion(**kwargs):
            # Assert on the kwargs that LLM.call forwards to litellm.completion:
            # 'stop' must be dropped for o3, while the core params remain.
            assert 'stop' not in kwargs, "Stop parameter should be excluded for o3 model"
            assert 'model' in kwargs, "Model parameter should be included"
            assert 'messages' in kwargs, "Messages parameter should be included"
            return {"choices": [{"message": {"content": "Hello, World!"}}]}
        monkeypatch.setattr("litellm.completion", mock_completion)
        llm = LLM(model="o3")
        llm.stop = ["STOP"]  # would be forwarded only if the model supported stop words
        messages = [{"role": "user", "content": "Say 'Hello, World!'"}]
        response = llm.call(messages)
        assert response == "Hello, World!"