Mirror of https://github.com/crewAIInc/crewAI.git, synced 2026-01-10 08:38:30 +00:00
* feat: add crewai-tools workspace structure
* Squashed 'temp-crewai-tools/' content from commit 9bae5633
  (git-subtree-dir: temp-crewai-tools, git-subtree-split: 9bae56339096cb70f03873e600192bd2cd207ac9)
* feat: configure crewai-tools workspace package with dependencies
* fix: apply ruff auto-formatting to crewai-tools code
* chore: update lockfile
* fix: don't allow tool tests yet
* fix: comment out extra pytest flags for now
* fix: remove conflicting conftest.py from crewai-tools tests
* fix: resolve dependency conflicts and test issues
  - Pin vcrpy to 7.0.0 to fix pytest-recording compatibility (see the sketch after this list)
  - Comment out types-requests to resolve the urllib3 conflict
  - Update the requests requirement in crewai-tools to >=2.32.0
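Because the commit above pins vcrpy 7.0.0 for pytest-recording compatibility, a cassette-based test for the same tool could look like the sketch below. This is a hypothetical example, separate from the test file reproduced further down: the test name, the assertion, and the recording workflow are assumptions, while the import path and run() signature are taken from that file.

import os

import pytest

from crewai_tools.tools.parallel_tools.parallel_search_tool import ParallelSearchTool


# Hypothetical sketch, not part of the repository. pytest-recording's vcr marker
# records the real HTTP exchange to a cassette on the first run and replays it afterwards.
@pytest.mark.vcr
def test_search_against_cassette(monkeypatch):
    # A real key is only needed when the cassette is first recorded
    # (e.g. `pytest --record-mode=once`); replays work with the placeholder fallback.
    monkeypatch.setenv("PARALLEL_API_KEY", os.environ.get("PARALLEL_API_KEY", "test"))
    tool = ParallelSearchTool()
    result = tool.run(objective="When was the UN established?")
    assert "search_id" in result  # assumes the recorded response includes a search_id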
45 lines · 1.5 KiB · Python
import json
from unittest.mock import patch
from urllib.parse import urlparse

from crewai_tools.tools.parallel_tools.parallel_search_tool import (
    ParallelSearchTool,
)


def test_requires_env_var(monkeypatch):
    monkeypatch.delenv("PARALLEL_API_KEY", raising=False)
    tool = ParallelSearchTool()
    result = tool.run(objective="test")
    assert "PARALLEL_API_KEY" in result


@patch("crewai_tools.tools.parallel_tools.parallel_search_tool.requests.post")
def test_happy_path(mock_post, monkeypatch):
    monkeypatch.setenv("PARALLEL_API_KEY", "test")

    mock_post.return_value.status_code = 200
    mock_post.return_value.json.return_value = {
        "search_id": "search_123",
        "results": [
            {
                "url": "https://www.un.org/en/about-us/history-of-the-un",
                "title": "History of the United Nations",
                "excerpts": [
                    "Four months after the San Francisco Conference ended, the United Nations officially began, on 24 October 1945..."
                ],
            }
        ],
    }

    tool = ParallelSearchTool()
    result = tool.run(
        objective="When was the UN established?", search_queries=["Founding year UN"]
    )
    data = json.loads(result)
    assert "search_id" in data
    urls = [r.get("url", "") for r in data.get("results", [])]
    # Validate host against allowed set instead of substring matching
    allowed_hosts = {"www.un.org", "un.org"}
    assert any(urlparse(u).netloc in allowed_hosts for u in urls)
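For orientation, here is a minimal sketch of how the tool under test could be wired into a crewAI agent. The Agent/Task/Crew calls follow the standard crewai API, but the role, goal, backstory, and task strings are placeholders invented for this example, and the snippet is illustrative rather than part of the test suite.

from crewai import Agent, Crew, Task

from crewai_tools.tools.parallel_tools.parallel_search_tool import ParallelSearchTool

# Illustrative wiring only; all descriptive strings below are placeholders.
researcher = Agent(
    role="Web researcher",
    goal="Answer factual questions with cited sources",
    backstory="Uses the Parallel search API to find supporting pages.",
    tools=[ParallelSearchTool()],  # expects PARALLEL_API_KEY in the environment
)

task = Task(
    description="When was the UN established?",
    expected_output="A short answer with a supporting URL.",
    agent=researcher,
)

crew = Crew(agents=[researcher], tasks=[task])
# result = crew.kickoff()  # would trigger real LLM and search calls; left commented out

In such a crew, the agent decides when to invoke the tool, and the missing-key behavior exercised by test_requires_env_var applies in exactly the same way.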