Compare commits


3 Commits

Author | SHA1 | Message | Date
Tony Kipkemboi | 90359dbbfb | Merge branch 'main' into devin/1745784489-fix-crewstructuredtool-task-compatibility | 2025-04-28 07:26:33 -07:00
Devin AI | 1133994ec7 | fix: update type annotations in agent.py for CrewStructuredTool compatibility (Co-Authored-By: Joe Moura <joao@crewai.com>) | 2025-04-27 20:21:40 +00:00
Devin AI | aa1d04af41 | fix: allow CrewStructuredTool to be used with Task's tools parameter (Co-Authored-By: Joe Moura <joao@crewai.com>) | 2025-04-27 20:15:40 +00:00
38 changed files with 110 additions and 420 deletions

View File

@@ -255,11 +255,7 @@ custom_agent = Agent(
- `response_template`: Formats agent responses
<Note>
When using custom templates, ensure that both `system_template` and `prompt_template` are defined. The `response_template` is optional but recommended for consistent output formatting.
</Note>
<Note>
When using custom templates, you can use variables like `{role}`, `{goal}`, and `{backstory}` in your templates. These will be automatically populated during execution.
When using custom templates, you can use variables like `{role}`, `{goal}`, and `{input}` in your templates. These will be automatically populated during execution.
</Note>
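
As a rough illustration of the documented template fields (the role, goal, and backstory values here are made up; the exact placeholder set differs between the two note versions above):

from crewai import Agent

# Illustrative only: placeholders such as {role} and {goal} are substituted by
# crewAI when the prompt is assembled at execution time.
agent = Agent(
    role="Research Analyst",
    goal="Summarize recent ML papers",
    backstory="You have spent years distilling research for executives.",
    system_template="You are {role}. Your goal is {goal}.",
    prompt_template="{{ .Prompt }}",
    response_template="{{ .Response }}",
)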
## Agent Tools

View File

@@ -45,7 +45,7 @@ Documentation = "https://docs.crewai.com"
Repository = "https://github.com/crewAIInc/crewAI"
[project.optional-dependencies]
tools = ["crewai-tools~=0.42.2"]
tools = ["crewai-tools~=0.42.0"]
embeddings = [
"tiktoken~=0.7.0"
]
@@ -60,7 +60,7 @@ pandas = [
openpyxl = [
"openpyxl>=3.1.5",
]
mem0 = ["mem0ai>=0.1.94"]
mem0 = ["mem0ai>=0.1.29"]
docling = [
"docling>=2.12.0",
]

View File

@@ -17,6 +17,7 @@ from crewai.security import Fingerprint
from crewai.task import Task
from crewai.tools import BaseTool
from crewai.tools.agent_tools.agent_tools import AgentTools
from crewai.tools.structured_tool import CrewStructuredTool
from crewai.utilities import Converter, Prompts
from crewai.utilities.agent_utils import (
get_tool_names,
@@ -185,7 +186,7 @@ class Agent(BaseAgent):
self,
task: Task,
context: Optional[str] = None,
tools: Optional[List[BaseTool]] = None
tools: Optional[List[Union[BaseTool, CrewStructuredTool]]] = None
) -> str:
"""Execute a task with the agent.
@@ -406,14 +407,14 @@ class Agent(BaseAgent):
)["output"]
def create_agent_executor(
self, tools: Optional[List[BaseTool]] = None, task=None
self, tools: Optional[List[Union[BaseTool, CrewStructuredTool]]] = None, task=None
) -> None:
"""Create an agent executor for the agent.
Returns:
An instance of the CrewAgentExecutor class.
"""
raw_tools: List[BaseTool] = tools or self.tools or []
raw_tools: List[Union[BaseTool, CrewStructuredTool]] = tools or self.tools or []
parsed_tools = parse_tools(raw_tools)
prompt = Prompts(
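
A minimal sketch of what the widened annotations allow, built on the CrewStructuredTool.from_function constructor exercised by the new test file further down (the function and tool name are made up):

from crewai.tools.structured_tool import CrewStructuredTool

def lookup(term: str) -> str:
    """Return a canned answer for the given term."""
    return f"Result for {term}"

# A plain function wrapped as a CrewStructuredTool; with the Union annotations
# above, it can be passed to execute_task(tools=...) or
# create_agent_executor(tools=...) alongside BaseTool instances.
search_tool = CrewStructuredTool.from_function(
    func=lookup,
    name="lookup",
    description="Looks up a term and returns a short answer",
)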

View File

@@ -1 +0,0 @@
"""LangGraph adapter for crewAI."""

View File

@@ -1 +0,0 @@
"""OpenAI agent adapters for crewAI."""

View File

@@ -5,7 +5,7 @@ description = "{{name}} using crewAI"
authors = [{ name = "Your Name", email = "you@example.com" }]
requires-python = ">=3.10,<3.13"
dependencies = [
"crewai[tools]>=0.117.1,<1.0.0"
"crewai[tools]>=0.117.0,<1.0.0"
]
[project.scripts]

View File

@@ -1 +0,0 @@
"""Poem crew template."""

View File

@@ -5,7 +5,7 @@ description = "{{name}} using crewAI"
authors = [{ name = "Your Name", email = "you@example.com" }]
requires-python = ">=3.10,<3.13"
dependencies = [
"crewai[tools]>=0.117.1,<1.0.0",
"crewai[tools]>=0.117.0,<1.0.0",
]
[project.scripts]

View File

@@ -5,7 +5,7 @@ description = "Power up your crews with {{folder_name}}"
readme = "README.md"
requires-python = ">=3.10,<3.13"
dependencies = [
"crewai[tools]>=0.117.1"
"crewai[tools]>=0.117.0"
]
[tool.crewai]

View File

@@ -1 +0,0 @@
"""Knowledge utilities for crewAI."""

View File

@@ -204,8 +204,6 @@ LLM_CONTEXT_WINDOW_SIZES = {
DEFAULT_CONTEXT_WINDOW_SIZE = 8192
CONTEXT_WINDOW_USAGE_RATIO = 0.75
OPENROUTER_PROVIDER = "openrouter"
@contextmanager
def suppress_warnings():
@@ -272,41 +270,8 @@ class LLM(BaseLLM):
callbacks: List[Any] = [],
reasoning_effort: Optional[Literal["none", "low", "medium", "high"]] = None,
stream: bool = False,
force_structured_output: bool = False,
**kwargs,
):
"""Initialize an LLM instance.
Args:
model: The language model to use.
timeout: The request timeout in seconds.
temperature: The temperature to use for sampling.
top_p: The cumulative probability for top-p sampling.
n: The number of completions to generate.
stop: A list of strings to stop generation when encountered.
max_completion_tokens: The maximum number of tokens to generate.
max_tokens: Alias for max_completion_tokens.
presence_penalty: The presence penalty to use.
frequency_penalty: The frequency penalty to use.
logit_bias: The logit bias to use.
response_format: The format to return the response in.
seed: The random seed to use.
logprobs: Whether to return log probabilities.
top_logprobs: Whether to return the top log probabilities.
base_url: The base URL to use.
api_base: Alias for base_url.
api_version: The API version to use.
api_key: The API key to use.
callbacks: A list of callbacks to use.
reasoning_effort: The reasoning effort to use (e.g., "low", "medium", "high").
stream: Whether to stream the response.
force_structured_output: When True and using OpenRouter provider, bypasses
response schema validation. Use with caution as it may lead to runtime
errors if the model doesn't actually support structured outputs.
Only use this if you're certain the model supports the expected format.
Defaults to False.
**kwargs: Additional parameters to pass to the LLM.
"""
self.model = model
self.timeout = timeout
self.temperature = temperature
@@ -331,7 +296,6 @@ class LLM(BaseLLM):
self.additional_params = kwargs
self.is_anthropic = self._is_anthropic_model(model)
self.stream = stream
self.force_structured_output = force_structured_output
litellm.drop_params = True
@@ -1027,32 +991,14 @@ class LLM(BaseLLM):
- "gemini/gemini-1.5-pro" yields "gemini"
- If no slash is present, "openai" is assumed.
"""
if self.response_format is None:
return
provider = self._get_custom_llm_provider()
# Check if we're bypassing validation for OpenRouter
is_openrouter = provider and provider.lower() == OPENROUTER_PROVIDER.lower()
is_openrouter_bypass = is_openrouter and self.force_structured_output
# Check if the model supports response schema
is_schema_supported = supports_response_schema(
if self.response_format is not None and not supports_response_schema(
model=self.model,
custom_llm_provider=provider,
)
if is_openrouter_bypass:
print(
f"Warning: Forcing structured output for OpenRouter model {self.model}. "
"Please ensure the model supports the expected response format."
)
if not (is_schema_supported or is_openrouter_bypass):
):
raise ValueError(
f"The model {self.model} does not support response_format for provider '{provider}'. "
f"Please remove response_format, use a supported model, or if you're using an "
f"OpenRouter model that supports structured output, set force_structured_output=True."
"Please remove response_format or use a supported model."
)
def supports_function_calling(self) -> bool:
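
A sketch of the stricter validation that remains after the removal (DummyAnswer is a made-up schema; whether a given model passes depends on litellm's supports_response_schema):

from pydantic import BaseModel
from crewai import LLM

class DummyAnswer(BaseModel):
    text: str

llm = LLM(model="gpt-4o-mini", response_format=DummyAnswer)
# Raises ValueError for provider/model pairs that do not support a response
# schema; with force_structured_output gone there is no bypass flag.
llm._validate_call_params()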

View File

@@ -1 +0,0 @@
"""LLM implementations for crewAI."""

View File

@@ -1 +0,0 @@
"""Third-party LLM implementations for crewAI."""

View File

@@ -1 +0,0 @@
"""Memory storage implementations for crewAI."""

View File

@@ -88,9 +88,7 @@ class Mem0Storage(Storage):
}
if params:
if isinstance(self.memory, MemoryClient):
params["output_format"] = "v1.1"
self.memory.add(value, **params)
self.memory.add(value, **params | {"output_format": "v1.1"})
def search(
self,
@@ -98,7 +96,7 @@ class Mem0Storage(Storage):
limit: int = 3,
score_threshold: float = 0.35,
) -> List[Any]:
params = {"query": query, "limit": limit, "output_format": "v1.1"}
params = {"query": query, "limit": limit}
if user_id := self._get_user_id():
params["user_id"] = user_id
@@ -118,11 +116,8 @@ class Mem0Storage(Storage):
# Discard the filters for now since we create the filters
# automatically when the crew is created.
if isinstance(self.memory, Memory):
del params["metadata"], params["output_format"]
results = self.memory.search(**params)
return [r for r in results["results"] if r["score"] >= score_threshold]
return [r for r in results if r["score"] >= score_threshold]
def _get_user_id(self) -> str:
return self._get_config().get("user_id", "")
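
The add() call above leans on Python's dict-merge operator; a quick sketch of its semantics with made-up keys:

params = {"agent_id": "Test_Agent", "metadata": {"type": "short_term"}}
merged = params | {"output_format": "v1.1"}  # Python 3.9+; right-hand side wins on clashes
assert merged["output_format"] == "v1.1"
assert "output_format" not in params         # the original dict is left untouched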

View File

@@ -8,6 +8,8 @@ from dotenv import load_dotenv
load_dotenv()
logging.basicConfig(level=logging.WARNING)
T = TypeVar("T", bound=type)
"""Base decorator for creating crew classes with configuration and function management."""

View File

@@ -40,6 +40,7 @@ from crewai.tasks.guardrail_result import GuardrailResult
from crewai.tasks.output_format import OutputFormat
from crewai.tasks.task_output import TaskOutput
from crewai.tools.base_tool import BaseTool
from crewai.tools.structured_tool import CrewStructuredTool
from crewai.utilities.config import process_config
from crewai.utilities.converter import Converter, convert_to_model
from crewai.utilities.events import (
@@ -72,7 +73,11 @@ class Task(BaseModel):
security_config: Security configuration including fingerprinting.
tools: List of tools/resources limited for task execution.
"""
model_config = {
"arbitrary_types_allowed": True,
}
__hash__ = object.__hash__ # type: ignore
logger: ClassVar[logging.Logger] = logging.getLogger(__name__)
used_tools: int = 0
@@ -118,7 +123,7 @@ class Task(BaseModel):
output: Optional[TaskOutput] = Field(
description="Task output, it's final result after being executed", default=None
)
tools: Optional[List[BaseTool]] = Field(
tools: Optional[List[Union[BaseTool, CrewStructuredTool]]] = Field(
default_factory=list,
description="Tools the agent is limited to use for this task.",
)
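
With arbitrary_types_allowed and the widened tools annotation, a CrewStructuredTool can be handed to Task directly; a small sketch mirroring the new tests near the end of this compare (function and names are illustrative):

from crewai.task import Task
from crewai.tools.structured_tool import CrewStructuredTool

def fetch(url: str) -> str:
    """Pretend to fetch a URL."""
    return f"contents of {url}"

task = Task(
    description="Fetch and summarize a page",
    expected_output="A short summary",
    tools=[CrewStructuredTool.from_function(func=fetch, name="fetch", description="Fetches a URL")],
)
assert len(task.tools) == 1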

View File

@@ -1 +0,0 @@
"""Agent tools for crewAI."""

View File

@@ -21,15 +21,17 @@ from crewai.utilities.exceptions.context_window_exceeding_exception import (
)
def parse_tools(tools: List[BaseTool]) -> List[CrewStructuredTool]:
def parse_tools(tools: List[Union[BaseTool, CrewStructuredTool]]) -> List[CrewStructuredTool]:
"""Parse tools to be used for the task."""
tools_list = []
for tool in tools:
if isinstance(tool, CrewAITool):
tools_list.append(tool.to_structured_tool())
elif isinstance(tool, CrewStructuredTool):
tools_list.append(tool)
else:
raise ValueError("Tool is not a CrewStructuredTool or BaseTool")
raise ValueError("Tool must be an instance of BaseTool or CrewStructuredTool")
return tools_list
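
A sketch of how parse_tools handles the two accepted types (assuming it is importable from crewai.utilities.agent_utils, as the agent.py import block earlier suggests; EchoTool is made up):

from crewai.tools import BaseTool
from crewai.utilities.agent_utils import parse_tools

class EchoTool(BaseTool):
    name: str = "echo"
    description: str = "Echoes its input"

    def _run(self, text: str) -> str:
        return text

# BaseTool instances are converted via to_structured_tool(); CrewStructuredTool
# instances pass through unchanged; anything else raises ValueError.
structured = parse_tools([EchoTool()])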

View File

@@ -1 +0,0 @@
"""Evaluators for crewAI."""

View File

@@ -1 +0,0 @@
"""Event utilities for crewAI."""

View File

@@ -1 +0,0 @@
"""Exceptions for crewAI."""

View File

@@ -54,12 +54,10 @@ class Prompts(BaseModel):
response_template=None,
) -> str:
"""Constructs a prompt string from specified components."""
if not system_template or not prompt_template:
# If any of the required templates are missing, fall back to the default format
if not system_template and not prompt_template:
prompt_parts = [self.i18n.slice(component) for component in components]
prompt = "".join(prompt_parts)
else:
# All templates are provided, use them
prompt_parts = [
self.i18n.slice(component)
for component in components
@@ -69,12 +67,8 @@ class Prompts(BaseModel):
prompt = prompt_template.replace(
"{{ .Prompt }}", "".join(self.i18n.slice("task"))
)
# Handle missing response_template
if response_template:
response = response_template.split("{{ .Response }}")[0]
prompt = f"{system}\n{prompt}\n{response}"
else:
prompt = f"{system}\n{prompt}"
response = response_template.split("{{ .Response }}")[0]
prompt = f"{system}\n{prompt}\n{response}"
prompt = (
prompt.replace("{goal}", self.agent.goal)
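
A small sketch of the placeholder handling visible in this hunk (hypothetical template strings):

prompt_template = "### Task\n{{ .Prompt }}"
response_template = "### Answer\n{{ .Response }}"

prompt = prompt_template.replace("{{ .Prompt }}", "Summarize the findings.")
response = response_template.split("{{ .Response }}")[0]
print(f"{prompt}\n{response}")
# ### Task
# Summarize the findings.
# ### Answer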

View File

@@ -72,54 +72,9 @@ def test_agent_creation():
assert agent.role == "test role"
assert agent.goal == "test goal"
assert agent.backstory == "test backstory"
def test_agent_with_only_system_template():
"""Test that an agent with only system_template works without errors."""
agent = Agent(
role="Test Role",
goal="Test Goal",
backstory="Test Backstory",
allow_delegation=False,
system_template="You are a test agent...",
# prompt_template is intentionally missing
)
assert agent.role == "Test Role"
assert agent.goal == "Test Goal"
assert agent.backstory == "Test Backstory"
def test_agent_with_only_prompt_template():
"""Test that an agent with only system_template works without errors."""
agent = Agent(
role="Test Role",
goal="Test Goal",
backstory="Test Backstory",
allow_delegation=False,
prompt_template="You are a test agent...",
# prompt_template is intentionally missing
)
assert agent.role == "Test Role"
assert agent.goal == "Test Goal"
assert agent.backstory == "Test Backstory"
assert agent.tools == []
def test_agent_with_missing_response_template():
"""Test that an agent with system_template and prompt_template but no response_template works without errors."""
agent = Agent(
role="Test Role",
goal="Test Goal",
backstory="Test Backstory",
allow_delegation=False,
system_template="You are a test agent...",
prompt_template="This is a test prompt...",
# response_template is intentionally missing
)
assert agent.role == "Test Role"
assert agent.goal == "Test Goal"
assert agent.backstory == "Test Backstory"
def test_agent_default_values():
agent = Agent(role="test role", goal="test goal", backstory="test backstory")
assert agent.llm.model == "gpt-4o-mini"

View File

@@ -1 +0,0 @@
"""Tests for agent adapters."""

View File

@@ -1 +0,0 @@
"""Tests for agent builder."""

View File

@@ -1 +0,0 @@
"""Tests for CLI deploy."""

View File

@@ -256,56 +256,6 @@ def test_validate_call_params_no_response_format():
llm._validate_call_params()
def test_validate_call_params_openrouter_force_structured_output():
"""
Test that force_structured_output parameter allows bypassing response schema
validation for OpenRouter models.
"""
class DummyResponse(BaseModel):
a: int
# Test with OpenRouter and force_structured_output=True
llm = LLM(
model="openrouter/deepseek/deepseek-chat",
response_format=DummyResponse,
force_structured_output=True
)
# Should not raise any error with force_structured_output=True
llm._validate_call_params()
# Test with OpenRouter and force_structured_output=False (default)
# Patch supports_response_schema to simulate an unsupported model.
with patch("crewai.llm.supports_response_schema", return_value=False):
llm = LLM(
model="openrouter/deepseek/deepseek-chat",
response_format=DummyResponse,
force_structured_output=False
)
with pytest.raises(ValueError) as excinfo:
llm._validate_call_params()
assert "does not support response_format" in str(excinfo.value)
def test_force_structured_output_bypasses_only_openrouter():
"""
Test that force_structured_output parameter only bypasses validation for
OpenRouter models and not for other providers.
"""
class DummyResponse(BaseModel):
a: int
# Test with non-OpenRouter provider and force_structured_output=True
with patch("crewai.llm.supports_response_schema", return_value=False):
llm = LLM(
model="otherprovider/model-name",
response_format=DummyResponse,
force_structured_output=True
)
with pytest.raises(ValueError) as excinfo:
llm._validate_call_params()
assert "does not support response_format" in str(excinfo.value)
@pytest.mark.vcr(filter_headers=["authorization"], filter_query_parameters=["key"])
@pytest.mark.parametrize(
"model",

View File

@@ -1 +0,0 @@
"""Tests for memory."""

View File

@@ -1,3 +1,4 @@
from unittest.mock import MagicMock, patch
import pytest
@@ -64,4 +65,4 @@ def test_save_and_search(user_memory):
with patch.object(UserMemory, 'search', return_value=expected_result) as mock_search:
find = UserMemory.search("test value", score_threshold=0.01)[0]
mock_search.assert_called_once_with("test value", score_threshold=0.01)
assert find == expected_result[0]
assert find == expected_result[0]

View File

@@ -1 +0,0 @@
"""Tests for storage."""

View File

@@ -15,7 +15,6 @@ from crewai.task import Task
class MockCrew:
def __init__(self, memory_config):
self.memory_config = memory_config
self.agents = [MagicMock(role="Test Agent")]
@pytest.fixture
@@ -108,13 +107,11 @@ def mem0_storage_with_memory_client_using_config_from_crew(mock_mem0_memory_clie
@pytest.fixture
def mem0_storage_with_memory_client_using_explictly_config(mock_mem0_memory_client, mock_mem0_memory):
def mem0_storage_with_memory_client_using_explictly_config(mock_mem0_memory_client):
"""Fixture to create a Mem0Storage instance with mocked dependencies"""
# We need to patch both MemoryClient and Memory to prevent actual initialization
with patch.object(MemoryClient, "__new__", return_value=mock_mem0_memory_client), \
patch.object(Memory, "__new__", return_value=mock_mem0_memory):
# We need to patch the MemoryClient before it's instantiated
with patch.object(MemoryClient, "__new__", return_value=mock_mem0_memory_client):
crew = MockCrew(
memory_config={
"provider": "mem0",
@@ -158,82 +155,3 @@ def test_mem0_storage_with_explict_config(
mem0_storage_with_memory_client_using_explictly_config.memory_config
== expected_config
)
def test_save_method_with_memory_oss(mem0_storage_with_mocked_config):
"""Test save method for different memory types"""
mem0_storage, _, _ = mem0_storage_with_mocked_config
mem0_storage.memory.add = MagicMock()
# Test short_term memory type (already set in fixture)
test_value = "This is a test memory"
test_metadata = {"key": "value"}
mem0_storage.save(test_value, test_metadata)
mem0_storage.memory.add.assert_called_once_with(
test_value,
agent_id="Test_Agent",
infer=False,
metadata={"type": "short_term", "key": "value"},
)
def test_save_method_with_memory_client(mem0_storage_with_memory_client_using_config_from_crew):
"""Test save method for different memory types"""
mem0_storage = mem0_storage_with_memory_client_using_config_from_crew
mem0_storage.memory.add = MagicMock()
# Test short_term memory type (already set in fixture)
test_value = "This is a test memory"
test_metadata = {"key": "value"}
mem0_storage.save(test_value, test_metadata)
mem0_storage.memory.add.assert_called_once_with(
test_value,
agent_id="Test_Agent",
infer=False,
metadata={"type": "short_term", "key": "value"},
output_format="v1.1"
)
def test_search_method_with_memory_oss(mem0_storage_with_mocked_config):
"""Test search method for different memory types"""
mem0_storage, _, _ = mem0_storage_with_mocked_config
mock_results = {"results": [{"score": 0.9, "content": "Result 1"}, {"score": 0.4, "content": "Result 2"}]}
mem0_storage.memory.search = MagicMock(return_value=mock_results)
results = mem0_storage.search("test query", limit=5, score_threshold=0.5)
mem0_storage.memory.search.assert_called_once_with(
query="test query",
limit=5,
agent_id="Test_Agent",
user_id="test_user"
)
assert len(results) == 1
assert results[0]["content"] == "Result 1"
def test_search_method_with_memory_client(mem0_storage_with_memory_client_using_config_from_crew):
"""Test search method for different memory types"""
mem0_storage = mem0_storage_with_memory_client_using_config_from_crew
mock_results = {"results": [{"score": 0.9, "content": "Result 1"}, {"score": 0.4, "content": "Result 2"}]}
mem0_storage.memory.search = MagicMock(return_value=mock_results)
results = mem0_storage.search("test query", limit=5, score_threshold=0.5)
mem0_storage.memory.search.assert_called_once_with(
query="test query",
limit=5,
agent_id="Test_Agent",
metadata={"type": "short_term"},
user_id="test_user",
output_format='v1.1'
)
assert len(results) == 1
assert results[0]["content"] == "Result 1"

View File

@@ -1 +0,0 @@
"""Tests for tools."""

View File

@@ -0,0 +1,62 @@
import pytest
from pydantic import BaseModel, Field
from crewai.task import Task
from crewai.tools.structured_tool import CrewStructuredTool
@pytest.fixture
def simple_tool_function():
def test_func(param1: str, param2: int = 0) -> str:
"""Test function with basic params."""
return f"{param1} {param2}"
return test_func
def test_task_with_structured_tool(simple_tool_function):
"""Test that CrewStructuredTool can be used directly with Task."""
tool = CrewStructuredTool.from_function(
func=simple_tool_function,
name="test_tool",
description="Test tool description"
)
task = Task(
description="Test task description",
expected_output="Expected output",
tools=[tool]
)
assert len(task.tools) == 1
assert task.tools[0] == tool
def test_mixed_tool_types(simple_tool_function):
"""Test that both BaseTool and CrewStructuredTool can be used together with Task."""
from crewai.tools import BaseTool
structured_tool = CrewStructuredTool.from_function(
func=simple_tool_function,
name="structured_tool",
description="Structured tool description"
)
class TestBaseTool(BaseTool):
name: str = "base_tool"
description: str = "Base tool description"
def _run(self, query: str) -> str:
return f"Result for {query}"
base_tool = TestBaseTool()
task = Task(
description="Test task description",
expected_output="Expected output",
tools=[structured_tool, base_tool]
)
assert len(task.tools) == 2
assert task.tools[0] == structured_tool
assert isinstance(task.tools[1], BaseTool)

View File

@@ -1 +0,0 @@
"""Tests for utilities."""

View File

@@ -1 +0,0 @@
"""Tests for evaluators."""

View File

@@ -1 +0,0 @@
"""Tests for events."""

uv.lock (generated): 137 changed lines
View File

@@ -73,78 +73,6 @@ resolution-markers = [
"(python_full_version >= '3.12.4' and platform_machine != 'aarch64' and platform_python_implementation != 'PyPy' and platform_system == 'Darwin' and sys_platform != 'darwin') or (python_full_version >= '3.12.4' and platform_python_implementation != 'PyPy' and platform_system == 'Darwin' and sys_platform != 'darwin' and sys_platform != 'linux')",
"python_full_version >= '3.12.4' and platform_machine == 'aarch64' and platform_python_implementation != 'PyPy' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux'",
"(python_full_version >= '3.12.4' and platform_machine != 'aarch64' and platform_python_implementation != 'PyPy' and platform_system != 'Darwin' and sys_platform != 'darwin') or (python_full_version >= '3.12.4' and platform_python_implementation != 'PyPy' and platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux')",
"python_full_version < '3.11' and platform_python_implementation == 'PyPy' and platform_system == 'Darwin' and sys_platform == 'darwin'",
"python_full_version < '3.11' and platform_machine == 'aarch64' and platform_python_implementation == 'PyPy' and platform_system == 'Linux' and sys_platform == 'darwin'",
"(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_python_implementation == 'PyPy' and platform_system != 'Darwin' and sys_platform == 'darwin') or (python_full_version < '3.11' and platform_python_implementation == 'PyPy' and platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform == 'darwin')",
"python_full_version < '3.11' and platform_python_implementation != 'PyPy' and platform_system == 'Darwin' and sys_platform == 'darwin'",
"python_full_version < '3.11' and platform_machine == 'aarch64' and platform_python_implementation != 'PyPy' and platform_system == 'Linux' and sys_platform == 'darwin'",
"(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_python_implementation != 'PyPy' and platform_system != 'Darwin' and sys_platform == 'darwin') or (python_full_version < '3.11' and platform_python_implementation != 'PyPy' and platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform == 'darwin')",
"python_full_version < '3.11' and platform_machine == 'aarch64' and platform_python_implementation == 'PyPy' and platform_system == 'Darwin' and sys_platform == 'linux'",
"python_full_version < '3.11' and platform_machine == 'aarch64' and platform_python_implementation == 'PyPy' and platform_system == 'Linux' and sys_platform == 'linux'",
"python_full_version < '3.11' and platform_machine == 'aarch64' and platform_python_implementation == 'PyPy' and platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform == 'linux'",
"python_full_version < '3.11' and platform_machine == 'aarch64' and platform_python_implementation != 'PyPy' and platform_system == 'Darwin' and sys_platform == 'linux'",
"python_full_version < '3.11' and platform_machine == 'aarch64' and platform_python_implementation != 'PyPy' and platform_system == 'Linux' and sys_platform == 'linux'",
"python_full_version < '3.11' and platform_machine == 'aarch64' and platform_python_implementation != 'PyPy' and platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform == 'linux'",
"(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_python_implementation == 'PyPy' and platform_system == 'Darwin' and sys_platform != 'darwin') or (python_full_version < '3.11' and platform_python_implementation == 'PyPy' and platform_system == 'Darwin' and sys_platform != 'darwin' and sys_platform != 'linux')",
"python_full_version < '3.11' and platform_machine == 'aarch64' and platform_python_implementation == 'PyPy' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux'",
"(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_python_implementation == 'PyPy' and platform_system != 'Darwin' and sys_platform != 'darwin') or (python_full_version < '3.11' and platform_python_implementation == 'PyPy' and platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux')",
"(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_python_implementation != 'PyPy' and platform_system == 'Darwin' and sys_platform != 'darwin') or (python_full_version < '3.11' and platform_python_implementation != 'PyPy' and platform_system == 'Darwin' and sys_platform != 'darwin' and sys_platform != 'linux')",
"python_full_version < '3.11' and platform_machine == 'aarch64' and platform_python_implementation != 'PyPy' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux'",
"(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_python_implementation != 'PyPy' and platform_system != 'Darwin' and sys_platform != 'darwin') or (python_full_version < '3.11' and platform_python_implementation != 'PyPy' and platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux')",
"python_full_version == '3.11.*' and platform_python_implementation == 'PyPy' and platform_system == 'Darwin' and sys_platform == 'darwin'",
"python_full_version == '3.11.*' and platform_machine == 'aarch64' and platform_python_implementation == 'PyPy' and platform_system == 'Linux' and sys_platform == 'darwin'",
"(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_python_implementation == 'PyPy' and platform_system != 'Darwin' and sys_platform == 'darwin') or (python_full_version == '3.11.*' and platform_python_implementation == 'PyPy' and platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform == 'darwin')",
"python_full_version == '3.11.*' and platform_python_implementation != 'PyPy' and platform_system == 'Darwin' and sys_platform == 'darwin'",
"python_full_version == '3.11.*' and platform_machine == 'aarch64' and platform_python_implementation != 'PyPy' and platform_system == 'Linux' and sys_platform == 'darwin'",
"(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_python_implementation != 'PyPy' and platform_system != 'Darwin' and sys_platform == 'darwin') or (python_full_version == '3.11.*' and platform_python_implementation != 'PyPy' and platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform == 'darwin')",
"python_full_version == '3.11.*' and platform_machine == 'aarch64' and platform_python_implementation == 'PyPy' and platform_system == 'Darwin' and sys_platform == 'linux'",
"python_full_version == '3.11.*' and platform_machine == 'aarch64' and platform_python_implementation == 'PyPy' and platform_system == 'Linux' and sys_platform == 'linux'",
"python_full_version == '3.11.*' and platform_machine == 'aarch64' and platform_python_implementation == 'PyPy' and platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform == 'linux'",
"python_full_version == '3.11.*' and platform_machine == 'aarch64' and platform_python_implementation != 'PyPy' and platform_system == 'Darwin' and sys_platform == 'linux'",
"python_full_version == '3.11.*' and platform_machine == 'aarch64' and platform_python_implementation != 'PyPy' and platform_system == 'Linux' and sys_platform == 'linux'",
"python_full_version == '3.11.*' and platform_machine == 'aarch64' and platform_python_implementation != 'PyPy' and platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform == 'linux'",
"(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_python_implementation == 'PyPy' and platform_system == 'Darwin' and sys_platform != 'darwin') or (python_full_version == '3.11.*' and platform_python_implementation == 'PyPy' and platform_system == 'Darwin' and sys_platform != 'darwin' and sys_platform != 'linux')",
"python_full_version == '3.11.*' and platform_machine == 'aarch64' and platform_python_implementation == 'PyPy' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux'",
"(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_python_implementation == 'PyPy' and platform_system != 'Darwin' and sys_platform != 'darwin') or (python_full_version == '3.11.*' and platform_python_implementation == 'PyPy' and platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux')",
"(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_python_implementation != 'PyPy' and platform_system == 'Darwin' and sys_platform != 'darwin') or (python_full_version == '3.11.*' and platform_python_implementation != 'PyPy' and platform_system == 'Darwin' and sys_platform != 'darwin' and sys_platform != 'linux')",
"python_full_version == '3.11.*' and platform_machine == 'aarch64' and platform_python_implementation != 'PyPy' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux'",
"(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_python_implementation != 'PyPy' and platform_system != 'Darwin' and sys_platform != 'darwin') or (python_full_version == '3.11.*' and platform_python_implementation != 'PyPy' and platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux')",
"python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_python_implementation == 'PyPy' and platform_system == 'Darwin' and sys_platform == 'darwin'",
"python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_machine == 'aarch64' and platform_python_implementation == 'PyPy' and platform_system == 'Linux' and sys_platform == 'darwin'",
"(python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_machine != 'aarch64' and platform_python_implementation == 'PyPy' and platform_system != 'Darwin' and sys_platform == 'darwin') or (python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_python_implementation == 'PyPy' and platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform == 'darwin')",
"python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_python_implementation != 'PyPy' and platform_system == 'Darwin' and sys_platform == 'darwin'",
"python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_machine == 'aarch64' and platform_python_implementation != 'PyPy' and platform_system == 'Linux' and sys_platform == 'darwin'",
"(python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_machine != 'aarch64' and platform_python_implementation != 'PyPy' and platform_system != 'Darwin' and sys_platform == 'darwin') or (python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_python_implementation != 'PyPy' and platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform == 'darwin')",
"python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_machine == 'aarch64' and platform_python_implementation == 'PyPy' and platform_system == 'Darwin' and sys_platform == 'linux'",
"python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_machine == 'aarch64' and platform_python_implementation == 'PyPy' and platform_system == 'Linux' and sys_platform == 'linux'",
"python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_machine == 'aarch64' and platform_python_implementation == 'PyPy' and platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform == 'linux'",
"python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_machine == 'aarch64' and platform_python_implementation != 'PyPy' and platform_system == 'Darwin' and sys_platform == 'linux'",
"python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_machine == 'aarch64' and platform_python_implementation != 'PyPy' and platform_system == 'Linux' and sys_platform == 'linux'",
"python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_machine == 'aarch64' and platform_python_implementation != 'PyPy' and platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform == 'linux'",
"(python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_machine != 'aarch64' and platform_python_implementation == 'PyPy' and platform_system == 'Darwin' and sys_platform != 'darwin') or (python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_python_implementation == 'PyPy' and platform_system == 'Darwin' and sys_platform != 'darwin' and sys_platform != 'linux')",
"python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_machine == 'aarch64' and platform_python_implementation == 'PyPy' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux'",
"(python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_machine != 'aarch64' and platform_python_implementation == 'PyPy' and platform_system != 'Darwin' and sys_platform != 'darwin') or (python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_python_implementation == 'PyPy' and platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux')",
"(python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_machine != 'aarch64' and platform_python_implementation != 'PyPy' and platform_system == 'Darwin' and sys_platform != 'darwin') or (python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_python_implementation != 'PyPy' and platform_system == 'Darwin' and sys_platform != 'darwin' and sys_platform != 'linux')",
"python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_machine == 'aarch64' and platform_python_implementation != 'PyPy' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux'",
"(python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_machine != 'aarch64' and platform_python_implementation != 'PyPy' and platform_system != 'Darwin' and sys_platform != 'darwin') or (python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_python_implementation != 'PyPy' and platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux')",
"python_full_version >= '3.12.4' and platform_python_implementation == 'PyPy' and platform_system == 'Darwin' and sys_platform == 'darwin'",
"python_full_version >= '3.12.4' and platform_machine == 'aarch64' and platform_python_implementation == 'PyPy' and platform_system == 'Linux' and sys_platform == 'darwin'",
"(python_full_version >= '3.12.4' and platform_machine != 'aarch64' and platform_python_implementation == 'PyPy' and platform_system != 'Darwin' and sys_platform == 'darwin') or (python_full_version >= '3.12.4' and platform_python_implementation == 'PyPy' and platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform == 'darwin')",
"python_full_version >= '3.12.4' and platform_python_implementation != 'PyPy' and platform_system == 'Darwin' and sys_platform == 'darwin'",
"python_full_version >= '3.12.4' and platform_machine == 'aarch64' and platform_python_implementation != 'PyPy' and platform_system == 'Linux' and sys_platform == 'darwin'",
"(python_full_version >= '3.12.4' and platform_machine != 'aarch64' and platform_python_implementation != 'PyPy' and platform_system != 'Darwin' and sys_platform == 'darwin') or (python_full_version >= '3.12.4' and platform_python_implementation != 'PyPy' and platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform == 'darwin')",
"python_full_version >= '3.12.4' and platform_machine == 'aarch64' and platform_python_implementation == 'PyPy' and platform_system == 'Darwin' and sys_platform == 'linux'",
"python_full_version >= '3.12.4' and platform_machine == 'aarch64' and platform_python_implementation == 'PyPy' and platform_system == 'Linux' and sys_platform == 'linux'",
"python_full_version >= '3.12.4' and platform_machine == 'aarch64' and platform_python_implementation == 'PyPy' and platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform == 'linux'",
"python_full_version >= '3.12.4' and platform_machine == 'aarch64' and platform_python_implementation != 'PyPy' and platform_system == 'Darwin' and sys_platform == 'linux'",
"python_full_version >= '3.12.4' and platform_machine == 'aarch64' and platform_python_implementation != 'PyPy' and platform_system == 'Linux' and sys_platform == 'linux'",
"python_full_version >= '3.12.4' and platform_machine == 'aarch64' and platform_python_implementation != 'PyPy' and platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform == 'linux'",
"(python_full_version >= '3.12.4' and platform_machine != 'aarch64' and platform_python_implementation == 'PyPy' and platform_system == 'Darwin' and sys_platform != 'darwin') or (python_full_version >= '3.12.4' and platform_python_implementation == 'PyPy' and platform_system == 'Darwin' and sys_platform != 'darwin' and sys_platform != 'linux')",
"python_full_version >= '3.12.4' and platform_machine == 'aarch64' and platform_python_implementation == 'PyPy' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux'",
"(python_full_version >= '3.12.4' and platform_machine != 'aarch64' and platform_python_implementation == 'PyPy' and platform_system != 'Darwin' and sys_platform != 'darwin') or (python_full_version >= '3.12.4' and platform_python_implementation == 'PyPy' and platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux')",
"(python_full_version >= '3.12.4' and platform_machine != 'aarch64' and platform_python_implementation != 'PyPy' and platform_system == 'Darwin' and sys_platform != 'darwin') or (python_full_version >= '3.12.4' and platform_python_implementation != 'PyPy' and platform_system == 'Darwin' and sys_platform != 'darwin' and sys_platform != 'linux')",
"python_full_version >= '3.12.4' and platform_machine == 'aarch64' and platform_python_implementation != 'PyPy' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux'",
"(python_full_version >= '3.12.4' and platform_machine != 'aarch64' and platform_python_implementation != 'PyPy' and platform_system != 'Darwin' and sys_platform != 'darwin') or (python_full_version >= '3.12.4' and platform_python_implementation != 'PyPy' and platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux')",
]
[[package]]
@@ -826,7 +754,7 @@ requires-dist = [
{ name = "blinker", specifier = ">=1.9.0" },
{ name = "chromadb", specifier = ">=0.5.23" },
{ name = "click", specifier = ">=8.1.7" },
{ name = "crewai-tools", marker = "extra == 'tools'", specifier = "~=0.42.2" },
{ name = "crewai-tools", marker = "extra == 'tools'", specifier = "~=0.42.0" },
{ name = "docling", marker = "extra == 'docling'", specifier = ">=2.12.0" },
{ name = "fastembed", marker = "extra == 'fastembed'", specifier = ">=0.4.1" },
{ name = "instructor", specifier = ">=1.3.3" },
@@ -834,7 +762,7 @@ requires-dist = [
{ name = "json5", specifier = ">=0.10.0" },
{ name = "jsonref", specifier = ">=1.1.0" },
{ name = "litellm", specifier = "==1.67.2" },
{ name = "mem0ai", marker = "extra == 'mem0'", specifier = ">=0.1.94" },
{ name = "mem0ai", marker = "extra == 'mem0'", specifier = ">=0.1.29" },
{ name = "openai", specifier = ">=1.13.3" },
{ name = "openpyxl", specifier = ">=3.1.5" },
{ name = "openpyxl", marker = "extra == 'openpyxl'", specifier = ">=3.1.5" },
@@ -875,7 +803,7 @@ dev = [
[[package]]
name = "crewai-tools"
version = "0.42.2"
version = "0.42.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "chromadb" },
@@ -890,9 +818,9 @@ dependencies = [
{ name = "pytube" },
{ name = "requests" },
]
sdist = { url = "https://files.pythonhosted.org/packages/17/34/9e63e2db53d8f5c30353f271a3240687a48e55204bbd176a057c0b7658c8/crewai_tools-0.42.2.tar.gz", hash = "sha256:69365ffb168cccfea970e09b308905aa5007cfec60024d731ffac1362a0153c0", size = 754967 }
sdist = { url = "https://files.pythonhosted.org/packages/c2/dc/94e9cdf83f1ac699855ee0624451e68bb00c217e55edd2fa2200de9a7ba4/crewai_tools-0.42.0.tar.gz", hash = "sha256:cf66d48e5a46ecc0964b60bf985f84c5ae4a0a82c4fe35326d5f706fc4020f71", size = 754588 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/4e/43/0f70b95350084e5cb1e1d74e9acb9e18a89ba675b1d579c787c2662baba7/crewai_tools-0.42.2-py3-none-any.whl", hash = "sha256:13727fb68f0efefd21edeb281be3d66ff2f5a3b5029d4e6adef388b11fd5846a", size = 583933 },
{ url = "https://files.pythonhosted.org/packages/ed/2b/2245ee519a7c96988c27d65eb224023d9c204101eea44763e3297d3ba01a/crewai_tools-0.42.0-py3-none-any.whl", hash = "sha256:ee095d5366b96ee1ad2558e350abda098065c20d24afd6afe0e49baaa5a371b7", size = 583065 },
]
[[package]]
@@ -2398,9 +2326,9 @@ dependencies = [
{ name = "tiktoken" },
{ name = "tokenizers" },
]
sdist = { url = "https://files.pythonhosted.org/packages/a7/61/b974067af7908f19a0391ebda0ebfdff376eb9314c28578a9e60b0b381a7/litellm-1.67.2.tar.gz", hash = "sha256:9e108827bff16af04fd4c35b0c1a1d6c7746c96db3870189a60141d449797487", size = 7257152 }
sdist = { url = "https://files.pythonhosted.org/packages/a7/61/b974067af7908f19a0391ebda0ebfdff376eb9314c28578a9e60b0b381a7/litellm-1.67.2.tar.gz", hash = "sha256:9e108827bff16af04fd4c35b0c1a1d6c7746c96db3870189a60141d449797487", size = 7257152, upload_time = "2025-04-24T05:09:40.762Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c2/7f/1a7750bf49ff096ec8d924059a1daea7d98dc065a13294a849178a678c02/litellm-1.67.2-py3-none-any.whl", hash = "sha256:32df4d17b3ead17d04793311858965e41e83a7bdf9bd661895c0e6bc9c78dc8b", size = 7626920 },
{ url = "https://files.pythonhosted.org/packages/c2/7f/1a7750bf49ff096ec8d924059a1daea7d98dc065a13294a849178a678c02/litellm-1.67.2-py3-none-any.whl", hash = "sha256:32df4d17b3ead17d04793311858965e41e83a7bdf9bd661895c0e6bc9c78dc8b", size = 7626920, upload_time = "2025-04-24T05:09:37.5Z" },
]
[[package]]
@@ -2596,20 +2524,19 @@ wheels = [
[[package]]
name = "mem0ai"
version = "0.1.94"
version = "0.1.29"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "openai" },
{ name = "posthog" },
{ name = "psycopg2-binary" },
{ name = "pydantic" },
{ name = "pytz" },
{ name = "qdrant-client" },
{ name = "sqlalchemy" },
]
sdist = { url = "https://files.pythonhosted.org/packages/f0/c2/f3a61d1f993bca5ecfca6e0d51b94963dc4807ca1929ea3a2a58c7298593/mem0ai-0.1.94.tar.gz", hash = "sha256:9f6754017fc6452aabf66961f759973c87ce0d78d77130448a537f9e38d2d767", size = 93161 }
sdist = { url = "https://files.pythonhosted.org/packages/a9/bf/152718f9da3844dd24d4c45850b2e719798b5ce9389adf4ec873ee8905ca/mem0ai-0.1.29.tar.gz", hash = "sha256:42adefb7a9b241be03fbcabadf5328abf91b4ac390bc97e5966e55e3cac192c5", size = 55201, upload_time = "2024-11-07T06:07:20.661Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/0c/fa/a2043c5a228077423678d9e7e0fe3aab89ab9a625a4f171a30967de1bbb3/mem0ai-0.1.94-py3-none-any.whl", hash = "sha256:862aaccf4ec41d65a5c935dc49c7b8175c96f6b60b29abeda338d8a335027e2c", size = 143077 },
{ url = "https://files.pythonhosted.org/packages/65/9b/755be84f669415b3b513cfd935e768c4c84ac5c1ab6ff6ac2dab990a261a/mem0ai-0.1.29-py3-none-any.whl", hash = "sha256:07bbfd4238d0d7da65d5e4cf75a217eeb5b2829834e399074b05bb046730a57f", size = 79558, upload_time = "2024-11-07T06:07:18.665Z" },
]
[[package]]
@@ -3782,50 +3709,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/05/33/2d74d588408caedd065c2497bdb5ef83ce6082db01289a1e1147f6639802/psutil-5.9.8-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:d16bbddf0693323b8c6123dd804100241da461e41d6e332fb0ba6058f630f8c8", size = 249898, upload_time = "2024-01-19T20:47:59.238Z" },
]
[[package]]
name = "psycopg2-binary"
version = "2.9.10"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/cb/0e/bdc8274dc0585090b4e3432267d7be4dfbfd8971c0fa59167c711105a6bf/psycopg2-binary-2.9.10.tar.gz", hash = "sha256:4b3df0e6990aa98acda57d983942eff13d824135fe2250e6522edaa782a06de2", size = 385764 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/7a/81/331257dbf2801cdb82105306042f7a1637cc752f65f2bb688188e0de5f0b/psycopg2_binary-2.9.10-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:0ea8e3d0ae83564f2fc554955d327fa081d065c8ca5cc6d2abb643e2c9c1200f", size = 3043397 },
{ url = "https://files.pythonhosted.org/packages/e7/9a/7f4f2f031010bbfe6a02b4a15c01e12eb6b9b7b358ab33229f28baadbfc1/psycopg2_binary-2.9.10-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:3e9c76f0ac6f92ecfc79516a8034a544926430f7b080ec5a0537bca389ee0906", size = 3274806 },
{ url = "https://files.pythonhosted.org/packages/e5/57/8ddd4b374fa811a0b0a0f49b6abad1cde9cb34df73ea3348cc283fcd70b4/psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ad26b467a405c798aaa1458ba09d7e2b6e5f96b1ce0ac15d82fd9f95dc38a92", size = 2851361 },
{ url = "https://files.pythonhosted.org/packages/f9/66/d1e52c20d283f1f3a8e7e5c1e06851d432f123ef57b13043b4f9b21ffa1f/psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:270934a475a0e4b6925b5f804e3809dd5f90f8613621d062848dd82f9cd62007", size = 3080836 },
{ url = "https://files.pythonhosted.org/packages/a0/cb/592d44a9546aba78f8a1249021fe7c59d3afb8a0ba51434d6610cc3462b6/psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:48b338f08d93e7be4ab2b5f1dbe69dc5e9ef07170fe1f86514422076d9c010d0", size = 3264552 },
{ url = "https://files.pythonhosted.org/packages/64/33/c8548560b94b7617f203d7236d6cdf36fe1a5a3645600ada6efd79da946f/psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f4152f8f76d2023aac16285576a9ecd2b11a9895373a1f10fd9db54b3ff06b4", size = 3019789 },
{ url = "https://files.pythonhosted.org/packages/b0/0e/c2da0db5bea88a3be52307f88b75eec72c4de62814cbe9ee600c29c06334/psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:32581b3020c72d7a421009ee1c6bf4a131ef5f0a968fab2e2de0c9d2bb4577f1", size = 2871776 },
{ url = "https://files.pythonhosted.org/packages/15/d7/774afa1eadb787ddf41aab52d4c62785563e29949613c958955031408ae6/psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:2ce3e21dc3437b1d960521eca599d57408a695a0d3c26797ea0f72e834c7ffe5", size = 2820959 },
{ url = "https://files.pythonhosted.org/packages/5e/ed/440dc3f5991a8c6172a1cde44850ead0e483a375277a1aef7cfcec00af07/psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e984839e75e0b60cfe75e351db53d6db750b00de45644c5d1f7ee5d1f34a1ce5", size = 2919329 },
{ url = "https://files.pythonhosted.org/packages/03/be/2cc8f4282898306732d2ae7b7378ae14e8df3c1231b53579efa056aae887/psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3c4745a90b78e51d9ba06e2088a2fe0c693ae19cc8cb051ccda44e8df8a6eb53", size = 2957659 },
{ url = "https://files.pythonhosted.org/packages/d0/12/fb8e4f485d98c570e00dad5800e9a2349cfe0f71a767c856857160d343a5/psycopg2_binary-2.9.10-cp310-cp310-win32.whl", hash = "sha256:e5720a5d25e3b99cd0dc5c8a440570469ff82659bb09431c1439b92caf184d3b", size = 1024605 },
{ url = "https://files.pythonhosted.org/packages/22/4f/217cd2471ecf45d82905dd09085e049af8de6cfdc008b6663c3226dc1c98/psycopg2_binary-2.9.10-cp310-cp310-win_amd64.whl", hash = "sha256:3c18f74eb4386bf35e92ab2354a12c17e5eb4d9798e4c0ad3a00783eae7cd9f1", size = 1163817 },
{ url = "https://files.pythonhosted.org/packages/9c/8f/9feb01291d0d7a0a4c6a6bab24094135c2b59c6a81943752f632c75896d6/psycopg2_binary-2.9.10-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:04392983d0bb89a8717772a193cfaac58871321e3ec69514e1c4e0d4957b5aff", size = 3043397 },
{ url = "https://files.pythonhosted.org/packages/15/30/346e4683532011561cd9c8dfeac6a8153dd96452fee0b12666058ab7893c/psycopg2_binary-2.9.10-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:1a6784f0ce3fec4edc64e985865c17778514325074adf5ad8f80636cd029ef7c", size = 3274806 },
{ url = "https://files.pythonhosted.org/packages/66/6e/4efebe76f76aee7ec99166b6c023ff8abdc4e183f7b70913d7c047701b79/psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5f86c56eeb91dc3135b3fd8a95dc7ae14c538a2f3ad77a19645cf55bab1799c", size = 2851370 },
{ url = "https://files.pythonhosted.org/packages/7f/fd/ff83313f86b50f7ca089b161b8e0a22bb3c319974096093cd50680433fdb/psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b3d2491d4d78b6b14f76881905c7a8a8abcf974aad4a8a0b065273a0ed7a2cb", size = 3080780 },
{ url = "https://files.pythonhosted.org/packages/e6/c4/bfadd202dcda8333a7ccafdc51c541dbdfce7c2c7cda89fa2374455d795f/psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2286791ececda3a723d1910441c793be44625d86d1a4e79942751197f4d30341", size = 3264583 },
{ url = "https://files.pythonhosted.org/packages/5d/f1/09f45ac25e704ac954862581f9f9ae21303cc5ded3d0b775532b407f0e90/psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:512d29bb12608891e349af6a0cccedce51677725a921c07dba6342beaf576f9a", size = 3019831 },
{ url = "https://files.pythonhosted.org/packages/9e/2e/9beaea078095cc558f215e38f647c7114987d9febfc25cb2beed7c3582a5/psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5a507320c58903967ef7384355a4da7ff3f28132d679aeb23572753cbf2ec10b", size = 2871822 },
{ url = "https://files.pythonhosted.org/packages/01/9e/ef93c5d93f3dc9fc92786ffab39e323b9aed066ba59fdc34cf85e2722271/psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6d4fa1079cab9018f4d0bd2db307beaa612b0d13ba73b5c6304b9fe2fb441ff7", size = 2820975 },
{ url = "https://files.pythonhosted.org/packages/a5/f0/049e9631e3268fe4c5a387f6fc27e267ebe199acf1bc1bc9cbde4bd6916c/psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:851485a42dbb0bdc1edcdabdb8557c09c9655dfa2ca0460ff210522e073e319e", size = 2919320 },
{ url = "https://files.pythonhosted.org/packages/dc/9a/bcb8773b88e45fb5a5ea8339e2104d82c863a3b8558fbb2aadfe66df86b3/psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:35958ec9e46432d9076286dda67942ed6d968b9c3a6a2fd62b48939d1d78bf68", size = 2957617 },
{ url = "https://files.pythonhosted.org/packages/e2/6b/144336a9bf08a67d217b3af3246abb1d027095dab726f0687f01f43e8c03/psycopg2_binary-2.9.10-cp311-cp311-win32.whl", hash = "sha256:ecced182e935529727401b24d76634a357c71c9275b356efafd8a2a91ec07392", size = 1024618 },
{ url = "https://files.pythonhosted.org/packages/61/69/3b3d7bd583c6d3cbe5100802efa5beacaacc86e37b653fc708bf3d6853b8/psycopg2_binary-2.9.10-cp311-cp311-win_amd64.whl", hash = "sha256:ee0e8c683a7ff25d23b55b11161c2663d4b099770f6085ff0a20d4505778d6b4", size = 1163816 },
{ url = "https://files.pythonhosted.org/packages/49/7d/465cc9795cf76f6d329efdafca74693714556ea3891813701ac1fee87545/psycopg2_binary-2.9.10-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:880845dfe1f85d9d5f7c412efea7a08946a46894537e4e5d091732eb1d34d9a0", size = 3044771 },
{ url = "https://files.pythonhosted.org/packages/8b/31/6d225b7b641a1a2148e3ed65e1aa74fc86ba3fee850545e27be9e1de893d/psycopg2_binary-2.9.10-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9440fa522a79356aaa482aa4ba500b65f28e5d0e63b801abf6aa152a29bd842a", size = 3275336 },
{ url = "https://files.pythonhosted.org/packages/30/b7/a68c2b4bff1cbb1728e3ec864b2d92327c77ad52edcd27922535a8366f68/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3923c1d9870c49a2d44f795df0c889a22380d36ef92440ff618ec315757e539", size = 2851637 },
{ url = "https://files.pythonhosted.org/packages/0b/b1/cfedc0e0e6f9ad61f8657fd173b2f831ce261c02a08c0b09c652b127d813/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b2c956c028ea5de47ff3a8d6b3cc3330ab45cf0b7c3da35a2d6ff8420896526", size = 3082097 },
{ url = "https://files.pythonhosted.org/packages/18/ed/0a8e4153c9b769f59c02fb5e7914f20f0b2483a19dae7bf2db54b743d0d0/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f758ed67cab30b9a8d2833609513ce4d3bd027641673d4ebc9c067e4d208eec1", size = 3264776 },
{ url = "https://files.pythonhosted.org/packages/10/db/d09da68c6a0cdab41566b74e0a6068a425f077169bed0946559b7348ebe9/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cd9b4f2cfab88ed4a9106192de509464b75a906462fb846b936eabe45c2063e", size = 3020968 },
{ url = "https://files.pythonhosted.org/packages/94/28/4d6f8c255f0dfffb410db2b3f9ac5218d959a66c715c34cac31081e19b95/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dc08420625b5a20b53551c50deae6e231e6371194fa0651dbe0fb206452ae1f", size = 2872334 },
{ url = "https://files.pythonhosted.org/packages/05/f7/20d7bf796593c4fea95e12119d6cc384ff1f6141a24fbb7df5a668d29d29/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d7cd730dfa7c36dbe8724426bf5612798734bff2d3c3857f36f2733f5bfc7c00", size = 2822722 },
{ url = "https://files.pythonhosted.org/packages/4d/e4/0c407ae919ef626dbdb32835a03b6737013c3cc7240169843965cada2bdf/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:155e69561d54d02b3c3209545fb08938e27889ff5a10c19de8d23eb5a41be8a5", size = 2920132 },
{ url = "https://files.pythonhosted.org/packages/2d/70/aa69c9f69cf09a01da224909ff6ce8b68faeef476f00f7ec377e8f03be70/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3cc28a6fd5a4a26224007712e79b81dbaee2ffb90ff406256158ec4d7b52b47", size = 2959312 },
{ url = "https://files.pythonhosted.org/packages/d3/bd/213e59854fafe87ba47814bf413ace0dcee33a89c8c8c814faca6bc7cf3c/psycopg2_binary-2.9.10-cp312-cp312-win32.whl", hash = "sha256:ec8a77f521a17506a24a5f626cb2aee7850f9b69a0afe704586f63a464f3cd64", size = 1025191 },
{ url = "https://files.pythonhosted.org/packages/92/29/06261ea000e2dc1e22907dbbc483a1093665509ea586b29b8986a0e56733/psycopg2_binary-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:18c5ee682b9c6dd3696dad6e54cc7ff3a1a9020df6a5c0f861ef8bfd338c3ca0", size = 1164031 },
]
[[package]]
name = "ptyprocess"
version = "0.7.0"