diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 37bd8c62d..e46d727c1 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -23,8 +23,8 @@ jobs: - name: Install Requirements run: | + set -e pip install poetry - poetry lock && poetry install - name: Run tests diff --git a/.gitignore b/.gitignore index 2aaa1342a..d6333ae4a 100644 --- a/.gitignore +++ b/.gitignore @@ -13,4 +13,5 @@ db/ test.py rc-tests/* *.pkl -temp/* \ No newline at end of file +temp/* +.vscode/* \ No newline at end of file diff --git a/docs/core-concepts/Training-Crew.md b/docs/core-concepts/Training-Crew.md index 3e08b1072..1fae7ff4d 100644 --- a/docs/core-concepts/Training-Crew.md +++ b/docs/core-concepts/Training-Crew.md @@ -8,6 +8,7 @@ The training feature in CrewAI allows you to train your AI agents using the comm During training, CrewAI utilizes techniques to optimize the performance of your agents along with human feedback. This helps the agents improve their understanding, decision-making, and problem-solving abilities. +### Training Your Crew Using the CLI To use the training feature, follow these steps: 1. Open your terminal or command prompt. @@ -18,7 +19,26 @@ To use the training feature, follow these steps: crewai train -n <n_iterations> ``` -Replace `<n_iterations>` with the desired number of training iterations. This determines how many times the agents will go through the training process. +### Training Your Crew Programmatically +To train your crew programmatically, use the following steps: + +1. Define the number of iterations for training. +2. Specify the input parameters for the training process. +3. Execute the training command within a try-except block to handle potential errors. + +```python + n_iterations = 2 + inputs = {"topic": "CrewAI Training"} + + try: + YourCrewName_Crew().crew().train(n_iterations=n_iterations, inputs=inputs) + + except Exception as e: + raise Exception(f"An error occurred while training the crew: {e}") +``` + +!!! note "Replace `<n_iterations>` with the desired number of training iterations. This determines how many times the agents will go through the training process." + ### Key Points to Note: - **Positive Integer Requirement:** Ensure that the number of iterations (`n_iterations`) is a positive integer. The code will raise a `ValueError` if this condition is not met. diff --git a/poetry.lock b/poetry.lock index f75da2851..defbe44fa 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,5 +1,25 @@ # This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand.
+[[package]] +name = "agentops" +version = "0.1.12" +description = "Python SDK for developing AI agent evals and observability" +optional = true +python-versions = ">=3.7" +files = [ + {file = "agentops-0.1.12-py3-none-any.whl", hash = "sha256:b4b47c990638b74810cc1c38624ada162094b46e3fdd63883642a16bc5258386"}, + {file = "agentops-0.1.12.tar.gz", hash = "sha256:c4f762482fb240fc3503907f52498f2d8d9e4f80236ee4a12bf039317a85fcd7"}, +] + +[package.dependencies] +packaging = "23.2" +psutil = "5.9.8" +requests = "2.31.0" + +[package.extras] +dev = ["pytest (==7.4.0)", "requests-mock (==1.11.0)"] +langchain = ["langchain (>=1.19,<2.0)"] + [[package]] name = "aiohttp" version = "3.9.5" @@ -820,13 +840,13 @@ files = [ [[package]] name = "crewai-tools" -version = "0.4.5" +version = "0.4.6" description = "Set of tools for the crewAI framework" optional = false python-versions = "<=3.13,>=3.10" files = [ - {file = "crewai_tools-0.4.5-py3-none-any.whl", hash = "sha256:d970a152a69d039eb23150755d4dc9e7c6ef9176b19a80348142619518e39b17"}, - {file = "crewai_tools-0.4.5.tar.gz", hash = "sha256:1d7763f20afd95c6be70d31f9f0e9334cb42be127c17feddce368907077a6543"}, + {file = "crewai_tools-0.4.6-py3-none-any.whl", hash = "sha256:bfce205766da58a83233f82ed1f31f931656eb1f9c40b38f947cbf03afdf6e0d"}, + {file = "crewai_tools-0.4.6.tar.gz", hash = "sha256:f4b1ae8b5cf7c4b53da40223eb9734129b5fb71e7a77b676909129512307bd32"}, ] [package.dependencies] @@ -2505,8 +2525,8 @@ files = [ [package.dependencies] orjson = ">=3.9.14,<4.0.0" pydantic = [ - {version = ">=1,<3", markers = "python_full_version < \"3.12.4\""}, {version = ">=2.7.4,<3.0.0", markers = "python_full_version >= \"3.12.4\""}, + {version = ">=1,<3", markers = "python_full_version < \"3.12.4\""}, ] requests = ">=2,<3" @@ -3777,6 +3797,34 @@ files = [ {file = "protobuf-4.25.3.tar.gz", hash = "sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c"}, ] +[[package]] +name = "psutil" +version = "5.9.8" +description = "Cross-platform lib for process and system monitoring in Python." 
+optional = true +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "psutil-5.9.8-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:26bd09967ae00920df88e0352a91cff1a78f8d69b3ecabbfe733610c0af486c8"}, + {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:05806de88103b25903dff19bb6692bd2e714ccf9e668d050d144012055cbca73"}, + {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:611052c4bc70432ec770d5d54f64206aa7203a101ec273a0cd82418c86503bb7"}, + {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:50187900d73c1381ba1454cf40308c2bf6f34268518b3f36a9b663ca87e65e36"}, + {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:02615ed8c5ea222323408ceba16c60e99c3f91639b07da6373fb7e6539abc56d"}, + {file = "psutil-5.9.8-cp27-none-win32.whl", hash = "sha256:36f435891adb138ed3c9e58c6af3e2e6ca9ac2f365efe1f9cfef2794e6c93b4e"}, + {file = "psutil-5.9.8-cp27-none-win_amd64.whl", hash = "sha256:bd1184ceb3f87651a67b2708d4c3338e9b10c5df903f2e3776b62303b26cb631"}, + {file = "psutil-5.9.8-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:aee678c8720623dc456fa20659af736241f575d79429a0e5e9cf88ae0605cc81"}, + {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cb6403ce6d8e047495a701dc7c5bd788add903f8986d523e3e20b98b733e421"}, + {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d06016f7f8625a1825ba3732081d77c94589dca78b7a3fc072194851e88461a4"}, + {file = "psutil-5.9.8-cp36-cp36m-win32.whl", hash = "sha256:7d79560ad97af658a0f6adfef8b834b53f64746d45b403f225b85c5c2c140eee"}, + {file = "psutil-5.9.8-cp36-cp36m-win_amd64.whl", hash = "sha256:27cc40c3493bb10de1be4b3f07cae4c010ce715290a5be22b98493509c6299e2"}, + {file = "psutil-5.9.8-cp37-abi3-win32.whl", hash = "sha256:bc56c2a1b0d15aa3eaa5a60c9f3f8e3e565303b465dbf57a1b730e7a2b9844e0"}, + {file = "psutil-5.9.8-cp37-abi3-win_amd64.whl", hash = "sha256:8db4c1b57507eef143a15a6884ca10f7c73876cdf5d51e713151c1236a0e68cf"}, + {file = "psutil-5.9.8-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:d16bbddf0693323b8c6123dd804100241da461e41d6e332fb0ba6058f630f8c8"}, + {file = "psutil-5.9.8.tar.gz", hash = "sha256:6be126e3225486dff286a8fb9a06246a5253f4c7c53b475ea5f5ac934e64194c"}, +] + +[package.extras] +test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] + [[package]] name = "pulsar-client" version = "3.5.0" @@ -3934,8 +3982,8 @@ files = [ annotated-types = ">=0.4.0" pydantic-core = "2.20.0" typing-extensions = [ - {version = ">=4.6.1", markers = "python_version < \"3.13\""}, {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, + {version = ">=4.6.1", markers = "python_version < \"3.13\""}, ] [package.extras] @@ -4588,13 +4636,13 @@ files = [ [[package]] name = "requests" -version = "2.32.3" +version = "2.31.0" description = "Python HTTP for Humans." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" files = [ - {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, - {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, ] [package.dependencies] @@ -6028,9 +6076,10 @@ doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linke test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] [extras] +agentops = ["agentops"] tools = ["crewai-tools"] [metadata] lock-version = "2.0" python-versions = ">=3.10,<=3.13" -content-hash = "d4ea0d71723ecc2bad629c387dd786b6a96c553b1e3e298516fdfcd2059d1019" +content-hash = "e605a254f66369bdf6dbdad36f7bd58d7d97377ea37c151cccc7f48ef9f96f55" diff --git a/src/crewai/agent.py b/src/crewai/agent.py index 501d538be..45797cc7f 100644 --- a/src/crewai/agent.py +++ b/src/crewai/agent.py @@ -7,16 +7,15 @@ from langchain.tools.render import render_text_description from langchain_core.agents import AgentAction from langchain_core.callbacks import BaseCallbackHandler from langchain_openai import ChatOpenAI - from pydantic import Field, InstanceOf, model_validator from crewai.agents import CacheHandler, CrewAgentExecutor, CrewAgentParser +from crewai.agents.agent_builder.base_agent import BaseAgent from crewai.memory.contextual.contextual_memory import ContextualMemory from crewai.tools.agent_tools import AgentTools -from crewai.utilities import Prompts, Converter +from crewai.utilities import Converter, Prompts from crewai.utilities.constants import TRAINED_AGENTS_DATA_FILE, TRAINING_DATA_FILE from crewai.utilities.token_counter_callback import TokenCalcHandler -from crewai.agents.agent_builder.base_agent import BaseAgent from crewai.utilities.training_handler import CrewTrainingHandler agentops = None @@ -91,7 +90,6 @@ class Agent(BaseAgent): response_template: Optional[str] = Field( default=None, description="Response format for the agent." ) - allow_code_execution: Optional[bool] = Field( default=False, description="Enable code execution for the agent." 
) @@ -103,7 +101,7 @@ class Agent(BaseAgent): @model_validator(mode="after") def set_agent_executor(self) -> "Agent": - """Ensure agent executor and token process is set.""" + """Ensure agent executor and token process are set.""" if hasattr(self.llm, "model_name"): token_handler = TokenCalcHandler(self.llm.model_name, self._token_process) diff --git a/src/crewai/agents/agent_builder/base_agent.py b/src/crewai/agents/agent_builder/base_agent.py index 65c9b661c..90e27dec6 100644 --- a/src/crewai/agents/agent_builder/base_agent.py +++ b/src/crewai/agents/agent_builder/base_agent.py @@ -1,23 +1,26 @@ -from copy import deepcopy import uuid -from typing import Any, Dict, List, Optional from abc import ABC, abstractmethod +from copy import copy as shallow_copy +from typing import Any, Dict, List, Optional, TypeVar + from pydantic import ( UUID4, BaseModel, + ConfigDict, Field, InstanceOf, + PrivateAttr, field_validator, model_validator, - ConfigDict, - PrivateAttr, ) from pydantic_core import PydanticCustomError -from crewai.utilities import I18N, RPMController, Logger +from crewai.agents.agent_builder.utilities.base_token_process import TokenProcess from crewai.agents.cache.cache_handler import CacheHandler from crewai.agents.tools_handler import ToolsHandler -from crewai.utilities.token_counter_callback import TokenProcess +from crewai.utilities import I18N, Logger, RPMController + +T = TypeVar("T", bound="BaseAgent") class BaseAgent(ABC, BaseModel): @@ -188,6 +191,31 @@ class BaseAgent(ABC, BaseModel): """Get the converter class for the agent to create json/pydantic outputs.""" pass + def copy(self: T) -> T: + """Create a deep copy of the Agent.""" + exclude = { + "id", + "_logger", + "_rpm_controller", + "_request_within_rpm_limit", + "_token_process", + "agent_executor", + "tools", + "tools_handler", + "cache_handler", + "llm", + } + + # Copy llm and clear callbacks + existing_llm = shallow_copy(self.llm) + existing_llm.callbacks = [] + copied_data = self.model_dump(exclude=exclude) + copied_data = {k: v for k, v in copied_data.items() if v is not None} + + copied_agent = type(self)(**copied_data, llm=existing_llm, tools=self.tools) + + return copied_agent + def interpolate_inputs(self, inputs: Dict[str, Any]) -> None: """Interpolate inputs into the agent description and backstory.""" if self._original_role is None: @@ -217,35 +245,6 @@ class BaseAgent(ABC, BaseModel): def increment_formatting_errors(self) -> None: self.formatting_errors += 1 - def copy(self): - exclude = { - "id", - "_logger", - "_rpm_controller", - "_request_within_rpm_limit", - "token_process", - "agent_executor", - "tools", - "tools_handler", - "cache_handler", - "crew", - "llm", - } - - copied_data = self.model_dump(exclude=exclude, exclude_unset=True) - copied_agent = self.__class__(**copied_data) - - # Copy mutable attributes separately - copied_agent.tools = deepcopy(self.tools) - copied_agent.config = deepcopy(self.config) - - # Preserve original values for interpolation - copied_agent._original_role = self._original_role - copied_agent._original_goal = self._original_goal - copied_agent._original_backstory = self._original_backstory - - return copied_agent - def set_rpm_controller(self, rpm_controller: RPMController) -> None: """Set the rpm controller for the agent. 
diff --git a/src/crewai/crew.py b/src/crewai/crew.py index 5d0bb98e5..e7967ab99 100644 --- a/src/crewai/crew.py +++ b/src/crewai/crew.py @@ -315,16 +315,13 @@ class Crew(BaseModel): # type: ignore[attr-defined] # Argument 1 to "_interpolate_inputs" of "Crew" has incompatible type "dict[str, Any] | None"; expected "dict[str, Any]" agent.crew = self # type: ignore[attr-defined] # TODO: Create an AgentFunctionCalling protocol for future refactoring - if ( - hasattr(agent, "function_calling_llm") - and not agent.function_calling_llm - ): + if not agent.function_calling_llm: agent.function_calling_llm = self.function_calling_llm - if hasattr(agent, "allow_code_execution") and agent.allow_code_execution: + if agent.allow_code_execution: agent.tools += agent.get_code_execution_tools() - if hasattr(agent, "step_callback") and not agent.step_callback: + if not agent.step_callback: agent.step_callback = self.step_callback agent.create_agent_executor() @@ -340,9 +337,7 @@ class Crew(BaseModel): raise NotImplementedError( f"The process '{self.process}' is not implemented yet." ) - metrics = metrics + [ - agent._token_process.get_summary() for agent in self.agents - ] + metrics += [agent._token_process.get_summary() for agent in self.agents] self.usage_metrics = { key: sum([m[key] for m in metrics if m is not None]) for key in metrics[0] @@ -350,21 +345,32 @@ class Crew(BaseModel): return result - def kickoff_for_each(self, inputs: List[Dict[str, Any]]) -> List: + def kickoff_for_each( + self, inputs: List[Dict[str, Any]] + ) -> List[Union[str, Dict[str, Any]]]: """Executes the Crew's workflow for each input in the list and aggregates results.""" results = [] + # Initialize the parent crew's usage metrics + total_usage_metrics = { + "total_tokens": 0, + "prompt_tokens": 0, + "completion_tokens": 0, + "successful_requests": 0, + } + for input_data in inputs: crew = self.copy() - for task in crew.tasks: - task.interpolate_inputs(input_data) - for agent in crew.agents: - agent.interpolate_inputs(input_data) + output = crew.kickoff(inputs=input_data) + + if crew.usage_metrics: + for key in total_usage_metrics: + total_usage_metrics[key] += crew.usage_metrics.get(key, 0) - output = crew.kickoff() results.append(output) + self.usage_metrics = total_usage_metrics return results async def kickoff_async( @@ -374,26 +380,37 @@ class Crew(BaseModel): return await asyncio.to_thread(self.kickoff, inputs) async def kickoff_for_each_async(self, inputs: List[Dict]) -> List[Any]: - async def run_crew(input_data): - crew = self.copy() + crew_copies = [self.copy() for _ in inputs] - for task in crew.tasks: - task.interpolate_inputs(input_data) - for agent in crew.agents: - agent.interpolate_inputs(input_data) + async def run_crew(crew, input_data): + return await crew.kickoff_async(inputs=input_data) - return await crew.kickoff_async() - - tasks = [asyncio.create_task(run_crew(input_data)) for input_data in inputs] + tasks = [ + asyncio.create_task(run_crew(crew_copies[i], inputs[i])) + for i in range(len(inputs)) + ] results = await asyncio.gather(*tasks) + total_usage_metrics = { + "total_tokens": 0, + "prompt_tokens": 0, + "completion_tokens": 0, + "successful_requests": 0, + } + for crew in crew_copies: + if crew.usage_metrics: + for key in total_usage_metrics: + total_usage_metrics[key] += crew.usage_metrics.get(key, 0) + + self.usage_metrics = total_usage_metrics + return results def _run_sequential_process(self) -> str: """Executes tasks sequentially and returns the final output.""" task_output = "" - token_usage 
= [] + for task in self.tasks: if task.agent.allow_delegation: # type: ignore # Item "None" of "Agent | None" has no attribute "allow_delegation" agents_for_delegation = [ @@ -419,18 +436,16 @@ class Crew(BaseModel): role = task.agent.role if task.agent is not None else "None" self._logger.log("debug", f"== [{role}] Task output: {task_output}\n\n") - token_summ = task.agent._token_process.get_summary() - - token_usage.append(token_summ) if self.output_log_file: self._file_handler.log(agent=role, task=task_output, status="completed") - token_usage_formatted = self.aggregate_token_usage(token_usage) self._finish_execution(task_output) + token_usage = self.calculate_usage_metrics() + # type: ignore # Incompatible return value type (got "tuple[str, Any]", expected "str") - return self._format_output(task_output, token_usage_formatted) + return self._format_output(task_output, token_usage) def _run_hierarchical_process( self, @@ -454,8 +469,9 @@ class Crew(BaseModel): verbose=self.verbose, ) self.manager_agent = manager + task_output = "" - token_usage = [] + for task in self.tasks: self._logger.log("debug", f"Working Agent: {manager.role}") self._logger.log("info", f"Starting Task: {task.description}") @@ -472,9 +488,6 @@ class Crew(BaseModel): task_output = task.execute( agent=manager, context=task_output, tools=manager.tools ) - if hasattr(manager, "_token_process"): - token_summ = manager._token_process.get_summary() - token_usage.append(token_summ) self._logger.log("debug", f"[{manager.role}] Task output: {task_output}") if self.output_log_file: @@ -484,14 +497,10 @@ class Crew(BaseModel): self._finish_execution(task_output) - manager_token_usage = manager._token_process.get_summary() - token_usage.append(manager_token_usage) - token_usage_formatted = self.aggregate_token_usage(token_usage) + # type: ignore # Incompatible return value type (got "tuple[str, Any]", expected "str") + token_usage = self.calculate_usage_metrics() - return self._format_output( - task_output, - token_usage_formatted, - ), manager_token_usage + return self._format_output(task_output, token_usage), token_usage def copy(self): """Create a deep copy of the Crew.""" @@ -506,12 +515,13 @@ class Crew(BaseModel): "_short_term_memory", "_long_term_memory", "_entity_memory", + "_telemetry", "agents", "tasks", } cloned_agents = [agent.copy() for agent in self.agents] - cloned_tasks = [task.copy() for task in self.tasks] + cloned_tasks = [task.copy(cloned_agents) for task in self.tasks] copied_data = self.model_dump(exclude=exclude) copied_data = {k: v for k, v in copied_data.items() if v is not None} @@ -549,6 +559,7 @@ class Crew(BaseModel): Formats the output of the crew execution. 
If full_output is True, then returned data type will be a dictionary else returned outputs are string """ + if self.full_output: return { # type: ignore # Incompatible return value type (got "dict[str, Sequence[str | TaskOutput | None]]", expected "str") "final_output": output, @@ -569,11 +580,27 @@ class Crew(BaseModel): ) self._telemetry.end_crew(self, output) + def calculate_usage_metrics(self) -> Dict[str, int]: + """Calculates and returns the usage metrics.""" + total_usage_metrics = { + "total_tokens": 0, + "prompt_tokens": 0, + "completion_tokens": 0, + "successful_requests": 0, + } + + for agent in self.agents: + if hasattr(agent, "_token_process"): + token_sum = agent._token_process.get_summary() + for key in total_usage_metrics: + total_usage_metrics[key] += token_sum.get(key, 0) + + if self.manager_agent and hasattr(self.manager_agent, "_token_process"): + token_sum = self.manager_agent._token_process.get_summary() + for key in total_usage_metrics: + total_usage_metrics[key] += token_sum.get(key, 0) + + return total_usage_metrics + def __repr__(self): return f"Crew(id={self.id}, process={self.process}, number_of_agents={len(self.agents)}, number_of_tasks={len(self.tasks)})" - - def aggregate_token_usage(self, token_usage_list: List[Dict[str, Any]]): - return { - key: sum([m[key] for m in token_usage_list if m is not None]) - for key in token_usage_list[0] - } diff --git a/src/crewai/task.py b/src/crewai/task.py index 4d8d8f00e..aba197f04 100644 --- a/src/crewai/task.py +++ b/src/crewai/task.py @@ -2,8 +2,8 @@ import os import re import threading import uuid -from copy import deepcopy -from typing import Any, Dict, List, Optional, Type +from copy import copy +from typing import Any, Dict, List, Optional, Type, Union from langchain_openai import ChatOpenAI from opentelemetry.trace import Span @@ -13,7 +13,9 @@ from pydantic_core import PydanticCustomError from crewai.agents.agent_builder.base_agent import BaseAgent from crewai.tasks.task_output import TaskOutput from crewai.telemetry.telemetry import Telemetry -from crewai.utilities import I18N, ConverterError, Printer +from crewai.utilities.converter import ConverterError +from crewai.utilities.i18n import I18N +from crewai.utilities.printer import Printer from crewai.utilities.pydantic_schema_parser import PydanticSchemaParser @@ -216,7 +218,7 @@ class Task(BaseModel): ) return result - def _execute(self, agent, task, context, tools): + def _execute(self, agent: "BaseAgent", task, context, tools): result = agent.execute_task( task=task, context=context, @@ -274,7 +276,7 @@ class Task(BaseModel): """Increment the delegations counter.""" self.delegations += 1 - def copy(self): + def copy(self, agents: Optional[List["BaseAgent"]] = None) -> "Task": """Create a deep copy of the Task.""" exclude = { "id", @@ -289,8 +291,12 @@ class Task(BaseModel): cloned_context = ( [task.copy() for task in self.context] if self.context else None ) - cloned_agent = self.agent.copy() if self.agent else None - cloned_tools = deepcopy(self.tools) if self.tools else [] + + def get_agent_by_role(role: str) -> Union["BaseAgent", None]: + return next((agent for agent in agents if agent.role == role), None) + + cloned_agent = get_agent_by_role(self.agent.role) if self.agent else None + cloned_tools = copy(self.tools) if self.tools else [] copied_task = Task( **copied_data, @@ -298,6 +304,7 @@ class Task(BaseModel): agent=cloned_agent, tools=cloned_tools, ) + return copied_task def _export_output(self, result: str) -> Any: @@ -329,7 +336,7 @@ class 
Task(BaseModel): except Exception: pass - # type: ignore # Item "None" of "Agent | None" has no attribute "function_calling_llm" + # type: ignore # Item "None" of "BaseAgent | None" has no attribute "function_calling_llm" llm = getattr(self.agent, "function_calling_llm", None) or self.agent.llm if not self._is_gpt(llm): # type: ignore # Argument "model" to "PydanticSchemaParser" has incompatible type "type[BaseModel] | None"; expected "type[BaseModel]" @@ -355,7 +362,9 @@ class Task(BaseModel): if self.output_file: content = ( # type: ignore # "str" has no attribute "json" - exported_result if not self.output_pydantic else exported_result.json() + exported_result + if not self.output_pydantic + else exported_result.model_dump_json() ) self._save_file(content) diff --git a/src/crewai/translations/en.json b/src/crewai/translations/en.json index f3376e566..3b4771c06 100644 --- a/src/crewai/translations/en.json +++ b/src/crewai/translations/en.json @@ -17,7 +17,7 @@ "task_with_context": "{task}\n\nThis is the context you're working with:\n{context}", "expected_output": "\nThis is the expect criteria for your final answer: {expected_output} \n you MUST return the actual complete content as the final answer, not a summary.", "human_feedback": "You got human feedback on your work, re-avaluate it and give a new Final Answer when ready.\n {human_feedback}", - "getting_input": "This is the agent final answer: {final_answer}\nPlease provide a feedback: " + "getting_input": "This is the agent's final answer: {final_answer}\nPlease provide feedback: " }, "errors": { "force_final_answer": "Tool won't be use because it's time to give your final answer. Don't use tools and just your absolute BEST Final answer.", diff --git a/src/crewai/utilities/evaluators/task_evaluator.py b/src/crewai/utilities/evaluators/task_evaluator.py index 252541b66..04983b07c 100644 --- a/src/crewai/utilities/evaluators/task_evaluator.py +++ b/src/crewai/utilities/evaluators/task_evaluator.py @@ -5,9 +5,9 @@ from pydantic import BaseModel, Field from crewai.utilities import Converter from crewai.utilities.pydantic_schema_parser import PydanticSchemaParser + agentops = None try: - import agentops from agentops import track_agent except ImportError: diff --git a/src/crewai/utilities/logger.py b/src/crewai/utilities/logger.py index b54595c9c..e0c74d5ce 100644 --- a/src/crewai/utilities/logger.py +++ b/src/crewai/utilities/logger.py @@ -1,7 +1,7 @@ from datetime import datetime from crewai.utilities.printer import Printer -from datetime import datetime + class Logger: _printer = Printer() diff --git a/src/crewai/utilities/token_counter_callback.py b/src/crewai/utilities/token_counter_callback.py index fffc9b600..51200f37e 100644 --- a/src/crewai/utilities/token_counter_callback.py +++ b/src/crewai/utilities/token_counter_callback.py @@ -8,18 +8,18 @@ from crewai.agents.agent_builder.utilities.base_token_process import TokenProces class TokenCalcHandler(BaseCallbackHandler): - model: str = "" + model_name: str = "" token_cost_process: TokenProcess - def __init__(self, model, token_cost_process): - self.model = model + def __init__(self, model_name, token_cost_process): + self.model_name = model_name self.token_cost_process = token_cost_process def on_llm_start( self, serialized: Dict[str, Any], prompts: List[str], **kwargs: Any ) -> None: try: - encoding = tiktoken.encoding_for_model(self.model) + encoding = tiktoken.encoding_for_model(self.model_name) except KeyError: encoding = tiktoken.get_encoding("cl100k_base") diff --git 
a/tests/agent_test.py b/tests/agent_test.py index 75e4a04da..a498732d9 100644 --- a/tests/agent_test.py +++ b/tests/agent_test.py @@ -12,7 +12,6 @@ from crewai import Agent, Crew, Task from crewai.agents.cache import CacheHandler from crewai.agents.executor import CrewAgentExecutor from crewai.agents.parser import CrewAgentParser - from crewai.tools.tool_calling import InstructorToolCalling from crewai.tools.tool_usage import ToolUsage from crewai.utilities import RPMController @@ -734,7 +733,7 @@ def test_agent_llm_uses_token_calc_handler_with_llm_has_model_name(): assert len(agent1.llm.callbacks) == 1 assert agent1.llm.callbacks[0].__class__.__name__ == "TokenCalcHandler" - assert agent1.llm.callbacks[0].model == "gpt-4o" + assert agent1.llm.callbacks[0].model_name == "gpt-4o" assert ( agent1.llm.callbacks[0].token_cost_process.__class__.__name__ == "TokenProcess" ) diff --git a/tests/cassettes/test_crew_async_kickoff_for_each_full_ouput.yaml b/tests/cassettes/test_crew_async_kickoff_for_each_full_ouput.yaml new file mode 100644 index 000000000..8a56fb964 --- /dev/null +++ b/tests/cassettes/test_crew_async_kickoff_for_each_full_ouput.yaml @@ -0,0 +1,585 @@ +interactions: +- request: + body: '{"messages": [{"content": "You are dog Researcher. You have a lot of experience + with dog.\nYour personal goal is: Express hot takes on dog.To give my best complete + final answer to the task use the exact following format:\n\nThought: I now can + give a great answer\nFinal Answer: my best complete final answer to the task.\nYour + final answer must be the great and the most complete as possible, it must be + outcome described.\n\nI MUST use these formats, my job depends on it!\nCurrent + Task: Give me an analysis around dog.\n\nThis is the expect criteria for your + final answer: 1 bullet point about dog that''s under 15 words. \n you MUST return + the actual complete content as the final answer, not a summary.\n\nBegin! 
This + is VERY important to you, use the tools available and give your best Final Answer, + your job depends on it!\n\nThought:\n", "role": "user"}], "model": "gpt-4o", + "n": 1, "stop": ["\nObservation"], "stream": true, "temperature": 0.7}' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate, br + connection: + - keep-alive + content-length: + - '951' + content-type: + - application/json + host: + - api.openai.com + user-agent: + - OpenAI/Python 1.34.0 + x-stainless-arch: + - arm64 + x-stainless-async: + - 'false' + x-stainless-lang: + - python + x-stainless-os: + - MacOS + x-stainless-package-version: + - 1.34.0 + x-stainless-runtime: + - CPython + x-stainless-runtime-version: + - 3.12.3 + method: POST + uri: https://api.openai.com/v1/chat/completions + response: + body: + string: 'data: {"id":"chatcmpl-9gGly5pkMQFPEoB5vefeCguR5lZg5","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGly5pkMQFPEoB5vefeCguR5lZg5","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"Thought"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGly5pkMQFPEoB5vefeCguR5lZg5","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGly5pkMQFPEoB5vefeCguR5lZg5","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":" + I"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGly5pkMQFPEoB5vefeCguR5lZg5","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":" + now"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGly5pkMQFPEoB5vefeCguR5lZg5","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":" + can"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGly5pkMQFPEoB5vefeCguR5lZg5","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":" + give"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGly5pkMQFPEoB5vefeCguR5lZg5","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":" + a"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGly5pkMQFPEoB5vefeCguR5lZg5","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":" + great"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGly5pkMQFPEoB5vefeCguR5lZg5","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":" + answer"},"logprobs":null,"finish_reason":null}]} + + + data: 
{"id":"chatcmpl-9gGly5pkMQFPEoB5vefeCguR5lZg5","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"\n"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGly5pkMQFPEoB5vefeCguR5lZg5","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGly5pkMQFPEoB5vefeCguR5lZg5","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":" + Answer"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGly5pkMQFPEoB5vefeCguR5lZg5","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGly5pkMQFPEoB5vefeCguR5lZg5","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":" + Dogs"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGly5pkMQFPEoB5vefeCguR5lZg5","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":" + are"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGly5pkMQFPEoB5vefeCguR5lZg5","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":" + incredibly"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGly5pkMQFPEoB5vefeCguR5lZg5","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":" + loyal"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGly5pkMQFPEoB5vefeCguR5lZg5","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":" + creatures"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGly5pkMQFPEoB5vefeCguR5lZg5","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":" + and"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGly5pkMQFPEoB5vefeCguR5lZg5","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":" + make"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGly5pkMQFPEoB5vefeCguR5lZg5","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":" + excellent"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGly5pkMQFPEoB5vefeCguR5lZg5","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":" + companions"},"logprobs":null,"finish_reason":null}]} + + + data: 
{"id":"chatcmpl-9gGly5pkMQFPEoB5vefeCguR5lZg5","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGly5pkMQFPEoB5vefeCguR5lZg5","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]} + + + data: [DONE] + + + ' + headers: + CF-Cache-Status: + - DYNAMIC + CF-RAY: + - 89c8b85b1adac009-ATL + Connection: + - keep-alive + Content-Type: + - text/event-stream; charset=utf-8 + Date: + - Mon, 01 Jul 2024 19:14:38 GMT + Server: + - cloudflare + Set-Cookie: + - __cf_bm=EKr3m8uVLAQymRGlOvcrrYSniXqH.I6nlooc.HtxR58-1719861278-1.0.1.1-ShT9PH0Sv.qvbXjw_BhtziPUPaOIFBxlzXEIk_MXnfJ5PxggSkkaN25IKMZglSd3N2X.U2pWvFwywNQXiXlRnQ; + path=/; expires=Mon, 01-Jul-24 19:44:38 GMT; domain=.api.openai.com; HttpOnly; + Secure; SameSite=None + - _cfuvid=NdoJw5c7TqrbsjEH.ABD06WhM3d1BUh2BfsxOwuclSY-1719861278155-0.0.1.1-604800000; + path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None + Transfer-Encoding: + - chunked + alt-svc: + - h3=":443"; ma=86400 + openai-organization: + - crewai-iuxna1 + openai-processing-ms: + - '110' + openai-version: + - '2020-10-01' + strict-transport-security: + - max-age=31536000; includeSubDomains + x-ratelimit-limit-requests: + - '10000' + x-ratelimit-limit-tokens: + - '16000000' + x-ratelimit-remaining-requests: + - '9998' + x-ratelimit-remaining-tokens: + - '15999741' + x-ratelimit-reset-requests: + - 11ms + x-ratelimit-reset-tokens: + - 0s + x-request-id: + - req_266fff38f6e0a997187154e25d6615e8 + status: + code: 200 + message: OK +- request: + body: '{"messages": [{"content": "You are apple Researcher. You have a lot of + experience with apple.\nYour personal goal is: Express hot takes on apple.To + give my best complete final answer to the task use the exact following format:\n\nThought: + I now can give a great answer\nFinal Answer: my best complete final answer to + the task.\nYour final answer must be the great and the most complete as possible, + it must be outcome described.\n\nI MUST use these formats, my job depends on + it!\nCurrent Task: Give me an analysis around apple.\n\nThis is the expect criteria + for your final answer: 1 bullet point about apple that''s under 15 words. \n + you MUST return the actual complete content as the final answer, not a summary.\n\nBegin! 
+ This is VERY important to you, use the tools available and give your best Final + Answer, your job depends on it!\n\nThought:\n", "role": "user"}], "model": "gpt-4o", + "n": 1, "stop": ["\nObservation"], "stream": true, "temperature": 0.7}' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate, br + connection: + - keep-alive + content-length: + - '961' + content-type: + - application/json + host: + - api.openai.com + user-agent: + - OpenAI/Python 1.34.0 + x-stainless-arch: + - arm64 + x-stainless-async: + - 'false' + x-stainless-lang: + - python + x-stainless-os: + - MacOS + x-stainless-package-version: + - 1.34.0 + x-stainless-runtime: + - CPython + x-stainless-runtime-version: + - 3.12.3 + method: POST + uri: https://api.openai.com/v1/chat/completions + response: + body: + string: 'data: {"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"I"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":" + now"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":" + can"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":" + give"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":" + a"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":" + great"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":" + answer"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":".\n"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]} + + + data: 
{"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":" + Answer"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":" + Apple''s"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":" + ecosystem"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":" + fosters"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":" + seamless"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":" + integration"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":" + but"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":" + can"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":" + limit"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":" + user"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":" + flexibility"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":" + and"},"logprobs":null,"finish_reason":null}]} + + + data: 
{"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":" + choice"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]} + + + data: [DONE] + + + ' + headers: + CF-Cache-Status: + - DYNAMIC + CF-RAY: + - 89c8b85b19501351-ATL + Connection: + - keep-alive + Content-Type: + - text/event-stream; charset=utf-8 + Date: + - Mon, 01 Jul 2024 19:14:38 GMT + Server: + - cloudflare + Set-Cookie: + - __cf_bm=R2LwekshY1B9Z5rF_RyOforiY4rLQreEXx6gOzhnZCI-1719861278-1.0.1.1-y_WShmsjsavKJEOt9Yw8nwjv05e4WdVaZGu8pYcS4z0wF9heVD.0C9W2aQodYxaWIQvkXiPm7y93ma7WCxUdBQ; + path=/; expires=Mon, 01-Jul-24 19:44:38 GMT; domain=.api.openai.com; HttpOnly; + Secure; SameSite=None + - _cfuvid=TLDVkWpa_eP62.b4QTrhowr4J_DsXwMZ2nGDaWD4ebU-1719861278245-0.0.1.1-604800000; + path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None + Transfer-Encoding: + - chunked + alt-svc: + - h3=":443"; ma=86400 + openai-organization: + - crewai-iuxna1 + openai-processing-ms: + - '99' + openai-version: + - '2020-10-01' + strict-transport-security: + - max-age=31536000; includeSubDomains + x-ratelimit-limit-requests: + - '10000' + x-ratelimit-limit-tokens: + - '16000000' + x-ratelimit-remaining-requests: + - '9999' + x-ratelimit-remaining-tokens: + - '15999780' + x-ratelimit-reset-requests: + - 6ms + x-ratelimit-reset-tokens: + - 0s + x-request-id: + - req_39f52bd8b06ab4ada2c16853345eb6dc + status: + code: 200 + message: OK +- request: + body: '{"messages": [{"content": "You are cat Researcher. You have a lot of experience + with cat.\nYour personal goal is: Express hot takes on cat.To give my best complete + final answer to the task use the exact following format:\n\nThought: I now can + give a great answer\nFinal Answer: my best complete final answer to the task.\nYour + final answer must be the great and the most complete as possible, it must be + outcome described.\n\nI MUST use these formats, my job depends on it!\nCurrent + Task: Give me an analysis around cat.\n\nThis is the expect criteria for your + final answer: 1 bullet point about cat that''s under 15 words. \n you MUST return + the actual complete content as the final answer, not a summary.\n\nBegin! 
This + is VERY important to you, use the tools available and give your best Final Answer, + your job depends on it!\n\nThought:\n", "role": "user"}], "model": "gpt-4o", + "n": 1, "stop": ["\nObservation"], "stream": true, "temperature": 0.7}' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate, br + connection: + - keep-alive + content-length: + - '951' + content-type: + - application/json + host: + - api.openai.com + user-agent: + - OpenAI/Python 1.34.0 + x-stainless-arch: + - arm64 + x-stainless-async: + - 'false' + x-stainless-lang: + - python + x-stainless-os: + - MacOS + x-stainless-package-version: + - 1.34.0 + x-stainless-runtime: + - CPython + x-stainless-runtime-version: + - 3.12.3 + method: POST + uri: https://api.openai.com/v1/chat/completions + response: + body: + string: 'data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"Thought"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":" + I"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":" + now"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":" + can"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":" + give"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":" + a"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":" + great"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":" + answer"},"logprobs":null,"finish_reason":null}]} + + + data: 
{"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"\n"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":" + Answer"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":" + Cats"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":" + are"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":" + natural"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":" + hunters"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":" + with"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":" + unique"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":" + personalities"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":" + and"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":" + strong"},"logprobs":null,"finish_reason":null}]} + + + data: 
{"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":" + territorial"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":" + instincts"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]} + + + data: [DONE] + + + ' + headers: + CF-Cache-Status: + - DYNAMIC + CF-RAY: + - 89c8b85b1fde4546-ATL + Connection: + - keep-alive + Content-Type: + - text/event-stream; charset=utf-8 + Date: + - Mon, 01 Jul 2024 19:14:38 GMT + Server: + - cloudflare + Set-Cookie: + - __cf_bm=KdRY51WYOZSedMQIfG_vPmrPdO67._RkSjV0nq.khUk-1719861278-1.0.1.1-r0uJtNVxaGm2OCkQliG.tsX3vekPCDQb2IET3ywu41Igu1Qfz01rhz_WlvKIllsZlXIyd6rvHT7NxLo.UOtD7Q; + path=/; expires=Mon, 01-Jul-24 19:44:38 GMT; domain=.api.openai.com; HttpOnly; + Secure; SameSite=None + - _cfuvid=Qqu1FBant56uM9Al0JmDl7QMk9cRcojzFUt8volvWPo-1719861278308-0.0.1.1-604800000; + path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None + Transfer-Encoding: + - chunked + alt-svc: + - h3=":443"; ma=86400 + openai-organization: + - crewai-iuxna1 + openai-processing-ms: + - '156' + openai-version: + - '2020-10-01' + strict-transport-security: + - max-age=31536000; includeSubDomains + x-ratelimit-limit-requests: + - '10000' + x-ratelimit-limit-tokens: + - '16000000' + x-ratelimit-remaining-requests: + - '9999' + x-ratelimit-remaining-tokens: + - '15999783' + x-ratelimit-reset-requests: + - 6ms + x-ratelimit-reset-tokens: + - 0s + x-request-id: + - req_a0c6e5796af340d6720b9cfd55df703e + status: + code: 200 + message: OK +version: 1 diff --git a/tests/cassettes/test_crew_full_output.yaml b/tests/cassettes/test_crew_full_output.yaml new file mode 100644 index 000000000..285106ea7 --- /dev/null +++ b/tests/cassettes/test_crew_full_output.yaml @@ -0,0 +1,314 @@ +interactions: +- request: + body: '{"messages": [{"content": "You are test role. test backstory\nYour personal + goal is: test goalTo give my best complete final answer to the task use the + exact following format:\n\nThought: I now can give a great answer\nFinal Answer: + my best complete final answer to the task.\nYour final answer must be the great + and the most complete as possible, it must be outcome described.\n\nI MUST use + these formats, my job depends on it!\nCurrent Task: just say hi!\n\nThis is + the expect criteria for your final answer: your greeting \n you MUST return + the actual complete content as the final answer, not a summary.\n\nBegin! 
This + is VERY important to you, use the tools available and give your best Final Answer, + your job depends on it!\n\nThought:\n", "role": "user"}], "model": "gpt-4o", + "n": 1, "stop": ["\nObservation"], "stream": true, "temperature": 0.7}' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate, br + connection: + - keep-alive + content-length: + - '853' + content-type: + - application/json + host: + - api.openai.com + user-agent: + - OpenAI/Python 1.34.0 + x-stainless-arch: + - arm64 + x-stainless-async: + - 'false' + x-stainless-lang: + - python + x-stainless-os: + - MacOS + x-stainless-package-version: + - 1.34.0 + x-stainless-runtime: + - CPython + x-stainless-runtime-version: + - 3.12.3 + method: POST + uri: https://api.openai.com/v1/chat/completions + response: + body: + string: 'data: {"id":"chatcmpl-9gGvixvu2sSTjML4oN3fsoMeTHbew","object":"chat.completion.chunk","created":1719861882,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGvixvu2sSTjML4oN3fsoMeTHbew","object":"chat.completion.chunk","created":1719861882,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"Thought"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGvixvu2sSTjML4oN3fsoMeTHbew","object":"chat.completion.chunk","created":1719861882,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGvixvu2sSTjML4oN3fsoMeTHbew","object":"chat.completion.chunk","created":1719861882,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":" + I"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGvixvu2sSTjML4oN3fsoMeTHbew","object":"chat.completion.chunk","created":1719861882,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":" + now"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGvixvu2sSTjML4oN3fsoMeTHbew","object":"chat.completion.chunk","created":1719861882,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":" + can"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGvixvu2sSTjML4oN3fsoMeTHbew","object":"chat.completion.chunk","created":1719861882,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":" + give"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGvixvu2sSTjML4oN3fsoMeTHbew","object":"chat.completion.chunk","created":1719861882,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":" + a"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGvixvu2sSTjML4oN3fsoMeTHbew","object":"chat.completion.chunk","created":1719861882,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":" + great"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGvixvu2sSTjML4oN3fsoMeTHbew","object":"chat.completion.chunk","created":1719861882,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":" + answer"},"logprobs":null,"finish_reason":null}]} + + + data: 
{"id":"chatcmpl-9gGvixvu2sSTjML4oN3fsoMeTHbew","object":"chat.completion.chunk","created":1719861882,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"\n"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGvixvu2sSTjML4oN3fsoMeTHbew","object":"chat.completion.chunk","created":1719861882,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGvixvu2sSTjML4oN3fsoMeTHbew","object":"chat.completion.chunk","created":1719861882,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":" + Answer"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGvixvu2sSTjML4oN3fsoMeTHbew","object":"chat.completion.chunk","created":1719861882,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGvixvu2sSTjML4oN3fsoMeTHbew","object":"chat.completion.chunk","created":1719861882,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":" + Hi"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGvixvu2sSTjML4oN3fsoMeTHbew","object":"chat.completion.chunk","created":1719861882,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"!"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGvixvu2sSTjML4oN3fsoMeTHbew","object":"chat.completion.chunk","created":1719861882,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]} + + + data: [DONE] + + + ' + headers: + CF-Cache-Status: + - DYNAMIC + CF-RAY: + - 89c8c71ccad81823-ATL + Connection: + - keep-alive + Content-Type: + - text/event-stream; charset=utf-8 + Date: + - Mon, 01 Jul 2024 19:24:42 GMT + Server: + - cloudflare + Set-Cookie: + - __cf_bm=uU.2MR0L4Mv3xs4DzFlWOQLVId1dJXQBlWffhr9mqxU-1719861882-1.0.1.1-JSKN2_O9iYj8QCZjy0IGiunZxvXimz5Kzv5wQJedVua5E6WIl1UvP.wguXbK0cds7ayJReYnR8v8oAN2rmtnNQ; + path=/; expires=Mon, 01-Jul-24 19:54:42 GMT; domain=.api.openai.com; HttpOnly; + Secure; SameSite=None + - _cfuvid=yc5Q7WKbO5zoiGNQx86HpHNM3HeXi2HxCxw31lL_UuU-1719861882665-0.0.1.1-604800000; + path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None + Transfer-Encoding: + - chunked + alt-svc: + - h3=":443"; ma=86400 + openai-organization: + - crewai-iuxna1 + openai-processing-ms: + - '86' + openai-version: + - '2020-10-01' + strict-transport-security: + - max-age=31536000; includeSubDomains + x-ratelimit-limit-requests: + - '10000' + x-ratelimit-limit-tokens: + - '16000000' + x-ratelimit-remaining-requests: + - '9999' + x-ratelimit-remaining-tokens: + - '15999808' + x-ratelimit-reset-requests: + - 6ms + x-ratelimit-reset-tokens: + - 0s + x-request-id: + - req_25d95f35048bf71e28d73fbed6576a6c + status: + code: 200 + message: OK +- request: + body: '{"messages": [{"content": "You are test role. 
test backstory\nYour personal + goal is: test goalTo give my best complete final answer to the task use the + exact following format:\n\nThought: I now can give a great answer\nFinal Answer: + my best complete final answer to the task.\nYour final answer must be the great + and the most complete as possible, it must be outcome described.\n\nI MUST use + these formats, my job depends on it!\nCurrent Task: just say hello!\n\nThis + is the expect criteria for your final answer: your greeting \n you MUST return + the actual complete content as the final answer, not a summary.\n\nThis is the + context you''re working with:\nHi!\n\nBegin! This is VERY important to you, + use the tools available and give your best Final Answer, your job depends on + it!\n\nThought:\n", "role": "user"}], "model": "gpt-4o", "n": 1, "stop": ["\nObservation"], + "stream": true, "temperature": 0.7}' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate, br + connection: + - keep-alive + content-length: + - '905' + content-type: + - application/json + cookie: + - __cf_bm=uU.2MR0L4Mv3xs4DzFlWOQLVId1dJXQBlWffhr9mqxU-1719861882-1.0.1.1-JSKN2_O9iYj8QCZjy0IGiunZxvXimz5Kzv5wQJedVua5E6WIl1UvP.wguXbK0cds7ayJReYnR8v8oAN2rmtnNQ; + _cfuvid=yc5Q7WKbO5zoiGNQx86HpHNM3HeXi2HxCxw31lL_UuU-1719861882665-0.0.1.1-604800000 + host: + - api.openai.com + user-agent: + - OpenAI/Python 1.34.0 + x-stainless-arch: + - arm64 + x-stainless-async: + - 'false' + x-stainless-lang: + - python + x-stainless-os: + - MacOS + x-stainless-package-version: + - 1.34.0 + x-stainless-runtime: + - CPython + x-stainless-runtime-version: + - 3.12.3 + method: POST + uri: https://api.openai.com/v1/chat/completions + response: + body: + string: 'data: {"id":"chatcmpl-9gGvjRHciTPrlyXWRGu5z5C56L10c","object":"chat.completion.chunk","created":1719861883,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGvjRHciTPrlyXWRGu5z5C56L10c","object":"chat.completion.chunk","created":1719861883,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"Thought"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGvjRHciTPrlyXWRGu5z5C56L10c","object":"chat.completion.chunk","created":1719861883,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGvjRHciTPrlyXWRGu5z5C56L10c","object":"chat.completion.chunk","created":1719861883,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":" + I"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGvjRHciTPrlyXWRGu5z5C56L10c","object":"chat.completion.chunk","created":1719861883,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":" + now"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGvjRHciTPrlyXWRGu5z5C56L10c","object":"chat.completion.chunk","created":1719861883,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":" + can"},"logprobs":null,"finish_reason":null}]} + + + data: 
{"id":"chatcmpl-9gGvjRHciTPrlyXWRGu5z5C56L10c","object":"chat.completion.chunk","created":1719861883,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":" + give"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGvjRHciTPrlyXWRGu5z5C56L10c","object":"chat.completion.chunk","created":1719861883,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":" + a"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGvjRHciTPrlyXWRGu5z5C56L10c","object":"chat.completion.chunk","created":1719861883,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":" + great"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGvjRHciTPrlyXWRGu5z5C56L10c","object":"chat.completion.chunk","created":1719861883,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":" + answer"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGvjRHciTPrlyXWRGu5z5C56L10c","object":"chat.completion.chunk","created":1719861883,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"\n"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGvjRHciTPrlyXWRGu5z5C56L10c","object":"chat.completion.chunk","created":1719861883,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGvjRHciTPrlyXWRGu5z5C56L10c","object":"chat.completion.chunk","created":1719861883,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":" + Answer"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGvjRHciTPrlyXWRGu5z5C56L10c","object":"chat.completion.chunk","created":1719861883,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGvjRHciTPrlyXWRGu5z5C56L10c","object":"chat.completion.chunk","created":1719861883,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":" + Hello"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGvjRHciTPrlyXWRGu5z5C56L10c","object":"chat.completion.chunk","created":1719861883,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"!"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9gGvjRHciTPrlyXWRGu5z5C56L10c","object":"chat.completion.chunk","created":1719861883,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]} + + + data: [DONE] + + + ' + headers: + CF-Cache-Status: + - DYNAMIC + CF-RAY: + - 89c8c7202e0f1823-ATL + Connection: + - keep-alive + Content-Type: + - text/event-stream; charset=utf-8 + Date: + - Mon, 01 Jul 2024 19:24:43 GMT + Server: + - cloudflare + Transfer-Encoding: + - chunked + alt-svc: + - h3=":443"; ma=86400 + openai-organization: + - crewai-iuxna1 + openai-processing-ms: + - '82' + openai-version: + - '2020-10-01' + strict-transport-security: + - max-age=31536000; includeSubDomains + x-ratelimit-limit-requests: + - '10000' + x-ratelimit-limit-tokens: + - '16000000' + x-ratelimit-remaining-requests: + - '9999' 
+ x-ratelimit-remaining-tokens: + - '15999794' + x-ratelimit-reset-requests: + - 6ms + x-ratelimit-reset-tokens: + - 0s + x-request-id: + - req_754b5067e8f56d5c1182dc0f57be0e45 + status: + code: 200 + message: OK +version: 1 diff --git a/tests/cassettes/test_crew_full_ouput.yaml b/tests/cassettes/test_crew_kickoff_for_each_full_ouput.yaml similarity index 99% rename from tests/cassettes/test_crew_full_ouput.yaml rename to tests/cassettes/test_crew_kickoff_for_each_full_ouput.yaml index 538191f70..e0dfb1b63 100644 --- a/tests/cassettes/test_crew_full_ouput.yaml +++ b/tests/cassettes/test_crew_kickoff_for_each_full_ouput.yaml @@ -400126,7 +400126,7 @@ interactions: Content-Type: - application/octet-stream Date: - - Wed, 26 Jun 2024 00:47:57 GMT + - Sun, 23 Jun 2024 14:23:37 GMT ETag: - '0x8DC736E200CD241' Last-Modified: @@ -400138,128 +400138,24 @@ interactions: x-ms-lease-status: - unlocked x-ms-request-id: - - 8134b189-f01e-002a-4362-c7d69f000000 + - fcf86155-f01e-0015-1278-c51e3c000000 x-ms-version: - '2009-09-19' status: code: 200 message: OK - request: - body: !!binary | - CqIhCiQKIgoMc2VydmljZS5uYW1lEhIKEGNyZXdBSS10ZWxlbWV0cnkS+SAKEgoQY3Jld2FpLnRl - bGVtZXRyeRKTAQoQeFaYfr5OxyZaJbtlPmhykRIIyBZe8r8vT78qClRvb2wgVXNhZ2UwATmAmXe6 - bGfcF0FgRXi6bGfcF0oaCg5jcmV3YWlfdmVyc2lvbhIICgYwLjMyLjJKHwoJdG9vbF9uYW1lEhIK - EGdldF9maW5hbF9hbnN3ZXJKDgoIYXR0ZW1wdHMSAhgBegIYAYUBAAEAABKTAQoQDWl8VO5eDhpC - 8WTRhE7lIxIIjCV/tIXBg+gqClRvb2wgVXNhZ2UwATmQ09a7bGfcF0EYZNe7bGfcF0oaCg5jcmV3 - YWlfdmVyc2lvbhIICgYwLjMyLjJKHwoJdG9vbF9uYW1lEhIKEGdldF9maW5hbF9hbnN3ZXJKDgoI - YXR0ZW1wdHMSAhgBegIYAYUBAAEAABKTAQoQdZyAk3NfWDJ95Rl9fwm9JBIIeOLAjtRHX8YqClRv - b2wgVXNhZ2UwATn4LiPAbGfcF0Hw1iPAbGfcF0oaCg5jcmV3YWlfdmVyc2lvbhIICgYwLjMyLjJK - HwoJdG9vbF9uYW1lEhIKEGdldF9maW5hbF9hbnN3ZXJKDgoIYXR0ZW1wdHMSAhgBegIYAYUBAAEA - ABLQAQoQ9B/WTcQbD55N9my9PwwJ/RII61GsN4ZK8lsqEFRvb2wgVXNhZ2UgRXJyb3IwATn4XSTC - bGfcF0F4GSXCbGfcF0oaCg5jcmV3YWlfdmVyc2lvbhIICgYwLjMyLjJKZgoDbGxtEl8KXXsibmFt - ZSI6IG51bGwsICJtb2RlbF9uYW1lIjogImdwdC00LTAxMjUtcHJldmlldyIsICJ0ZW1wZXJhdHVy - ZSI6IDAuNywgImNsYXNzIjogIkNoYXRPcGVuQUkifXoCGAGFAQABAAAS0AEKEN0yjmyVck//dsMA - AJt7gVsSCE64+GH0u+SCKhBUb29sIFVzYWdlIEVycm9yMAE5iIgTw2xn3BdBODwUw2xn3BdKGgoO - Y3Jld2FpX3ZlcnNpb24SCAoGMC4zMi4ySmYKA2xsbRJfCl17Im5hbWUiOiBudWxsLCAibW9kZWxf - bmFtZSI6ICJncHQtNC0wMTI1LXByZXZpZXciLCAidGVtcGVyYXR1cmUiOiAwLjcsICJjbGFzcyI6 - ICJDaGF0T3BlbkFJIn16AhgBhQEAAQAAEtABChDHSt0G/EqmGlI1IdhjDtotEgj83Lu7mYSDsyoQ - VG9vbCBVc2FnZSBFcnJvcjABOZCYGMRsZ9wXQXBEGcRsZ9wXShoKDmNyZXdhaV92ZXJzaW9uEggK - BjAuMzIuMkpmCgNsbG0SXwpdeyJuYW1lIjogbnVsbCwgIm1vZGVsX25hbWUiOiAiZ3B0LTQtMDEy - NS1wcmV2aWV3IiwgInRlbXBlcmF0dXJlIjogMC43LCAiY2xhc3MiOiAiQ2hhdE9wZW5BSSJ9egIY - AYUBAAEAABJoChBpV7cx2tTNYorpmU9Iezq6EgjT9Wm5SX16tioQVG9vbCBVc2FnZSBFcnJvcjAB - ObDiWsdsZ9wXQfB+W8dsZ9wXShoKDmNyZXdhaV92ZXJzaW9uEggKBjAuMzIuMnoCGAGFAQABAAAS - kwEKECe14QwK8YEX5k/HAyTJJOESCLC5kWxi9XjbKgpUb29sIFVzYWdlMAE5AHmbyGxn3BdBcA2c - yGxn3BdKGgoOY3Jld2FpX3ZlcnNpb24SCAoGMC4zMi4ySh8KCXRvb2xfbmFtZRISChBnZXRfZmlu - YWxfYW5zd2VySg4KCGF0dGVtcHRzEgIYAXoCGAGFAQABAAASkwEKECkLzxPcPFShQTRBKOuA5VwS - CEXEpf9vgT0qKgpUb29sIFVzYWdlMAE5uIX9yWxn3BdBiAr+yWxn3BdKGgoOY3Jld2FpX3ZlcnNp - b24SCAoGMC4zMi4ySh8KCXRvb2xfbmFtZRISChBnZXRfZmluYWxfYW5zd2VySg4KCGF0dGVtcHRz - EgIYAXoCGAGFAQABAAASkwEKELKxl/VCmmnFuHppNDaUdLYSCMxTFAfY7LKZKgpUb29sIFVzYWdl - MAE5cEBDy2xn3BdBKMlDy2xn3BdKGgoOY3Jld2FpX3ZlcnNpb24SCAoGMC4zMi4ySh8KCXRvb2xf - bmFtZRISChBnZXRfZmluYWxfYW5zd2VySg4KCGF0dGVtcHRzEgIYAXoCGAGFAQABAAASaAoQWsd1 - ufYSiTjFfOLUvTXiGhIIUqeG6O/KOEsqEFRvb2wgVXNhZ2UgRXJyb3IwATnonMXObGfcF0FANcbO - 
bGfcF0oaCg5jcmV3YWlfdmVyc2lvbhIICgYwLjMyLjJ6AhgBhQEAAQAAEpwBChDePaRaxJbHM9ti - mgjFTw4eEgiJJ6aTsD4j1CoKVG9vbCBVc2FnZTABObD+Q9FsZ9wXQSCTRNFsZ9wXShoKDmNyZXdh - aV92ZXJzaW9uEggKBjAuMzIuMkooCgl0b29sX25hbWUSGwoZRGVsZWdhdGUgd29yayB0byBjb3dv - cmtlckoOCghhdHRlbXB0cxICGAF6AhgBhQEAAQAAEpsBChB8AxpI4Iiqli5YpSzNuAHkEgiuoKKr - AonvqioKVG9vbCBVc2FnZTABOQhPr9ZsZ9wXQRjzr9ZsZ9wXShoKDmNyZXdhaV92ZXJzaW9uEggK - BjAuMzIuMkonCgl0b29sX25hbWUSGgoYQXNrIHF1ZXN0aW9uIHRvIGNvd29ya2VySg4KCGF0dGVt - cHRzEgIYAXoCGAGFAQABAAASkQEKEHWST7FZaG5Ij0kYfqRwQFISCGo4iAiKiKriKgpUb29sIFVz - YWdlMAE5qKLl2Wxn3BdBuEbm2Wxn3BdKGgoOY3Jld2FpX3ZlcnNpb24SCAoGMC4zMi4ySh0KCXRv - b2xfbmFtZRIQCg5sZWFybl9hYm91dF9BSUoOCghhdHRlbXB0cxICGAF6AhgBhQEAAQAAEpEBChBW - s1jZuOvGnPk5qZAn/3bvEghcLUd+91YchSoKVG9vbCBVc2FnZTABOVBUJttsZ9wXQfDgJttsZ9wX - ShoKDmNyZXdhaV92ZXJzaW9uEggKBjAuMzIuMkodCgl0b29sX25hbWUSEAoObGVhcm5fYWJvdXRf - QUlKDgoIYXR0ZW1wdHMSAhgBegIYAYUBAAEAABJoChB390oxWJKggBM1q5uqyAIWEgilZ4dcV9H3 - zyoQVG9vbCBVc2FnZSBFcnJvcjABObCnTtxsZ9wXQYAsT9xsZ9wXShoKDmNyZXdhaV92ZXJzaW9u - EggKBjAuMzIuMnoCGAGFAQABAAAS+QEKEGWFgsouWvCkBbw9hOp41fkSCHuQEq4LICdiKgpUb29s - IFVzYWdlMAE5aLsF4Wxn3BdBcIoG4Wxn3BdKGgoOY3Jld2FpX3ZlcnNpb24SCAoGMC4zMi4ySh0K - CXRvb2xfbmFtZRIQCg5sZWFybl9hYm91dF9BSUoOCghhdHRlbXB0cxICGAFKZgoDbGxtEl8KXXsi - bmFtZSI6IG51bGwsICJtb2RlbF9uYW1lIjogImdwdC0zLjUtdHVyYm8tMDEyNSIsICJ0ZW1wZXJh - dHVyZSI6IDAuNywgImNsYXNzIjogIkNoYXRPcGVuQUkifXoCGAGFAQABAAAS+QEKEH74JqRDD3nP - pYFzHXVuRBMSCOVLhi1+G6VtKgpUb29sIFVzYWdlMAE5GDRy4mxn3BdB+N9y4mxn3BdKGgoOY3Jl - d2FpX3ZlcnNpb24SCAoGMC4zMi4ySh0KCXRvb2xfbmFtZRIQCg5sZWFybl9hYm91dF9BSUoOCghh - dHRlbXB0cxICGAFKZgoDbGxtEl8KXXsibmFtZSI6IG51bGwsICJtb2RlbF9uYW1lIjogImdwdC0z - LjUtdHVyYm8tMDEyNSIsICJ0ZW1wZXJhdHVyZSI6IDAuNywgImNsYXNzIjogIkNoYXRPcGVuQUki - fXoCGAGFAQABAAAS+QEKELEPPr1EMWxY9eCI/9VxYRISCBifMoJVgs46KgpUb29sIFVzYWdlMAE5 - WMIV5Gxn3BdBOG4W5Gxn3BdKGgoOY3Jld2FpX3ZlcnNpb24SCAoGMC4zMi4ySh0KCXRvb2xfbmFt - ZRIQCg5sZWFybl9hYm91dF9BSUoOCghhdHRlbXB0cxICGAFKZgoDbGxtEl8KXXsibmFtZSI6IG51 - bGwsICJtb2RlbF9uYW1lIjogImdwdC0zLjUtdHVyYm8tMDEyNSIsICJ0ZW1wZXJhdHVyZSI6IDAu - NywgImNsYXNzIjogIkNoYXRPcGVuQUkifXoCGAGFAQABAAASnAEKEIj74gWv+Pwo4caZDgnygaYS - CETmyodLQY8/KgpUb29sIFVzYWdlMAE5sBTAFm1n3BdBANjAFm1n3BdKGgoOY3Jld2FpX3ZlcnNp - b24SCAoGMC4zMi4ySigKCXRvb2xfbmFtZRIbChlEZWxlZ2F0ZSB3b3JrIHRvIGNvd29ya2VySg4K - CGF0dGVtcHRzEgIYAXoCGAGFAQABAAASnAEKEDq45gMODvysUf5wQQf4F+0SCHZHV4/9LnhnKgpU - b29sIFVzYWdlMAE5EDHQIG1n3BdBwOTQIG1n3BdKGgoOY3Jld2FpX3ZlcnNpb24SCAoGMC4zMi4y - SigKCXRvb2xfbmFtZRIbChlEZWxlZ2F0ZSB3b3JrIHRvIGNvd29ya2VySg4KCGF0dGVtcHRzEgIY - AXoCGAGFAQABAAASnAEKEJHNsGUrqNbXyAOQPZnty9kSCAASe0iLg1zKKgpUb29sIFVzYWdlMAE5 - kCpPLW1n3BdBmPlPLW1n3BdKGgoOY3Jld2FpX3ZlcnNpb24SCAoGMC4zMi4ySigKCXRvb2xfbmFt - ZRIbChlEZWxlZ2F0ZSB3b3JrIHRvIGNvd29ya2VySg4KCGF0dGVtcHRzEgIYAXoCGAGFAQABAAAS - jQEKEP5ZHkp/aYaAyGuu53Anw2kSCDu8tanZxniRKgpUb29sIFVzYWdlMAE5iFVHWG1n3BdBUAVI - WG1n3BdKGgoOY3Jld2FpX3ZlcnNpb24SCAoGMC4zMi4yShkKCXRvb2xfbmFtZRIMCgptdWx0aXBs - aWVySg4KCGF0dGVtcHRzEgIYAXoCGAGFAQABAAASjQEKEFPBMl1rYMxgfMdCym2d+yUSCIu9WYA8 - 9juvKgpUb29sIFVzYWdlMAE5YK3oWW1n3BdB6D3pWW1n3BdKGgoOY3Jld2FpX3ZlcnNpb24SCAoG - MC4zMi4yShkKCXRvb2xfbmFtZRIMCgptdWx0aXBsaWVySg4KCGF0dGVtcHRzEgIYAXoCGAGFAQAB - AAASaAoQCQJWjJpe1L2HUfWpwOVCVRIIb/BALasmjQEqEFRvb2wgVXNhZ2UgRXJyb3IwATmAGdJc - bWfcF0F4wdJcbWfcF0oaCg5jcmV3YWlfdmVyc2lvbhIICgYwLjMyLjJ6AhgBhQEAAQAAEmgKED/Z - tmI7I3o+4n/FgbLHhtMSCGEtw+VqWPc+KhBUb29sIFVzYWdlIEVycm9yMAE5aDyyXW1n3BdBIMWy - XW1n3BdKGgoOY3Jld2FpX3ZlcnNpb24SCAoGMC4zMi4yegIYAYUBAAEAAA== - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate, br - Connection: - - 
keep-alive - Content-Length: - - '4261' - Content-Type: - - application/x-protobuf - User-Agent: - - OTel-OTLP-Exporter-Python/1.25.0 - method: POST - uri: https://telemetry.crewai.com:4319/v1/traces - response: - body: - string: "\n\0" - headers: - Content-Length: - - '2' - Content-Type: - - application/x-protobuf - Date: - - Wed, 26 Jun 2024 00:47:59 GMT - status: - code: 200 - message: OK -- request: - body: '{"messages": [{"content": "You are test role. test backstory\nYour personal - goal is: test goalTo give my best complete final answer to the task use the - exact following format:\n\nThought: I now can give a great answer\nFinal Answer: - my best complete final answer to the task.\nYour final answer must be the great - and the most complete as possible, it must be outcome described.\n\nI MUST use - these formats, my job depends on it!\nCurrent Task: just say hi!\n\nThis is - the expect criteria for your final answer: your greeting \n you MUST return - the actual complete content as the final answer, not a summary.\n\nBegin! This - is VERY important to you, use the tools available and give your best Final Answer, - your job depends on it!\n\nThought:\n", "role": "user"}], "model": "gpt-4o", + body: '{"messages": [{"content": "You are {topic} Researcher. You have a lot of + experience with {topic}.\nYour personal goal is: Express hot takes on {topic}.To + give my best complete final answer to the task use the exact following format:\n\nThought: + I now can give a great answer\nFinal Answer: my best complete final answer to + the task.\nYour final answer must be the great and the most complete as possible, + it must be outcome described.\n\nI MUST use these formats, my job depends on + it!\nCurrent Task: Give me an analysis around dog.\n\nThis is the expect criteria + for your final answer: 1 bullet point about dog that''s under 15 words. \n you + MUST return the actual complete content as the final answer, not a summary.\n\nBegin! 
+ This is VERY important to you, use the tools available and give your best Final + Answer, your job depends on it!\n\nThought:\n", "role": "user"}], "model": "gpt-4o", "n": 1, "stop": ["\nObservation"], "stream": true, "temperature": 0.7}' headers: accept: @@ -400269,13 +400165,13 @@ interactions: connection: - keep-alive content-length: - - '853' + - '963' content-type: - application/json host: - api.openai.com user-agent: - - OpenAI/Python 1.35.3 + - OpenAI/Python 1.34.0 x-stainless-arch: - arm64 x-stainless-async: @@ -400285,73 +400181,116 @@ interactions: x-stainless-os: - MacOS x-stainless-package-version: - - 1.35.3 + - 1.34.0 x-stainless-runtime: - CPython x-stainless-runtime-version: - - 3.11.9 + - 3.12.3 method: POST uri: https://api.openai.com/v1/chat/completions response: body: - string: 'data: {"id":"chatcmpl-9eB7HGovGTNNv8jlZBbEHKCMAa2Sn","object":"chat.completion.chunk","created":1719362879,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5bf7397cd3","choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]} + string: 'data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]} - data: {"id":"chatcmpl-9eB7HGovGTNNv8jlZBbEHKCMAa2Sn","object":"chat.completion.chunk","created":1719362879,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5bf7397cd3","choices":[{"index":0,"delta":{"content":"Thought"},"logprobs":null,"finish_reason":null}]} + data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":"Thought"},"logprobs":null,"finish_reason":null}]} - data: {"id":"chatcmpl-9eB7HGovGTNNv8jlZBbEHKCMAa2Sn","object":"chat.completion.chunk","created":1719362879,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5bf7397cd3","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]} + data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]} - data: {"id":"chatcmpl-9eB7HGovGTNNv8jlZBbEHKCMAa2Sn","object":"chat.completion.chunk","created":1719362879,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5bf7397cd3","choices":[{"index":0,"delta":{"content":" + data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" I"},"logprobs":null,"finish_reason":null}]} - data: {"id":"chatcmpl-9eB7HGovGTNNv8jlZBbEHKCMAa2Sn","object":"chat.completion.chunk","created":1719362879,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5bf7397cd3","choices":[{"index":0,"delta":{"content":" + data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" now"},"logprobs":null,"finish_reason":null}]} - data: 
{"id":"chatcmpl-9eB7HGovGTNNv8jlZBbEHKCMAa2Sn","object":"chat.completion.chunk","created":1719362879,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5bf7397cd3","choices":[{"index":0,"delta":{"content":" + data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" can"},"logprobs":null,"finish_reason":null}]} - data: {"id":"chatcmpl-9eB7HGovGTNNv8jlZBbEHKCMAa2Sn","object":"chat.completion.chunk","created":1719362879,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5bf7397cd3","choices":[{"index":0,"delta":{"content":" + data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" give"},"logprobs":null,"finish_reason":null}]} - data: {"id":"chatcmpl-9eB7HGovGTNNv8jlZBbEHKCMAa2Sn","object":"chat.completion.chunk","created":1719362879,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5bf7397cd3","choices":[{"index":0,"delta":{"content":" + data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" a"},"logprobs":null,"finish_reason":null}]} - data: {"id":"chatcmpl-9eB7HGovGTNNv8jlZBbEHKCMAa2Sn","object":"chat.completion.chunk","created":1719362879,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5bf7397cd3","choices":[{"index":0,"delta":{"content":" + data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" great"},"logprobs":null,"finish_reason":null}]} - data: {"id":"chatcmpl-9eB7HGovGTNNv8jlZBbEHKCMAa2Sn","object":"chat.completion.chunk","created":1719362879,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5bf7397cd3","choices":[{"index":0,"delta":{"content":" + data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" answer"},"logprobs":null,"finish_reason":null}]} - data: {"id":"chatcmpl-9eB7HGovGTNNv8jlZBbEHKCMAa2Sn","object":"chat.completion.chunk","created":1719362879,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5bf7397cd3","choices":[{"index":0,"delta":{"content":"\n"},"logprobs":null,"finish_reason":null}]} + data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":"\n"},"logprobs":null,"finish_reason":null}]} - data: {"id":"chatcmpl-9eB7HGovGTNNv8jlZBbEHKCMAa2Sn","object":"chat.completion.chunk","created":1719362879,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5bf7397cd3","choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]} + data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]} - data: 
{"id":"chatcmpl-9eB7HGovGTNNv8jlZBbEHKCMAa2Sn","object":"chat.completion.chunk","created":1719362879,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5bf7397cd3","choices":[{"index":0,"delta":{"content":" + data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" Answer"},"logprobs":null,"finish_reason":null}]} - data: {"id":"chatcmpl-9eB7HGovGTNNv8jlZBbEHKCMAa2Sn","object":"chat.completion.chunk","created":1719362879,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5bf7397cd3","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]} + data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]} - data: {"id":"chatcmpl-9eB7HGovGTNNv8jlZBbEHKCMAa2Sn","object":"chat.completion.chunk","created":1719362879,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5bf7397cd3","choices":[{"index":0,"delta":{"content":" - Hi"},"logprobs":null,"finish_reason":null}]} + data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" + Dogs"},"logprobs":null,"finish_reason":null}]} - data: {"id":"chatcmpl-9eB7HGovGTNNv8jlZBbEHKCMAa2Sn","object":"chat.completion.chunk","created":1719362879,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5bf7397cd3","choices":[{"index":0,"delta":{"content":"!"},"logprobs":null,"finish_reason":null}]} + data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" + are"},"logprobs":null,"finish_reason":null}]} - data: {"id":"chatcmpl-9eB7HGovGTNNv8jlZBbEHKCMAa2Sn","object":"chat.completion.chunk","created":1719362879,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5bf7397cd3","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]} + data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" + loyal"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" + companions"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" + and"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" + enhance"},"logprobs":null,"finish_reason":null}]} + + + data: 
{"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" + human"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" + well"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":"-being"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" + through"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" + emotional"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" + support"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]} data: [DONE] @@ -400362,20 +400301,20 @@ interactions: CF-Cache-Status: - DYNAMIC CF-RAY: - - 8999306a3c2b97ff-SJC + - 8985231e8e72453b-ATL Connection: - keep-alive Content-Type: - text/event-stream; charset=utf-8 Date: - - Wed, 26 Jun 2024 00:48:00 GMT + - Sun, 23 Jun 2024 14:23:39 GMT Server: - cloudflare Set-Cookie: - - __cf_bm=cZ1tqEJHf90QH5Wpf8u5w22pMiXEVmHqh.l_FNQzPUE-1719362880-1.0.1.1-ukAMHHbJtb1dXgdiJ9rZPnK9Dr9955CracMEW21Vc4n80TvL8Aj_Ke_B7G85jBPWrNAAQ0LYFLomosxQMd_qSg; - path=/; expires=Wed, 26-Jun-24 01:18:00 GMT; domain=.api.openai.com; HttpOnly; + - __cf_bm=eo7Wv_7IoFobCr06vVNbn6fiTV000XuWJ5mG6A5XfCI-1719152619-1.0.1.1-VWt6JrnrLWxl1Heg2Mc9q1an5j9.JHISIS2VP5qbC_YCwRn5WkI_QykzIBPo8kil7ndx45QBS0fUvVDo22tWKQ; + path=/; expires=Sun, 23-Jun-24 14:53:39 GMT; domain=.api.openai.com; HttpOnly; Secure; SameSite=None - - _cfuvid=jB8ShrBkfWJf2HStRWtCrxPo9TmM70i91UOQVSXB_ik-1719362880048-0.0.1.1-604800000; + - _cfuvid=zHfpY3hveQ2EtdkjxmAEpZPcSMSo.R1IXEnggmqkQEI-1719152619731-0.0.1.1-604800000; path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None Transfer-Encoding: - chunked @@ -400384,41 +400323,41 @@ interactions: openai-organization: - crewai-iuxna1 openai-processing-ms: - - '766' + - '115' openai-version: - '2020-10-01' strict-transport-security: - - max-age=31536000; includeSubDomains + - max-age=15724800; includeSubDomains x-ratelimit-limit-requests: - '10000' 
x-ratelimit-limit-tokens: - - '16000000' + - '12000000' x-ratelimit-remaining-requests: - '9999' x-ratelimit-remaining-tokens: - - '15999808' + - '11999780' x-ratelimit-reset-requests: - 6ms x-ratelimit-reset-tokens: - - 0s + - 1ms x-request-id: - - e2e310e93ef808475e7ac73c1ef60ec0 + - req_6e1809edb78fae06d8a20ed070c6e64a status: code: 200 message: OK - request: - body: '{"messages": [{"content": "You are test role. test backstory\nYour personal - goal is: test goalTo give my best complete final answer to the task use the - exact following format:\n\nThought: I now can give a great answer\nFinal Answer: - my best complete final answer to the task.\nYour final answer must be the great - and the most complete as possible, it must be outcome described.\n\nI MUST use - these formats, my job depends on it!\nCurrent Task: just say hello!\n\nThis - is the expect criteria for your final answer: your greeting \n you MUST return - the actual complete content as the final answer, not a summary.\n\nThis is the - context you''re working with:\nHi!\n\nBegin! This is VERY important to you, - use the tools available and give your best Final Answer, your job depends on - it!\n\nThought:\n", "role": "user"}], "model": "gpt-4o", "n": 1, "stop": ["\nObservation"], - "stream": true, "temperature": 0.7}' + body: '{"messages": [{"content": "You are {topic} Researcher. You have a lot of + experience with {topic}.\nYour personal goal is: Express hot takes on {topic}.To + give my best complete final answer to the task use the exact following format:\n\nThought: + I now can give a great answer\nFinal Answer: my best complete final answer to + the task.\nYour final answer must be the great and the most complete as possible, + it must be outcome described.\n\nI MUST use these formats, my job depends on + it!\nCurrent Task: Give me an analysis around cat.\n\nThis is the expect criteria + for your final answer: 1 bullet point about cat that''s under 15 words. \n you + MUST return the actual complete content as the final answer, not a summary.\n\nBegin! 
+ This is VERY important to you, use the tools available and give your best Final + Answer, your job depends on it!\n\nThought:\n", "role": "user"}], "model": "gpt-4o", + "n": 1, "stop": ["\nObservation"], "stream": true, "temperature": 0.7}' headers: accept: - application/json @@ -400427,16 +400366,16 @@ interactions: connection: - keep-alive content-length: - - '905' + - '963' content-type: - application/json cookie: - - __cf_bm=cZ1tqEJHf90QH5Wpf8u5w22pMiXEVmHqh.l_FNQzPUE-1719362880-1.0.1.1-ukAMHHbJtb1dXgdiJ9rZPnK9Dr9955CracMEW21Vc4n80TvL8Aj_Ke_B7G85jBPWrNAAQ0LYFLomosxQMd_qSg; - _cfuvid=jB8ShrBkfWJf2HStRWtCrxPo9TmM70i91UOQVSXB_ik-1719362880048-0.0.1.1-604800000 + - __cf_bm=eo7Wv_7IoFobCr06vVNbn6fiTV000XuWJ5mG6A5XfCI-1719152619-1.0.1.1-VWt6JrnrLWxl1Heg2Mc9q1an5j9.JHISIS2VP5qbC_YCwRn5WkI_QykzIBPo8kil7ndx45QBS0fUvVDo22tWKQ; + _cfuvid=zHfpY3hveQ2EtdkjxmAEpZPcSMSo.R1IXEnggmqkQEI-1719152619731-0.0.1.1-604800000 host: - api.openai.com user-agent: - - OpenAI/Python 1.35.3 + - OpenAI/Python 1.34.0 x-stainless-arch: - arm64 x-stainless-async: @@ -400446,73 +400385,116 @@ interactions: x-stainless-os: - MacOS x-stainless-package-version: - - 1.35.3 + - 1.34.0 x-stainless-runtime: - CPython x-stainless-runtime-version: - - 3.11.9 + - 3.12.3 method: POST uri: https://api.openai.com/v1/chat/completions response: body: - string: 'data: {"id":"chatcmpl-9eB7I99F3dYCupsw3MZZeCiuDZbnU","object":"chat.completion.chunk","created":1719362880,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5bf7397cd3","choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]} + string: 'data: {"id":"chatcmpl-9dIQ0Z7g9gLoEyFe4tuHZY3zDHIxh","object":"chat.completion.chunk","created":1719152620,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]} - data: {"id":"chatcmpl-9eB7I99F3dYCupsw3MZZeCiuDZbnU","object":"chat.completion.chunk","created":1719362880,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5bf7397cd3","choices":[{"index":0,"delta":{"content":"Thought"},"logprobs":null,"finish_reason":null}]} + data: {"id":"chatcmpl-9dIQ0Z7g9gLoEyFe4tuHZY3zDHIxh","object":"chat.completion.chunk","created":1719152620,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":"Thought"},"logprobs":null,"finish_reason":null}]} - data: {"id":"chatcmpl-9eB7I99F3dYCupsw3MZZeCiuDZbnU","object":"chat.completion.chunk","created":1719362880,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5bf7397cd3","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]} + data: {"id":"chatcmpl-9dIQ0Z7g9gLoEyFe4tuHZY3zDHIxh","object":"chat.completion.chunk","created":1719152620,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]} - data: {"id":"chatcmpl-9eB7I99F3dYCupsw3MZZeCiuDZbnU","object":"chat.completion.chunk","created":1719362880,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5bf7397cd3","choices":[{"index":0,"delta":{"content":" + data: {"id":"chatcmpl-9dIQ0Z7g9gLoEyFe4tuHZY3zDHIxh","object":"chat.completion.chunk","created":1719152620,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" I"},"logprobs":null,"finish_reason":null}]} - data: 
{"id":"chatcmpl-9eB7I99F3dYCupsw3MZZeCiuDZbnU","object":"chat.completion.chunk","created":1719362880,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5bf7397cd3","choices":[{"index":0,"delta":{"content":" + data: {"id":"chatcmpl-9dIQ0Z7g9gLoEyFe4tuHZY3zDHIxh","object":"chat.completion.chunk","created":1719152620,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" now"},"logprobs":null,"finish_reason":null}]} - data: {"id":"chatcmpl-9eB7I99F3dYCupsw3MZZeCiuDZbnU","object":"chat.completion.chunk","created":1719362880,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5bf7397cd3","choices":[{"index":0,"delta":{"content":" + data: {"id":"chatcmpl-9dIQ0Z7g9gLoEyFe4tuHZY3zDHIxh","object":"chat.completion.chunk","created":1719152620,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" can"},"logprobs":null,"finish_reason":null}]} - data: {"id":"chatcmpl-9eB7I99F3dYCupsw3MZZeCiuDZbnU","object":"chat.completion.chunk","created":1719362880,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5bf7397cd3","choices":[{"index":0,"delta":{"content":" + data: {"id":"chatcmpl-9dIQ0Z7g9gLoEyFe4tuHZY3zDHIxh","object":"chat.completion.chunk","created":1719152620,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" give"},"logprobs":null,"finish_reason":null}]} - data: {"id":"chatcmpl-9eB7I99F3dYCupsw3MZZeCiuDZbnU","object":"chat.completion.chunk","created":1719362880,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5bf7397cd3","choices":[{"index":0,"delta":{"content":" + data: {"id":"chatcmpl-9dIQ0Z7g9gLoEyFe4tuHZY3zDHIxh","object":"chat.completion.chunk","created":1719152620,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" a"},"logprobs":null,"finish_reason":null}]} - data: {"id":"chatcmpl-9eB7I99F3dYCupsw3MZZeCiuDZbnU","object":"chat.completion.chunk","created":1719362880,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5bf7397cd3","choices":[{"index":0,"delta":{"content":" + data: {"id":"chatcmpl-9dIQ0Z7g9gLoEyFe4tuHZY3zDHIxh","object":"chat.completion.chunk","created":1719152620,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" great"},"logprobs":null,"finish_reason":null}]} - data: {"id":"chatcmpl-9eB7I99F3dYCupsw3MZZeCiuDZbnU","object":"chat.completion.chunk","created":1719362880,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5bf7397cd3","choices":[{"index":0,"delta":{"content":" + data: {"id":"chatcmpl-9dIQ0Z7g9gLoEyFe4tuHZY3zDHIxh","object":"chat.completion.chunk","created":1719152620,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" answer"},"logprobs":null,"finish_reason":null}]} - data: {"id":"chatcmpl-9eB7I99F3dYCupsw3MZZeCiuDZbnU","object":"chat.completion.chunk","created":1719362880,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5bf7397cd3","choices":[{"index":0,"delta":{"content":"\n"},"logprobs":null,"finish_reason":null}]} + data: {"id":"chatcmpl-9dIQ0Z7g9gLoEyFe4tuHZY3zDHIxh","object":"chat.completion.chunk","created":1719152620,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":"\n"},"logprobs":null,"finish_reason":null}]} - data: 
{"id":"chatcmpl-9eB7I99F3dYCupsw3MZZeCiuDZbnU","object":"chat.completion.chunk","created":1719362880,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5bf7397cd3","choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]} + data: {"id":"chatcmpl-9dIQ0Z7g9gLoEyFe4tuHZY3zDHIxh","object":"chat.completion.chunk","created":1719152620,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]} - data: {"id":"chatcmpl-9eB7I99F3dYCupsw3MZZeCiuDZbnU","object":"chat.completion.chunk","created":1719362880,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5bf7397cd3","choices":[{"index":0,"delta":{"content":" + data: {"id":"chatcmpl-9dIQ0Z7g9gLoEyFe4tuHZY3zDHIxh","object":"chat.completion.chunk","created":1719152620,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" Answer"},"logprobs":null,"finish_reason":null}]} - data: {"id":"chatcmpl-9eB7I99F3dYCupsw3MZZeCiuDZbnU","object":"chat.completion.chunk","created":1719362880,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5bf7397cd3","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]} + data: {"id":"chatcmpl-9dIQ0Z7g9gLoEyFe4tuHZY3zDHIxh","object":"chat.completion.chunk","created":1719152620,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]} - data: {"id":"chatcmpl-9eB7I99F3dYCupsw3MZZeCiuDZbnU","object":"chat.completion.chunk","created":1719362880,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5bf7397cd3","choices":[{"index":0,"delta":{"content":" - Hello"},"logprobs":null,"finish_reason":null}]} + data: {"id":"chatcmpl-9dIQ0Z7g9gLoEyFe4tuHZY3zDHIxh","object":"chat.completion.chunk","created":1719152620,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" + Cats"},"logprobs":null,"finish_reason":null}]} - data: {"id":"chatcmpl-9eB7I99F3dYCupsw3MZZeCiuDZbnU","object":"chat.completion.chunk","created":1719362880,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5bf7397cd3","choices":[{"index":0,"delta":{"content":"!"},"logprobs":null,"finish_reason":null}]} + data: {"id":"chatcmpl-9dIQ0Z7g9gLoEyFe4tuHZY3zDHIxh","object":"chat.completion.chunk","created":1719152620,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" + are"},"logprobs":null,"finish_reason":null}]} - data: {"id":"chatcmpl-9eB7I99F3dYCupsw3MZZeCiuDZbnU","object":"chat.completion.chunk","created":1719362880,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5bf7397cd3","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]} + data: {"id":"chatcmpl-9dIQ0Z7g9gLoEyFe4tuHZY3zDHIxh","object":"chat.completion.chunk","created":1719152620,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" + independent"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9dIQ0Z7g9gLoEyFe4tuHZY3zDHIxh","object":"chat.completion.chunk","created":1719152620,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":","},"logprobs":null,"finish_reason":null}]} + + + data: 
{"id":"chatcmpl-9dIQ0Z7g9gLoEyFe4tuHZY3zDHIxh","object":"chat.completion.chunk","created":1719152620,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" + curious"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9dIQ0Z7g9gLoEyFe4tuHZY3zDHIxh","object":"chat.completion.chunk","created":1719152620,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" + creatures"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9dIQ0Z7g9gLoEyFe4tuHZY3zDHIxh","object":"chat.completion.chunk","created":1719152620,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" + with"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9dIQ0Z7g9gLoEyFe4tuHZY3zDHIxh","object":"chat.completion.chunk","created":1719152620,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" + a"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9dIQ0Z7g9gLoEyFe4tuHZY3zDHIxh","object":"chat.completion.chunk","created":1719152620,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" + strong"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9dIQ0Z7g9gLoEyFe4tuHZY3zDHIxh","object":"chat.completion.chunk","created":1719152620,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" + sense"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9dIQ0Z7g9gLoEyFe4tuHZY3zDHIxh","object":"chat.completion.chunk","created":1719152620,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" + of"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9dIQ0Z7g9gLoEyFe4tuHZY3zDHIxh","object":"chat.completion.chunk","created":1719152620,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" + territory"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9dIQ0Z7g9gLoEyFe4tuHZY3zDHIxh","object":"chat.completion.chunk","created":1719152620,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9dIQ0Z7g9gLoEyFe4tuHZY3zDHIxh","object":"chat.completion.chunk","created":1719152620,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]} data: [DONE] @@ -400523,13 +400505,13 @@ interactions: CF-Cache-Status: - DYNAMIC CF-RAY: - - 89993072097897ff-SJC + - 898523253f8c453b-ATL Connection: - keep-alive Content-Type: - text/event-stream; charset=utf-8 Date: - - Wed, 26 Jun 2024 00:48:00 GMT + - Sun, 23 Jun 2024 14:23:40 GMT Server: - cloudflare Transfer-Encoding: @@ -400539,26 +400521,230 @@ interactions: openai-organization: - crewai-iuxna1 openai-processing-ms: - - '134' + - '121' openai-version: - '2020-10-01' strict-transport-security: - - max-age=31536000; includeSubDomains + - max-age=15724800; includeSubDomains x-ratelimit-limit-requests: - '10000' x-ratelimit-limit-tokens: - - '16000000' + - '12000000' x-ratelimit-remaining-requests: - '9999' x-ratelimit-remaining-tokens: - - '15999794' + - '11999779' x-ratelimit-reset-requests: - 6ms x-ratelimit-reset-tokens: - - 0s + - 1ms x-request-id: - - 
95f7e7603df593de5f778bfbd77838c9 + - req_fcdeaba57c8c0b5c80fdaaf4f3949da3 status: code: 200 message: OK -version: 1 \ No newline at end of file +- request: + body: '{"messages": [{"content": "You are {topic} Researcher. You have a lot of + experience with {topic}.\nYour personal goal is: Express hot takes on {topic}.To + give my best complete final answer to the task use the exact following format:\n\nThought: + I now can give a great answer\nFinal Answer: my best complete final answer to + the task.\nYour final answer must be the great and the most complete as possible, + it must be outcome described.\n\nI MUST use these formats, my job depends on + it!\nCurrent Task: Give me an analysis around apple.\n\nThis is the expect criteria + for your final answer: 1 bullet point about apple that''s under 15 words. \n + you MUST return the actual complete content as the final answer, not a summary.\n\nBegin! + This is VERY important to you, use the tools available and give your best Final + Answer, your job depends on it!\n\nThought:\n", "role": "user"}], "model": "gpt-4o", + "n": 1, "stop": ["\nObservation"], "stream": true, "temperature": 0.7}' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate, br + connection: + - keep-alive + content-length: + - '967' + content-type: + - application/json + cookie: + - __cf_bm=eo7Wv_7IoFobCr06vVNbn6fiTV000XuWJ5mG6A5XfCI-1719152619-1.0.1.1-VWt6JrnrLWxl1Heg2Mc9q1an5j9.JHISIS2VP5qbC_YCwRn5WkI_QykzIBPo8kil7ndx45QBS0fUvVDo22tWKQ; + _cfuvid=zHfpY3hveQ2EtdkjxmAEpZPcSMSo.R1IXEnggmqkQEI-1719152619731-0.0.1.1-604800000 + host: + - api.openai.com + user-agent: + - OpenAI/Python 1.34.0 + x-stainless-arch: + - arm64 + x-stainless-async: + - 'false' + x-stainless-lang: + - python + x-stainless-os: + - MacOS + x-stainless-package-version: + - 1.34.0 + x-stainless-runtime: + - CPython + x-stainless-runtime-version: + - 3.12.3 + method: POST + uri: https://api.openai.com/v1/chat/completions + response: + body: + string: 'data: {"id":"chatcmpl-9dIQ14W5gaHA4p3dYi6n7FrRBnzhN","object":"chat.completion.chunk","created":1719152621,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9dIQ14W5gaHA4p3dYi6n7FrRBnzhN","object":"chat.completion.chunk","created":1719152621,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":"Thought"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9dIQ14W5gaHA4p3dYi6n7FrRBnzhN","object":"chat.completion.chunk","created":1719152621,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9dIQ14W5gaHA4p3dYi6n7FrRBnzhN","object":"chat.completion.chunk","created":1719152621,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" + I"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9dIQ14W5gaHA4p3dYi6n7FrRBnzhN","object":"chat.completion.chunk","created":1719152621,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" + now"},"logprobs":null,"finish_reason":null}]} + + + data: 
{"id":"chatcmpl-9dIQ14W5gaHA4p3dYi6n7FrRBnzhN","object":"chat.completion.chunk","created":1719152621,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" + can"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9dIQ14W5gaHA4p3dYi6n7FrRBnzhN","object":"chat.completion.chunk","created":1719152621,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" + give"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9dIQ14W5gaHA4p3dYi6n7FrRBnzhN","object":"chat.completion.chunk","created":1719152621,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" + a"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9dIQ14W5gaHA4p3dYi6n7FrRBnzhN","object":"chat.completion.chunk","created":1719152621,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" + great"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9dIQ14W5gaHA4p3dYi6n7FrRBnzhN","object":"chat.completion.chunk","created":1719152621,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" + answer"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9dIQ14W5gaHA4p3dYi6n7FrRBnzhN","object":"chat.completion.chunk","created":1719152621,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":".\n"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9dIQ14W5gaHA4p3dYi6n7FrRBnzhN","object":"chat.completion.chunk","created":1719152621,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9dIQ14W5gaHA4p3dYi6n7FrRBnzhN","object":"chat.completion.chunk","created":1719152621,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" + Answer"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9dIQ14W5gaHA4p3dYi6n7FrRBnzhN","object":"chat.completion.chunk","created":1719152621,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9dIQ14W5gaHA4p3dYi6n7FrRBnzhN","object":"chat.completion.chunk","created":1719152621,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" + Apple"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9dIQ14W5gaHA4p3dYi6n7FrRBnzhN","object":"chat.completion.chunk","created":1719152621,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" + revolution"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9dIQ14W5gaHA4p3dYi6n7FrRBnzhN","object":"chat.completion.chunk","created":1719152621,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":"ized"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9dIQ14W5gaHA4p3dYi6n7FrRBnzhN","object":"chat.completion.chunk","created":1719152621,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" + technology"},"logprobs":null,"finish_reason":null}]} + + + data: 
{"id":"chatcmpl-9dIQ14W5gaHA4p3dYi6n7FrRBnzhN","object":"chat.completion.chunk","created":1719152621,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" + with"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9dIQ14W5gaHA4p3dYi6n7FrRBnzhN","object":"chat.completion.chunk","created":1719152621,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" + the"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9dIQ14W5gaHA4p3dYi6n7FrRBnzhN","object":"chat.completion.chunk","created":1719152621,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" + i"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9dIQ14W5gaHA4p3dYi6n7FrRBnzhN","object":"chat.completion.chunk","created":1719152621,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":"Phone"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9dIQ14W5gaHA4p3dYi6n7FrRBnzhN","object":"chat.completion.chunk","created":1719152621,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":","},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9dIQ14W5gaHA4p3dYi6n7FrRBnzhN","object":"chat.completion.chunk","created":1719152621,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" + transforming"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9dIQ14W5gaHA4p3dYi6n7FrRBnzhN","object":"chat.completion.chunk","created":1719152621,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" + communication"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9dIQ14W5gaHA4p3dYi6n7FrRBnzhN","object":"chat.completion.chunk","created":1719152621,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" + and"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9dIQ14W5gaHA4p3dYi6n7FrRBnzhN","object":"chat.completion.chunk","created":1719152621,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" + media"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9dIQ14W5gaHA4p3dYi6n7FrRBnzhN","object":"chat.completion.chunk","created":1719152621,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":" + consumption"},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9dIQ14W5gaHA4p3dYi6n7FrRBnzhN","object":"chat.completion.chunk","created":1719152621,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]} + + + data: {"id":"chatcmpl-9dIQ14W5gaHA4p3dYi6n7FrRBnzhN","object":"chat.completion.chunk","created":1719152621,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]} + + + data: [DONE] + + + ' + headers: + CF-Cache-Status: + - DYNAMIC + CF-RAY: + - 8985232a4e71453b-ATL + Connection: + - keep-alive + Content-Type: + - text/event-stream; charset=utf-8 + Date: + - Sun, 23 Jun 2024 14:23:41 GMT + Server: + - cloudflare + Transfer-Encoding: + - chunked + alt-svc: + - h3=":443"; ma=86400 + 
      openai-organization:
+      - crewai-iuxna1
+      openai-processing-ms:
+      - '108'
+      openai-version:
+      - '2020-10-01'
+      strict-transport-security:
+      - max-age=15724800; includeSubDomains
+      x-ratelimit-limit-requests:
+      - '10000'
+      x-ratelimit-limit-tokens:
+      - '12000000'
+      x-ratelimit-remaining-requests:
+      - '9999'
+      x-ratelimit-remaining-tokens:
+      - '11999779'
+      x-ratelimit-reset-requests:
+      - 6ms
+      x-ratelimit-reset-tokens:
+      - 1ms
+      x-request-id:
+      - req_d8c11c52c26f88d19413eb84f91587da
+    status:
+      code: 200
+      message: OK
+version: 1
diff --git a/tests/crew_test.py b/tests/crew_test.py
index 5fe9bc1b5..cbf07c649 100644
--- a/tests/crew_test.py
+++ b/tests/crew_test.py
@@ -360,7 +360,7 @@ def test_api_calls_throttling(capsys):


 @pytest.mark.vcr(filter_headers=["authorization"])
-def test_crew_full_ouput():
+def test_crew_full_output():
     agent = Agent(
         role="test role",
         goal="test goal",
@@ -388,14 +388,94 @@ def test_crew_full_ouput():
         "final_output": "Hello!",
         "tasks_outputs": [task1.output, task2.output],
         "usage_metrics": {
-            "total_tokens": 517,
-            "prompt_tokens": 466,
-            "completion_tokens": 51,
-            "successful_requests": 3,
+            "total_tokens": 348,
+            "prompt_tokens": 314,
+            "completion_tokens": 34,
+            "successful_requests": 2,
         },
     }


+@pytest.mark.vcr(filter_headers=["authorization"])
+def test_crew_kickoff_for_each_full_output():
+    inputs = [
+        {"topic": "dog"},
+        {"topic": "cat"},
+        {"topic": "apple"},
+    ]
+
+    agent = Agent(
+        role="{topic} Researcher",
+        goal="Express hot takes on {topic}.",
+        backstory="You have a lot of experience with {topic}.",
+    )
+
+    task = Task(
+        description="Give me an analysis around {topic}.",
+        expected_output="1 bullet point about {topic} that's under 15 words.",
+        agent=agent,
+    )
+
+    crew = Crew(agents=[agent], tasks=[task], full_output=True)
+    results = crew.kickoff_for_each(inputs=inputs)
+
+    assert len(results) == len(inputs)
+    for result in results:
+        assert "usage_metrics" in result
+        assert isinstance(result["usage_metrics"], dict)
+
+        # Assert that all required keys are in usage_metrics and their values are positive
+        for key in [
+            "total_tokens",
+            "prompt_tokens",
+            "completion_tokens",
+            "successful_requests",
+        ]:
+            assert key in result["usage_metrics"]
+            assert result["usage_metrics"][key] > 0
+
+
+@pytest.mark.vcr(filter_headers=["authorization"])
+@pytest.mark.asyncio
+async def test_crew_async_kickoff_for_each_full_output():
+    inputs = [
+        {"topic": "dog"},
+        {"topic": "cat"},
+        {"topic": "apple"},
+    ]
+
+    agent = Agent(
+        role="{topic} Researcher",
+        goal="Express hot takes on {topic}.",
+        backstory="You have a lot of experience with {topic}.",
+    )
+
+    task = Task(
+        description="Give me an analysis around {topic}.",
+        expected_output="1 bullet point about {topic} that's under 15 words.",
+        agent=agent,
+    )
+
+    crew = Crew(agents=[agent], tasks=[task], full_output=True)
+    results = await crew.kickoff_for_each_async(inputs=inputs)
+
+    assert len(results) == len(inputs)
+    for result in results:
+        assert "usage_metrics" in result
+        assert isinstance(result["usage_metrics"], dict)
+
+        # Assert that all required keys are in usage_metrics
+        for key in [
+            "total_tokens",
+            "prompt_tokens",
+            "completion_tokens",
+            "successful_requests",
+        ]:
+            assert key in result["usage_metrics"]
+            # TODO: FIX THIS WHEN USAGE METRICS ARE RE-DONE
+            # assert result["usage_metrics"][key] > 0
+
+
 def test_agents_rpm_is_never_set_if_crew_max_RPM_is_not_set():
     agent = Agent(
         role="test role",
         goal="test goal",
@@ -468,6 +548,262 @@ def test_async_task_execution():
     join.assert_called()


+@pytest.mark.vcr(filter_headers=["authorization"])
+def test_kickoff_for_each_single_input():
+    """Tests if kickoff_for_each works with a single input."""
+    from unittest.mock import patch
+
+    inputs = [{"topic": "dog"}]
+    expected_outputs = ["Dogs are loyal companions and popular pets."]
+
+    agent = Agent(
+        role="{topic} Researcher",
+        goal="Express hot takes on {topic}.",
+        backstory="You have a lot of experience with {topic}.",
+    )
+
+    task = Task(
+        description="Give me an analysis around {topic}.",
+        expected_output="1 bullet point about {topic} that's under 15 words.",
+        agent=agent,
+    )
+
+    with patch.object(Agent, "execute_task") as mock_execute_task:
+        mock_execute_task.side_effect = expected_outputs
+        crew = Crew(agents=[agent], tasks=[task])
+        results = crew.kickoff_for_each(inputs=inputs)
+
+    assert len(results) == 1
+    assert results == expected_outputs
+
+
+@pytest.mark.vcr(filter_headers=["authorization"])
+def test_kickoff_for_each_multiple_inputs():
+    """Tests if kickoff_for_each works with multiple inputs."""
+    from unittest.mock import patch
+
+    inputs = [
+        {"topic": "dog"},
+        {"topic": "cat"},
+        {"topic": "apple"},
+    ]
+    expected_outputs = [
+        "Dogs are loyal companions and popular pets.",
+        "Cats are independent and low-maintenance pets.",
+        "Apples are a rich source of dietary fiber and vitamin C.",
+    ]
+
+    agent = Agent(
+        role="{topic} Researcher",
+        goal="Express hot takes on {topic}.",
+        backstory="You have a lot of experience with {topic}.",
+    )
+
+    task = Task(
+        description="Give me an analysis around {topic}.",
+        expected_output="1 bullet point about {topic} that's under 15 words.",
+        agent=agent,
+    )
+
+    with patch.object(Agent, "execute_task") as mock_execute_task:
+        mock_execute_task.side_effect = expected_outputs
+        crew = Crew(agents=[agent], tasks=[task])
+        results = crew.kickoff_for_each(inputs=inputs)
+
+    assert len(results) == len(inputs)
+    for i, res in enumerate(results):
+        assert res == expected_outputs[i]
+
+
+@pytest.mark.vcr(filter_headers=["authorization"])
+def test_kickoff_for_each_empty_input():
+    """Tests if kickoff_for_each handles an empty input list."""
+    agent = Agent(
+        role="{topic} Researcher",
+        goal="Express hot takes on {topic}.",
+        backstory="You have a lot of experience with {topic}.",
+    )
+
+    task = Task(
+        description="Give me an analysis around {topic}.",
+        expected_output="1 bullet point about {topic} that's under 15 words.",
+        agent=agent,
+    )
+
+    crew = Crew(agents=[agent], tasks=[task])
+    results = crew.kickoff_for_each(inputs=[])
+    assert results == []
+
+
+@pytest.mark.vcr(filter_headers=["authorization"])
+def test_kickoff_for_each_invalid_input():
+    """Tests if kickoff_for_each raises TypeError for invalid input types."""
+
+    agent = Agent(
+        role="{topic} Researcher",
+        goal="Express hot takes on {topic}.",
+        backstory="You have a lot of experience with {topic}.",
+    )
+
+    task = Task(
+        description="Give me an analysis around {topic}.",
+        expected_output="1 bullet point about {topic} that's under 15 words.",
+        agent=agent,
+    )
+
+    crew = Crew(agents=[agent], tasks=[task])
+
+    with pytest.raises(TypeError):
+        # Pass a string instead of a list
+        crew.kickoff_for_each("invalid input")
+
+
+@pytest.mark.vcr(filter_headers=["authorization"])
+def test_kickoff_for_each_error_handling():
+    """Tests error handling in kickoff_for_each when kickoff raises an error."""
+    from unittest.mock import patch
+
+    inputs = [
+        {"topic": "dog"},
+        {"topic": "cat"},
+        {"topic": "apple"},
+    ]
+    expected_outputs = [
+        "Dogs are loyal companions and popular pets.",
+        "Cats are independent and low-maintenance pets.",
+        "Apples are a rich source of dietary fiber and vitamin C.",
+    ]
+    agent = Agent(
+        role="{topic} Researcher",
+        goal="Express hot takes on {topic}.",
+        backstory="You have a lot of experience with {topic}.",
+    )
+
+    task = Task(
+        description="Give me an analysis around {topic}.",
+        expected_output="1 bullet point about {topic} that's under 15 words.",
+        agent=agent,
+    )
+
+    crew = Crew(agents=[agent], tasks=[task])
+
+    with patch.object(Crew, "kickoff") as mock_kickoff:
+        mock_kickoff.side_effect = expected_outputs[:2] + [
+            Exception("Simulated kickoff error")
+        ]
+        with pytest.raises(Exception, match="Simulated kickoff error"):
+            crew.kickoff_for_each(inputs=inputs)
+
+
+@pytest.mark.vcr(filter_headers=["authorization"])
+@pytest.mark.asyncio
+async def test_kickoff_async_basic_functionality_and_output():
+    """Tests the basic functionality and output of kickoff_async."""
+    from unittest.mock import patch
+
+    inputs = {"topic": "dog"}
+
+    agent = Agent(
+        role="{topic} Researcher",
+        goal="Express hot takes on {topic}.",
+        backstory="You have a lot of experience with {topic}.",
+    )
+
+    task = Task(
+        description="Give me an analysis around {topic}.",
+        expected_output="1 bullet point about {topic} that's under 15 words.",
+        agent=agent,
+    )
+
+    # Create the crew
+    crew = Crew(
+        agents=[agent],
+        tasks=[task],
+    )
+
+    expected_output = "This is a sample output from kickoff."
+    with patch.object(Crew, "kickoff", return_value=expected_output) as mock_kickoff:
+        result = await crew.kickoff_async(inputs)
+
+        assert isinstance(result, str), "Result should be a string"
+        assert result == expected_output, "Result should match expected output"
+        mock_kickoff.assert_called_once_with(inputs)
+
+
+@pytest.mark.vcr(filter_headers=["authorization"])
+@pytest.mark.asyncio
+async def test_async_kickoff_for_each_async_basic_functionality_and_output():
+    """Tests the basic functionality and output of kickoff_for_each_async."""
+    from unittest.mock import patch
+
+    inputs = [
+        {"topic": "dog"},
+        {"topic": "cat"},
+        {"topic": "apple"},
+    ]
+
+    # Define expected outputs for each input
+    expected_outputs = [
+        "Dogs are loyal companions and popular pets.",
+        "Cats are independent and low-maintenance pets.",
+        "Apples are a rich source of dietary fiber and vitamin C.",
+    ]
+
+    agent = Agent(
+        role="{topic} Researcher",
+        goal="Express hot takes on {topic}.",
+        backstory="You have a lot of experience with {topic}.",
+    )
+
+    task = Task(
+        description="Give me an analysis around {topic}.",
+        expected_output="1 bullet point about {topic} that's under 15 words.",
+        agent=agent,
+    )
+
+    with patch.object(
+        Crew, "kickoff_async", side_effect=expected_outputs
+    ) as mock_kickoff_async:
+        crew = Crew(agents=[agent], tasks=[task])
+
+        results = await crew.kickoff_for_each_async(inputs)
+
+        assert len(results) == len(inputs)
+        assert results == expected_outputs
+        for input_data in inputs:
+            mock_kickoff_async.assert_any_call(inputs=input_data)
+
+
+@pytest.mark.vcr(filter_headers=["authorization"])
+@pytest.mark.asyncio
+async def test_async_kickoff_for_each_async_empty_input():
+    """Tests if kickoff_for_each_async handles an empty input list."""
+
+    agent = Agent(
+        role="{topic} Researcher",
+        goal="Express hot takes on {topic}.",
+        backstory="You have a lot of experience with {topic}.",
+    )
+
+    task = Task(
+        description="Give me an analysis around {topic}.",
+        expected_output="1 bullet point about {topic} 
that's under 15 words.", + agent=agent, + ) + + # Create the crew + crew = Crew( + agents=[agent], + tasks=[task], + ) + + # Call the function we are testing + results = await crew.kickoff_for_each_async([]) + + # Assertion + assert results == [], "Result should be an empty list when input is empty" + + def test_set_agents_step_callback(): from unittest.mock import patch @@ -738,10 +1074,10 @@ def test_agent_usage_metrics_are_captured_for_hierarchical_process(): assert result == '"Howdy!"' assert crew.usage_metrics == { - "total_tokens": 1616, - "prompt_tokens": 1333, - "completion_tokens": 283, - "successful_requests": 3, + "total_tokens": 1927, + "prompt_tokens": 1557, + "completion_tokens": 370, + "successful_requests": 4, }
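The new tests above pin down the `kickoff_for_each` / `kickoff_for_each_async` surface with `full_output=True`. As a rough usage sketch (not part of the patch), the pattern they exercise looks like the following. It assumes `Agent`, `Task`, and `Crew` are importable from `crewai` as in the test module, and that an LLM API key is already configured in the environment.

```python
# Illustrative sketch only: mirrors the setup used in the new kickoff_for_each tests.
# Assumes `from crewai import Agent, Crew, Task` is available and an API key is set.
from crewai import Agent, Crew, Task

agent = Agent(
    role="{topic} Researcher",
    goal="Express hot takes on {topic}.",
    backstory="You have a lot of experience with {topic}.",
)

task = Task(
    description="Give me an analysis around {topic}.",
    expected_output="1 bullet point about {topic} that's under 15 words.",
    agent=agent,
)

# Per the tests, full_output=True makes each result include a usage_metrics dict.
crew = Crew(agents=[agent], tasks=[task], full_output=True)

# One kickoff per input dict; placeholders like {topic} are filled in per run.
results = crew.kickoff_for_each(inputs=[{"topic": "dog"}, {"topic": "cat"}])

for result in results:
    print(result["usage_metrics"]["total_tokens"])
```

The async variant, `kickoff_for_each_async`, follows the same shape but is awaited.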