From 4db386d817ce9828b7635049b1d8eb38234e7116 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jo=C3=A3o=20Moura?=
Date: Mon, 16 Sep 2024 14:11:11 -0300
Subject: [PATCH] preparing to cut new version

---
 poetry.lock                              | 24 +++++++++---------
 pyproject.toml                           |  2 +-
 src/crewai/__init__.py                   |  5 ----
 src/crewai/agents/crew_agent_executor.py | 20 ++++++++++++---
 src/crewai/project/crew_base.py          |  5 +++-
 src/crewai/telemetry/telemetry.py        | 32 +++++++++++++++++-------
 tests/crew_test.py                       |  4 +++
 7 files changed, 61 insertions(+), 31 deletions(-)

diff --git a/poetry.lock b/poetry.lock
index bc0a47fa1..88364ba0e 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -2826,13 +2826,13 @@ langchain-core = ">=0.2.38,<0.3.0"
 
 [[package]]
 name = "langsmith"
-version = "0.1.120"
+version = "0.1.121"
 description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform."
 optional = false
 python-versions = "<4.0,>=3.8.1"
 files = [
-    {file = "langsmith-0.1.120-py3-none-any.whl", hash = "sha256:54d2785e301646c0988e0a69ebe4d976488c87b41928b358cb153b6ddd8db62b"},
-    {file = "langsmith-0.1.120.tar.gz", hash = "sha256:25499ca187b41bd89d784b272b97a8d76f60e0e21bdf20336e8a2aa6a9b23ac9"},
+    {file = "langsmith-0.1.121-py3-none-any.whl", hash = "sha256:fdb1ac8a671d3904201bfeea197d87bded46a10d08f1034af464211872e29893"},
+    {file = "langsmith-0.1.121.tar.gz", hash = "sha256:e9381b82a5bd484af9a51c3e96faea572746b8d617b070c1cda40cbbe48e33df"},
 ]
 
 [package.dependencies]
@@ -3684,13 +3684,13 @@ sympy = "*"
 
 [[package]]
 name = "openai"
-version = "1.45.0"
+version = "1.45.1"
 description = "The official Python library for the openai API"
 optional = false
 python-versions = ">=3.7.1"
 files = [
-    {file = "openai-1.45.0-py3-none-any.whl", hash = "sha256:2f1f7b7cf90f038a9f1c24f0d26c0f1790c102ec5acd07ffd70a9b7feac1ff4e"},
-    {file = "openai-1.45.0.tar.gz", hash = "sha256:731207d10637335413aa3c0955f8f8df30d7636a4a0f9c381f2209d32cf8de97"},
+    {file = "openai-1.45.1-py3-none-any.whl", hash = "sha256:4a6cce402aec803ae57ae7eff4b5b94bf6c0e1703a8d85541c27243c2adeadf8"},
+    {file = "openai-1.45.1.tar.gz", hash = "sha256:f79e384916b219ab2f028bbf9c778e81291c61eb0645ccfa1828a4b18b55d534"},
 ]
 
 [package.dependencies]
@@ -4343,13 +4343,13 @@ tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "p
 
 [[package]]
 name = "posthog"
-version = "3.6.5"
+version = "3.6.6"
 description = "Integrate PostHog into any python application."
 optional = false
 python-versions = "*"
 files = [
-    {file = "posthog-3.6.5-py2.py3-none-any.whl", hash = "sha256:f8b7c573826b061a1d22c9495169c38ebe83a1df2729f49c7129a9c23a02acf6"},
-    {file = "posthog-3.6.5.tar.gz", hash = "sha256:7fd3ca809e15476c35f75d18cd6bba31395daf0a17b75242965c469fb6292510"},
+    {file = "posthog-3.6.6-py2.py3-none-any.whl", hash = "sha256:38834fd7f0732582a20d4eb4674c8d5c088e464d14d1b3f8c176e389aecaa4ef"},
+    {file = "posthog-3.6.6.tar.gz", hash = "sha256:1e04783293117109189ad7048f3eedbe21caff0e39bee5e2d47a93dd790fefac"},
 ]
 
 [package.dependencies]
@@ -5671,13 +5671,13 @@ files = [
 
 [[package]]
 name = "setuptools"
-version = "75.0.0"
+version = "75.1.0"
 description = "Easily download, build, install, upgrade, and uninstall Python packages"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "setuptools-75.0.0-py3-none-any.whl", hash = "sha256:791ae94f04f78c880b5e614e560dd32d4b4af5d151bd9e7483e3377846caf90a"},
-    {file = "setuptools-75.0.0.tar.gz", hash = "sha256:25af69c809d9334cd8e653d385277abeb5a102dca255954005a7092d282575ea"},
+    {file = "setuptools-75.1.0-py3-none-any.whl", hash = "sha256:35ab7fd3bcd95e6b7fd704e4a1539513edad446c097797f2985e0e4b960772f2"},
+    {file = "setuptools-75.1.0.tar.gz", hash = "sha256:d59a21b17a275fb872a9c3dae73963160ae079f1049ed956880cd7c09b120538"},
 ]
 
 [package.extras]
diff --git a/pyproject.toml b/pyproject.toml
index f53bb7a18..a0aabdf34 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "crewai"
-version = "0.56.23"
+version = "0.60.0"
 description = "Cutting-edge framework for orchestrating role-playing, autonomous AI agents. By fostering collaborative intelligence, CrewAI empowers agents to work together seamlessly, tackling complex tasks."
 authors = ["Joao Moura "]
 readme = "README.md"
diff --git a/src/crewai/__init__.py b/src/crewai/__init__.py
index b106284ad..5a553fef3 100644
--- a/src/crewai/__init__.py
+++ b/src/crewai/__init__.py
@@ -7,10 +7,6 @@ from crewai.routers import Router
 from crewai.task import Task
 
-warnings.filterwarnings(
-    "ignore",
-    message="Valid config keys have changed in V2:",
-)
 warnings.filterwarnings(
     "ignore",
     message="Pydantic serializer warnings:",
@@ -18,5 +14,4 @@ warnings.filterwarnings(
     module="pydantic.main",
 )
-
 
 __all__ = ["Agent", "Crew", "Process", "Task", "Pipeline", "Router"]
diff --git a/src/crewai/agents/crew_agent_executor.py b/src/crewai/agents/crew_agent_executor.py
index edb7131c8..07b3b8570 100644
--- a/src/crewai/agents/crew_agent_executor.py
+++ b/src/crewai/agents/crew_agent_executor.py
@@ -83,6 +83,8 @@ class CrewAgentExecutor(CrewAgentExecutorMixin):
         user_prompt = self._format_prompt(self.prompt.get("prompt", ""), inputs)
         self.messages.append(self._format_msg(user_prompt))
 
+        self._show_start_logs()
+
         self.ask_for_human_input = bool(inputs.get("ask_for_human_input", False))
 
         formatted_answer = self._invoke_loop()
@@ -162,6 +164,17 @@ class CrewAgentExecutor(CrewAgentExecutorMixin):
             self._show_logs(formatted_answer)
         return formatted_answer
 
+    def _show_start_logs(self):
+        if self.agent.verbose or (
+            hasattr(self, "crew") and getattr(self.crew, "verbose", False)
+        ):
+            self._printer.print(
+                content=f"\033[1m\033[95m# Agent:\033[00m \033[1m\033[92m{self.agent.role}\033[00m"
+            )
+            self._printer.print(
+                content=f"\033[95m## Task:\033[00m \033[92m{self.task.description}\033[00m"
+            )
+
     def _show_logs(self, formatted_answer: Union[AgentAction, AgentFinish]):
         if self.agent.verbose or (
             hasattr(self, "crew") and getattr(self.crew, "verbose", False)
@@ -176,9 +189,10 @@ class CrewAgentExecutor(CrewAgentExecutorMixin):
             self._printer.print(
                 content=f"\n\n\033[1m\033[95m# Agent:\033[00m \033[1m\033[92m{self.agent.role}\033[00m"
            )
-            self._printer.print(
-                content=f"\033[95m## Thought:\033[00m \033[92m{thought}\033[00m"
-            )
+            if thought and thought != "":
+                self._printer.print(
+                    content=f"\033[95m## Thought:\033[00m \033[92m{thought}\033[00m"
+                )
             self._printer.print(
                 content=f"\033[95m## Using tool:\033[00m \033[92m{formatted_answer.tool}\033[00m"
             )
diff --git a/src/crewai/project/crew_base.py b/src/crewai/project/crew_base.py
index 5e0f154ea..67e85f91b 100644
--- a/src/crewai/project/crew_base.py
+++ b/src/crewai/project/crew_base.py
@@ -89,7 +89,10 @@ def CrewBase(cls):
             callbacks: Dict[str, Callable],
         ) -> None:
             if llm := agent_info.get("llm"):
-                self.agents_config[agent_name]["llm"] = llms[llm]()
+                try:
+                    self.agents_config[agent_name]["llm"] = llms[llm]()
+                except KeyError:
+                    self.agents_config[agent_name]["llm"] = llm
 
             if tools := agent_info.get("tools"):
                 self.agents_config[agent_name]["tools"] = [
diff --git a/src/crewai/telemetry/telemetry.py b/src/crewai/telemetry/telemetry.py
index 99a1d9b03..f8edb51a1 100644
--- a/src/crewai/telemetry/telemetry.py
+++ b/src/crewai/telemetry/telemetry.py
@@ -4,15 +4,28 @@ import asyncio
 import json
 import os
 import platform
+import warnings
 from typing import TYPE_CHECKING, Any, Optional
+from contextlib import contextmanager
 
-import pkg_resources
-from opentelemetry import trace
-from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
-from opentelemetry.sdk.resources import SERVICE_NAME, Resource
-from opentelemetry.sdk.trace import TracerProvider
-from opentelemetry.sdk.trace.export import BatchSpanProcessor
-from opentelemetry.trace import Span, Status, StatusCode
+
+@contextmanager
+def suppress_warnings():
+    with warnings.catch_warnings():
+        warnings.filterwarnings("ignore")
+        yield
+
+
+with suppress_warnings():
+    import pkg_resources
+
+
+from opentelemetry import trace  # noqa: E402
+from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter  # noqa: E402
+from opentelemetry.sdk.resources import SERVICE_NAME, Resource  # noqa: E402
+from opentelemetry.sdk.trace import TracerProvider  # noqa: E402
+from opentelemetry.sdk.trace.export import BatchSpanProcessor  # noqa: E402
+from opentelemetry.trace import Span, Status, StatusCode  # noqa: E402
 
 if TYPE_CHECKING:
     from crewai.crew import Crew
@@ -62,8 +75,9 @@ class Telemetry:
     def set_tracer(self):
         if self.ready and not self.trace_set:
             try:
-                trace.set_tracer_provider(self.provider)
-                self.trace_set = True
+                with suppress_warnings():
+                    trace.set_tracer_provider(self.provider)
+                    self.trace_set = True
             except Exception:
                 self.ready = False
                 self.trace_set = False
diff --git a/tests/crew_test.py b/tests/crew_test.py
index 0326789c5..89ec3dd7d 100644
--- a/tests/crew_test.py
+++ b/tests/crew_test.py
@@ -429,6 +429,10 @@ def test_crew_verbose_output(capsys):
     crew.kickoff()
     captured = capsys.readouterr()
    expected_strings = [
+        "\x1b[1m\x1b[95m# Agent:\x1b[00m \x1b[1m\x1b[92mResearcher",
+        "\x1b[00m\n\x1b[95m## Task:\x1b[00m \x1b[92mResearch AI advancements.",
+        "\x1b[1m\x1b[95m# Agent:\x1b[00m \x1b[1m\x1b[92mSenior Writer",
+        "\x1b[95m## Task:\x1b[00m \x1b[92mWrite about AI in healthcare.",
         "\n\n\x1b[1m\x1b[95m# Agent:\x1b[00m \x1b[1m\x1b[92mResearcher",
         "\x1b[00m\n\x1b[95m## Final Answer:",
         "\n\n\x1b[1m\x1b[95m# Agent:\x1b[00m \x1b[1m\x1b[92mSenior Writer",
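
Note on the telemetry change above: the patch adds a suppress_warnings() context manager to src/crewai/telemetry/telemetry.py and wraps both the pkg_resources import and the trace.set_tracer_provider() call in it. A minimal standalone sketch of the same pattern follows; only suppress_warnings() itself is taken from the diff, and the demo warnings are illustrative, not part of the patch.

import warnings
from contextlib import contextmanager


@contextmanager
def suppress_warnings():
    # Temporarily silence all warnings inside the with-block;
    # catch_warnings() restores the previous filters on exit.
    with warnings.catch_warnings():
        warnings.filterwarnings("ignore")
        yield


with suppress_warnings():
    warnings.warn("suppressed: nothing is printed for this warning")

warnings.warn("visible: the original warning filters are back in effect")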