fix: Update json and model_dump_json signatures to match BaseModel

Co-Authored-By: Joe Moura <joao@crewai.com>
Devin AI authored 2025-02-09 19:27:57 +00:00
parent bf6db93bdf
commit f6a65486f1
2 changed files with 28 additions and 10 deletions
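
The hunks below widen the json override so its keyword-only parameters mirror pydantic v2's (deprecated) BaseModel.json signature. A minimal, self-contained sketch of that pattern, assuming pydantic v2 is installed; the ExampleOutput class, its raw field, and the local IncEx alias are illustrative stand-ins, not the CrewAI code:

from typing import Any, Callable, Dict, Optional, Set, Union

from pydantic import BaseModel

# Local approximation of pydantic's include/exclude type, so the example
# does not rely on a private import.
IncEx = Union[Set[int], Set[str], Dict[int, Any], Dict[str, Any], None]


class ExampleOutput(BaseModel):
    raw: str = ""

    def json(
        self,
        *,
        include: Optional[IncEx] = None,
        exclude: Optional[IncEx] = None,
        by_alias: bool = False,
        exclude_unset: bool = False,
        exclude_defaults: bool = False,
        exclude_none: bool = False,
        encoder: Optional[Callable[[Any], Any]] = None,
        models_as_dict: bool = True,
        **dumps_kwargs: Any,
    ) -> str:
        # The extra parameters exist so the override stays signature-compatible
        # with the BaseModel method it shadows; delegate to the v2 API.
        return self.model_dump_json(
            include=include,
            exclude=exclude,
            by_alias=by_alias,
            exclude_unset=exclude_unset,
            exclude_defaults=exclude_defaults,
            exclude_none=exclude_none,
        )


print(ExampleOutput(raw="hi").json())  # {"raw":"hi"}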

Changed file 1 of 2

@@ -1,5 +1,5 @@
import json
from typing import Any, Dict, Optional, Set, Union
from typing import Any, Callable, Dict, Optional, Set, Union
from pydantic import BaseModel, Field
from typing_extensions import Literal
@@ -27,8 +27,19 @@ class CrewOutput(BaseModel):
)
token_usage: UsageMetrics = Field(description="Processed token summary", default={})
@property
def json(self) -> str:
def json(
self,
*,
include: Optional[IncEx] = None,
exclude: Optional[IncEx] = None,
by_alias: bool = False,
exclude_unset: bool = False,
exclude_defaults: bool = False,
exclude_none: bool = False,
encoder: Optional[Callable[[Any], Any]] = None,
models_as_dict: bool = True,
**dumps_kwargs: Any,
) -> str:
"""Get the JSON representation of the output."""
if self.tasks_output and self.tasks_output[-1].output_format != OutputFormat.JSON:
raise ValueError(
@@ -42,14 +53,12 @@ class CrewOutput(BaseModel):
indent: Optional[int] = None,
include: Optional[IncEx] = None,
exclude: Optional[IncEx] = None,
context: Optional[Any] = None,
by_alias: bool = False,
exclude_unset: bool = False,
exclude_defaults: bool = False,
exclude_none: bool = False,
round_trip: bool = False,
warnings: bool | Literal["none", "warn", "error"] = False,
serialize_as_any: bool = False,
) -> str:
"""Override model_dump_json to handle custom JSON output."""
return super().model_dump_json(
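
The model_dump_json override above follows the same pattern: keep BaseModel's keyword-only options and forward them to super(). A short, self-contained sketch assuming an early pydantic 2.x signature (later 2.x releases add parameters such as context and serialize_as_any); again an illustration, not the CrewAI implementation:

from typing import Any, Dict, Optional, Set, Union

from pydantic import BaseModel

IncEx = Union[Set[int], Set[str], Dict[int, Any], Dict[str, Any], None]


class ExampleTaskOutput(BaseModel):
    raw: str = ""

    def model_dump_json(
        self,
        *,
        indent: Optional[int] = None,
        include: Optional[IncEx] = None,
        exclude: Optional[IncEx] = None,
        by_alias: bool = False,
        exclude_unset: bool = False,
        exclude_defaults: bool = False,
        exclude_none: bool = False,
        round_trip: bool = False,
        warnings: bool = True,
    ) -> str:
        # Forward everything unchanged so behaviour matches BaseModel;
        # a subclass would pre- or post-process the JSON string here.
        return super().model_dump_json(
            indent=indent,
            include=include,
            exclude=exclude,
            by_alias=by_alias,
            exclude_unset=exclude_unset,
            exclude_defaults=exclude_defaults,
            exclude_none=exclude_none,
            round_trip=round_trip,
            warnings=warnings,
        )


print(ExampleTaskOutput(raw="hello").model_dump_json(indent=2))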

Changed file 2 of 2

@@ -1,5 +1,5 @@
import json
from typing import Any, Dict, Optional, Set, Union
from typing import Any, Callable, Dict, Optional, Set, Union
from pydantic import BaseModel, Field, model_validator
from typing_extensions import Literal
@@ -38,8 +38,19 @@ class TaskOutput(BaseModel):
self.summary = f"{excerpt}..."
return self
@property
def json(self) -> str:
def json(
self,
*,
include: Optional[IncEx] = None,
exclude: Optional[IncEx] = None,
by_alias: bool = False,
exclude_unset: bool = False,
exclude_defaults: bool = False,
exclude_none: bool = False,
encoder: Optional[Callable[[Any], Any]] = None,
models_as_dict: bool = True,
**dumps_kwargs: Any,
) -> str:
"""Get the JSON representation of the output."""
if self.output_format != OutputFormat.JSON:
raise ValueError(
@@ -57,14 +68,12 @@ class TaskOutput(BaseModel):
indent: Optional[int] = None,
include: Optional[IncEx] = None,
exclude: Optional[IncEx] = None,
context: Optional[Any] = None,
by_alias: bool = False,
exclude_unset: bool = False,
exclude_defaults: bool = False,
exclude_none: bool = False,
round_trip: bool = False,
warnings: bool | Literal["none", "warn", "error"] = False,
serialize_as_any: bool = False,
) -> str:
"""Override model_dump_json to handle custom JSON output."""
return super().model_dump_json(
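
One call-site implication, on the hedged reading that the @property decorator is dropped along with the old one-line json signature (the rendered diff above does not mark removed lines): json moves from attribute access to a regular method call. A tiny before/after with hypothetical classes standing in for the real output models:

from pydantic import BaseModel


class OldStyle(BaseModel):
    raw: str = ""

    @property
    def json(self) -> str:  # property: read without parentheses
        return self.model_dump_json()


class NewStyle(BaseModel):
    raw: str = ""

    def json(self, **dumps_kwargs) -> str:  # method: called, keyword options accepted (ignored in this sketch)
        return self.model_dump_json()


print(OldStyle(raw="x").json)    # attribute access
print(NewStyle(raw="x").json())  # method call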