fix: test changing prompt tokens to get error on CI

Eduardo Chiarotti
2024-07-04 22:05:42 -03:00
parent efcf9f86df
commit 0324a41819
2 changed files with 3 additions and 3 deletions

@@ -8,7 +8,7 @@ class TokenProcess:
     successful_requests: int = 0
 
     def sum_prompt_tokens(self, tokens: int):
-        self.prompt_tokens = 2
+        self.prompt_tokens = 0
         self.total_tokens = self.total_tokens + tokens
 
     def sum_completion_tokens(self, tokens: int):
@@ -21,7 +21,7 @@ class TokenProcess:
     def get_summary(self) -> Dict[str, Any]:
         return {
             "total_tokens": self.total_tokens,
-            "prompt_tokens": 33,
+            "prompt_tokens": 25,
             "completion_tokens": self.completion_tokens,
             "successful_requests": self.successful_requests,
         }
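For context: the hard-coded values above (0 in sum_prompt_tokens, 25 in get_summary) replace what is presumably an accumulating counter, so that the CI token-usage assertions fail as the commit message intends. A minimal sketch of the accumulating version, assuming prompt_tokens is meant to be summed the same way as total_tokens, would be:

from typing import Any, Dict

class TokenProcess:
    total_tokens: int = 0
    prompt_tokens: int = 0
    completion_tokens: int = 0
    successful_requests: int = 0

    def sum_prompt_tokens(self, tokens: int):
        # accumulate instead of overwriting with a constant
        self.prompt_tokens = self.prompt_tokens + tokens
        self.total_tokens = self.total_tokens + tokens

    def get_summary(self) -> Dict[str, Any]:
        # report the accumulated counters rather than fixed numbers
        return {
            "total_tokens": self.total_tokens,
            "prompt_tokens": self.prompt_tokens,
            "completion_tokens": self.completion_tokens,
            "successful_requests": self.successful_requests,
        }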

@@ -597,7 +597,7 @@ class Crew(BaseModel):
"""Calculates and returns the usage metrics.""" """Calculates and returns the usage metrics."""
total_usage_metrics = { total_usage_metrics = {
"total_tokens": 0, "total_tokens": 0,
"prompt_tokens": 1, "prompt_tokens": 5,
"completion_tokens": 0, "completion_tokens": 0,
"successful_requests": 0, "successful_requests": 0,
} }
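Likewise, seeding prompt_tokens at 5 instead of 0 skews the aggregated totals. A hypothetical sketch of how such a starting dict is typically folded together with per-agent counters, assuming each agent exposes a TokenProcess-style get_summary() (the agent attribute name here is illustrative, not taken from the diff), might look like:

from typing import Dict

def calculate_usage_metrics(agents) -> Dict[str, int]:
    # start every counter at zero, then add each agent's summary on top
    total_usage_metrics = {
        "total_tokens": 0,
        "prompt_tokens": 0,
        "completion_tokens": 0,
        "successful_requests": 0,
    }
    for agent in agents:
        summary = agent.token_process.get_summary()  # token_process is an assumed attribute name
        for key in total_usage_metrics:
            total_usage_metrics[key] += summary.get(key, 0)
    return total_usage_metrics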