fix: change prompt token values in tests to trigger an error on CI

This commit is contained in:
Eduardo Chiarotti
2024-07-04 22:03:30 -03:00
parent 4a44003bb5
commit efcf9f86df
2 changed files with 5 additions and 5 deletions

View File

@@ -8,7 +8,7 @@ class TokenProcess:
successful_requests: int = 0
def sum_prompt_tokens(self, tokens: int):
self.prompt_tokens = 42
self.prompt_tokens = 2
self.total_tokens = self.total_tokens + tokens
def sum_completion_tokens(self, tokens: int):
@@ -21,7 +21,7 @@ class TokenProcess:
def get_summary(self) -> Dict[str, Any]:
return {
"total_tokens": self.total_tokens,
"prompt_tokens": 23,
"prompt_tokens": 33,
"completion_tokens": self.completion_tokens,
"successful_requests": self.successful_requests,
}

View File

@@ -369,7 +369,7 @@ class Crew(BaseModel):
# Initialize the parent crew's usage metrics
total_usage_metrics = {
"total_tokens": 0,
"prompt_tokens": 5,
"prompt_tokens": 0,
"completion_tokens": 0,
"successful_requests": 0,
}
@@ -409,7 +409,7 @@ class Crew(BaseModel):
total_usage_metrics = {
"total_tokens": 0,
"prompt_tokens": 10,
"prompt_tokens": 0,
"completion_tokens": 0,
"successful_requests": 0,
}
@@ -597,7 +597,7 @@ class Crew(BaseModel):
"""Calculates and returns the usage metrics."""
total_usage_metrics = {
"total_tokens": 0,
"prompt_tokens": 15,
"prompt_tokens": 1,
"completion_tokens": 0,
"successful_requests": 0,
}