Mirror of https://github.com/crewAIInc/crewAI.git, synced 2025-12-19 22:08:31 +00:00

Compare commits: docs/chang ... v0.36.1 (29 Commits)
Commit SHAs:

- 19e7ddca60
- ecd02034fb
- a79b216875
- 52217c2f63
- 7edacf6e24
- 58558a1950
- 1607c85ae5
- a6ff342948
- d2eb54ebf8
- a41bd18599
- bb64c80964
- 2fb56f1f9f
- 35676fe2f5
- 81ed6f177e
- 4bcd1df6bb
- 6fae56dd60
- 430f0e9013
- d7f080a978
- 5d18f73654
- 57fc079267
- 706f4cd74a
- 2e3646cc96
- 844cc515d5
- f47904134b
- d72b00af3c
- bd053a98c7
- c18208ca59
- acbe5af8ce
- c81146505a
.github/workflows/tests.yml (vendored): 6 changes
@@ -19,13 +19,13 @@ jobs:
       - name: Setup Python
         uses: actions/setup-python@v4
         with:
-          python-version: "3.10"
+          python-version: "3.11.9"

       - name: Install Requirements
         run: |
+          set -e
           pip install poetry
-          poetry lock &&
           poetry install

       - name: Run tests
-        run: poetry run pytest tests
+        run: poetry run pytest
.gitignore (vendored): 1 change
@@ -14,3 +14,4 @@ test.py
 rc-tests/*
 *.pkl
 temp/*
+.vscode/*
@@ -16,24 +16,24 @@ description: What are crewAI Agents and how to use them.
 
 ## Agent Attributes
 
-| Attribute | Description |
+| Attribute | Parameter | Description |
-| :-------- | :---------- |
+| :-------- | :-------- | :---------- |
-| **Role** | Defines the agent's function within the crew. It determines the kind of tasks the agent is best suited for. |
+| **Role** | `role` | Defines the agent's function within the crew. It determines the kind of tasks the agent is best suited for. |
-| **Goal** | The individual objective that the agent aims to achieve. It guides the agent's decision-making process. |
+| **Goal** | `goal` | The individual objective that the agent aims to achieve. It guides the agent's decision-making process. |
-| **Backstory** | Provides context to the agent's role and goal, enriching the interaction and collaboration dynamics. |
+| **Backstory** | `backstory` | Provides context to the agent's role and goal, enriching the interaction and collaboration dynamics. |
-| **LLM** *(optional)* | Represents the language model that will run the agent. It dynamically fetches the model name from the `OPENAI_MODEL_NAME` environment variable, defaulting to "gpt-4" if not specified. |
+| **LLM** *(optional)* | `llm` | Represents the language model that will run the agent. It dynamically fetches the model name from the `OPENAI_MODEL_NAME` environment variable, defaulting to "gpt-4" if not specified. |
-| **Tools** *(optional)* | Set of capabilities or functions that the agent can use to perform tasks. Expected to be instances of custom classes compatible with the agent's execution environment. Tools are initialized with a default value of an empty list. |
+| **Tools** *(optional)* | `tools` | Set of capabilities or functions that the agent can use to perform tasks. Expected to be instances of custom classes compatible with the agent's execution environment. Tools are initialized with a default value of an empty list. |
-| **Function Calling LLM** *(optional)* | Specifies the language model that will handle the tool calling for this agent, overriding the crew function calling LLM if passed. Default is `None`. |
+| **Function Calling LLM** *(optional)* | `function_calling_llm` | Specifies the language model that will handle the tool calling for this agent, overriding the crew function calling LLM if passed. Default is `None`. |
-| **Max Iter** *(optional)* | `max_iter` is the maximum number of iterations the agent can perform before being forced to give its best answer. Default is `25`. |
+| **Max Iter** *(optional)* | `max_iter` | Max Iter is the maximum number of iterations the agent can perform before being forced to give its best answer. Default is `25`. |
-| **Max RPM** *(optional)* | `max_rpm` is Tte maximum number of requests per minute the agent can perform to avoid rate limits. It's optional and can be left unspecified, with a default value of `None`. |
+| **Max RPM** *(optional)* | `max_rpm` | Max RPM is the maximum number of requests per minute the agent can perform to avoid rate limits. It's optional and can be left unspecified, with a default value of `None`. |
-| **Max Execution Time** *(optional)* | `max_execution_time` is the Maximum execution time for an agent to execute a task. It's optional and can be left unspecified, with a default value of `None`, meaning no max execution time. |
+| **Max Execution Time** *(optional)* | `max_execution_time` | Max Execution Time is the Maximum execution time for an agent to execute a task. It's optional and can be left unspecified, with a default value of `None`, meaning no max execution time. |
-| **Verbose** *(optional)* | Setting this to `True` configures the internal logger to provide detailed execution logs, aiding in debugging and monitoring. Default is `False`. |
+| **Verbose** *(optional)* | `verbose` | Setting this to `True` configures the internal logger to provide detailed execution logs, aiding in debugging and monitoring. Default is `False`. |
-| **Allow Delegation** *(optional)* | Agents can delegate tasks or questions to one another, ensuring that each task is handled by the most suitable agent. Default is `True`. |
+| **Allow Delegation** *(optional)* | `allow_delegation` | Agents can delegate tasks or questions to one another, ensuring that each task is handled by the most suitable agent. Default is `True`. |
-| **Step Callback** *(optional)* | A function that is called after each step of the agent. This can be used to log the agent's actions or to perform other operations. It will overwrite the crew `step_callback`. |
+| **Step Callback** *(optional)* | `step_callback` | A function that is called after each step of the agent. This can be used to log the agent's actions or to perform other operations. It will overwrite the crew `step_callback`. |
-| **Cache** *(optional)* | Indicates if the agent should use a cache for tool usage. Default is `True`. |
+| **Cache** *(optional)* | `cache` | Indicates if the agent should use a cache for tool usage. Default is `True`. |
-| **System Template** *(optional)* | Specifies the system format for the agent. Default is `None`. |
+| **System Template** *(optional)* | `system_template` | Specifies the system format for the agent. Default is `None`. |
-| **Prompt Template** *(optional)* | Specifies the prompt format for the agent. Default is `None`. |
+| **Prompt Template** *(optional)* | `prompt_template` | Specifies the prompt format for the agent. Default is `None`. |
-| **Response Template** *(optional)* | Specifies the response format for the agent. Default is `None`. |
+| **Response Template** *(optional)* | `response_template` | Specifies the response format for the agent. Default is `None`. |
 
 ## Creating an Agent
 
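As a quick reference for the attribute table above, here is a minimal sketch of constructing an agent with several of the documented parameters; the role, goal, backstory and values are illustrative, not taken from the repository.

```python
from crewai import Agent

# Illustrative agent using parameters from the attribute table above.
researcher = Agent(
    role="Market Researcher",                   # role: the agent's function in the crew
    goal="Collect and summarize market data",   # goal: the agent's individual objective
    backstory="An analyst with a knack for spotting trends.",
    verbose=True,            # detailed execution logs (default False)
    allow_delegation=True,   # may hand work to other agents (default True)
    max_iter=25,             # iterations before forcing a best answer
    cache=True,              # cache tool results (default True)
)
```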
@@ -8,29 +8,29 @@ A crew in crewAI represents a collaborative group of agents working together to
 
 ## Crew Attributes
 
-| Attribute | Description |
+| Attribute | Parameters | Description |
-| :-------- | :---------- |
+| :-------- | :--------- | :---------- |
-| **Tasks** | A list of tasks assigned to the crew. |
+| **Tasks** | `tasks` | A list of tasks assigned to the crew. |
-| **Agents** | A list of agents that are part of the crew. |
+| **Agents** | `agents` | A list of agents that are part of the crew. |
-| **Process** *(optional)* | The process flow (e.g., sequential, hierarchical) the crew follows. |
+| **Process** *(optional)* | `process` | The process flow (e.g., sequential, hierarchical) the crew follows. |
-| **Verbose** *(optional)* | The verbosity level for logging during execution. |
+| **Verbose** *(optional)* | `verbose` | The verbosity level for logging during execution. |
-| **Manager LLM** *(optional)*| The language model used by the manager agent in a hierarchical process. **Required when using a hierarchical process.** |
+| **Manager LLM** *(optional)*| `manager_llm` | The language model used by the manager agent in a hierarchical process. **Required when using a hierarchical process.** |
-| **Function Calling LLM** *(optional)* | If passed, the crew will use this LLM to do function calling for tools for all agents in the crew. Each agent can have its own LLM, which overrides the crew's LLM for function calling. |
+| **Function Calling LLM** *(optional)* | `function_calling_llm` | If passed, the crew will use this LLM to do function calling for tools for all agents in the crew. Each agent can have its own LLM, which overrides the crew's LLM for function calling. |
-| **Config** *(optional)* | Optional configuration settings for the crew, in `Json` or `Dict[str, Any]` format. |
+| **Config** *(optional)* | `config` | Optional configuration settings for the crew, in `Json` or `Dict[str, Any]` format. |
-| **Max RPM** *(optional)* | Maximum requests per minute the crew adheres to during execution. |
+| **Max RPM** *(optional)* | `max_rpm` | Maximum requests per minute the crew adheres to during execution. |
-| **Language** *(optional)* | Language used for the crew, defaults to English. |
+| **Language** *(optional)* | `language` | Language used for the crew, defaults to English. |
-| **Language File** *(optional)* | Path to the language file to be used for the crew. |
+| **Language File** *(optional)* | `language_file` | Path to the language file to be used for the crew. |
-| **Memory** *(optional)* | Utilized for storing execution memories (short-term, long-term, entity memory). |
+| **Memory** *(optional)* | `memory` | Utilized for storing execution memories (short-term, long-term, entity memory). |
-| **Cache** *(optional)* | Specifies whether to use a cache for storing the results of tools' execution. |
+| **Cache** *(optional)* | `cache` | Specifies whether to use a cache for storing the results of tools' execution. |
-| **Embedder** *(optional)* | Configuration for the embedder to be used by the crew. Mostly used by memory for now. |
+| **Embedder** *(optional)* | `embedder` | Configuration for the embedder to be used by the crew. Mostly used by memory for now. |
-| **Full Output** *(optional)*| Whether the crew should return the full output with all tasks outputs or just the final output. |
+| **Full Output** *(optional)*| `full_output` | Whether the crew should return the full output with all tasks outputs or just the final output. |
-| **Step Callback** *(optional)* | A function that is called after each step of every agent. This can be used to log the agent's actions or to perform other operations; it won't override the agent-specific `step_callback`. |
+| **Step Callback** *(optional)* | `step_callback` | A function that is called after each step of every agent. This can be used to log the agent's actions or to perform other operations; it won't override the agent-specific `step_callback`. |
-| **Task Callback** *(optional)* | A function that is called after the completion of each task. Useful for monitoring or additional operations post-task execution. |
+| **Task Callback** *(optional)* | `task_callback` | A function that is called after the completion of each task. Useful for monitoring or additional operations post-task execution. |
-| **Share Crew** *(optional)* | Whether you want to share the complete crew information and execution with the crewAI team to make the library better, and allow us to train models. |
+| **Share Crew** *(optional)* | `share_crew` | Whether you want to share the complete crew information and execution with the crewAI team to make the library better, and allow us to train models. |
-| **Output Log File** *(optional)* | Whether you want to have a file with the complete crew output and execution. You can set it using True and it will default to the folder you are currently in and it will be called logs.txt or passing a string with the full path and name of the file. |
+| **Output Log File** *(optional)* | `output_log_file` | Whether you want to have a file with the complete crew output and execution. You can set it using True and it will default to the folder you are currently in and it will be called logs.txt or passing a string with the full path and name of the file. |
-| **Manager Agent** *(optional)* | `manager` sets a custom agent that will be used as a manager. |
+| **Manager Agent** *(optional)* | `manager_agent` | `manager` sets a custom agent that will be used as a manager. |
-| **Manager Callbacks** *(optional)* | `manager_callbacks` takes a list of callback handlers to be executed by the manager agent when a hierarchical process is used. |
+| **Manager Callbacks** *(optional)* | `manager_callbacks` | `manager_callbacks` takes a list of callback handlers to be executed by the manager agent when a hierarchical process is used. |
-| **Prompt File** *(optional)* | Path to the prompt JSON file to be used for the crew. |
+| **Prompt File** *(optional)* | `prompt_file` | Path to the prompt JSON file to be used for the crew. |
 
 !!! note "Crew Max RPM"
     The `max_rpm` attribute sets the maximum number of requests per minute the crew can perform to avoid rate limits and will override individual agents' `max_rpm` settings if you set it.
@@ -123,7 +123,7 @@ result = my_crew.kickoff()
 print(result)
 ```
 
-### Different wayt to Kicking Off a Crew
+### Different ways to Kicking Off a Crew
 
 Once your crew is assembled, initiate the workflow with the appropriate kickoff method. CrewAI provides several methods for better control over the kickoff process: `kickoff()`, `kickoff_for_each()`, `kickoff_async()`, and `kickoff_for_each_async()`.
 
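To tie the crew attributes and the kickoff methods above together, here is a minimal sketch; the agents and tasks are assumed to be defined elsewhere, and the values are illustrative.

```python
from crewai import Crew, Process

# Assemble a crew from previously defined agents and tasks.
my_crew = Crew(
    agents=[researcher, writer],
    tasks=[research_task, writing_task],
    process=Process.sequential,
    max_rpm=100,   # crew-level rate limit, overrides per-agent max_rpm
    cache=True,
)

result = my_crew.kickoff()  # run the workflow once
print(result)
# The docs above also list kickoff_for_each(), kickoff_async() and
# kickoff_for_each_async() for per-input and asynchronous runs.
```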
@@ -12,7 +12,7 @@ description: Leveraging memory systems in the crewAI framework to enhance agent
 | Component | Description |
 | :-------- | :---------- |
 | **Short-Term Memory**| Temporarily stores recent interactions and outcomes, enabling agents to recall and utilize information relevant to their current context during the current executions. |
-| **Long-Term Memory** | Preserves valuable insights and learnings from past executions, allowing agents to build and refine their knowledge over time. So Agents can remeber what they did right and wrong across multiple executions |
+| **Long-Term Memory** | Preserves valuable insights and learnings from past executions, allowing agents to build and refine their knowledge over time. So Agents can remember what they did right and wrong across multiple executions |
 | **Entity Memory** | Captures and organizes information about entities (people, places, concepts) encountered during tasks, facilitating deeper understanding and relationship mapping. |
 | **Contextual Memory**| Maintains the context of interactions by combining `ShortTermMemory`, `LongTermMemory`, and `EntityMemory`, aiding in the coherence and relevance of agent responses over a sequence of tasks or a conversation. |
 
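A short sketch of how these memory components are typically switched on at the crew level, using the `memory` and `embedder` parameters from the crew attributes table; the embedder settings shown here are an assumption for illustration only.

```python
from crewai import Crew, Process

crew = Crew(
    agents=[researcher, writer],
    tasks=[research_task, writing_task],
    process=Process.sequential,
    memory=True,  # enables short-term, long-term and entity memory
    # Illustrative embedder configuration; adjust provider/model as needed.
    embedder={"provider": "openai", "config": {"model": "text-embedding-3-small"}},
)
```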
@@ -11,20 +11,20 @@ Tasks within crewAI can be collaborative, requiring multiple agents to work toge
 
 ## Task Attributes
 
-| Attribute | Description |
+| Attribute | Parameters | Description |
-| :-------- | :---------- |
+| :-------- | :--------- | :---------- |
-| **Description** | A clear, concise statement of what the task entails. |
+| **Description** | `description` | A clear, concise statement of what the task entails. |
-| **Agent** | The agent responsible for the task, assigned either directly or by the crew's process. |
+| **Agent** | `agent` | The agent responsible for the task, assigned either directly or by the crew's process. |
-| **Expected Output** | A detailed description of what the task's completion looks like. |
+| **Expected Output** | `expected_output` | A detailed description of what the task's completion looks like. |
-| **Tools** *(optional)* | The functions or capabilities the agent can utilize to perform the task. |
+| **Tools** *(optional)* | `tools` | The functions or capabilities the agent can utilize to perform the task. |
-| **Async Execution** *(optional)* | If set, the task executes asynchronously, allowing progression without waiting for completion.|
+| **Async Execution** *(optional)* | `async_execution` | If set, the task executes asynchronously, allowing progression without waiting for completion.|
-| **Context** *(optional)* | Specifies tasks whose outputs are used as context for this task. |
+| **Context** *(optional)* | `context` | Specifies tasks whose outputs are used as context for this task. |
-| **Config** *(optional)* | Additional configuration details for the agent executing the task, allowing further customization. |
+| **Config** *(optional)* | `config` | Additional configuration details for the agent executing the task, allowing further customization. |
-| **Output JSON** *(optional)* | Outputs a JSON object, requiring an OpenAI client. Only one output format can be set. |
+| **Output JSON** *(optional)* | `output_json` | Outputs a JSON object, requiring an OpenAI client. Only one output format can be set. |
-| **Output Pydantic** *(optional)* | Outputs a Pydantic model object, requiring an OpenAI client. Only one output format can be set. |
+| **Output Pydantic** *(optional)* | `output_pydantic` | Outputs a Pydantic model object, requiring an OpenAI client. Only one output format can be set. |
-| **Output File** *(optional)* | Saves the task output to a file. If used with `Output JSON` or `Output Pydantic`, specifies how the output is saved. |
+| **Output File** *(optional)* | `output_file` | Saves the task output to a file. If used with `Output JSON` or `Output Pydantic`, specifies how the output is saved. |
-| **Callback** *(optional)* | A Python callable that is executed with the task's output upon completion. |
+| **Callback** *(optional)* | `callback` | A Python callable that is executed with the task's output upon completion. |
-| **Human Input** *(optional)* | Indicates if the task requires human feedback at the end, useful for tasks needing human oversight. |
+| **Human Input** *(optional)* | `human_input` | Indicates if the task requires human feedback at the end, useful for tasks needing human oversight. |
 
 ## Creating a Task
 
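For reference alongside the table above, a minimal sketch of a task using a few of these parameters; the description, agent and values are illustrative.

```python
from crewai import Task

report_task = Task(
    description="Summarize this week's AI research news.",
    expected_output="A bullet-point summary of the five most relevant items.",
    agent=researcher,         # assumes an Agent defined elsewhere
    async_execution=False,    # run synchronously
    output_file="report.md",  # save the result to a file
    human_input=False,        # no human review step at the end
)
```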
@@ -8,6 +8,7 @@ The training feature in CrewAI allows you to train your AI agents using the comm
 
 During training, CrewAI utilizes techniques to optimize the performance of your agents along with human feedback. This helps the agents improve their understanding, decision-making, and problem-solving abilities.
 
+### Training Your Crew Using the CLI
 To use the training feature, follow these steps:
 
 1. Open your terminal or command prompt.
@@ -18,7 +19,26 @@ To use the training feature, follow these steps:
 crewai train -n <n_iterations>
 ```
 
-Replace `<n_iterations>` with the desired number of training iterations. This determines how many times the agents will go through the training process.
+### Training Your Crew Programmatically
+To train your crew programmatically, use the following steps:
+
+1. Define the number of iterations for training.
+2. Specify the input parameters for the training process.
+3. Execute the training command within a try-except block to handle potential errors.
+
+```python
+n_iterations = 2
+inputs = {"topic": "CrewAI Training"}
+
+try:
+    YourCrewName_Crew().crew().train(n_iterations= n_iterations, inputs=inputs)
+
+except Exception as e:
+    raise Exception(f"An error occurred while training the crew: {e}")
+```
+
+!!! note "Replace `<n_iterations>` with the desired number of training iterations. This determines how many times the agents will go through the training process."
+
 
 ### Key Points to Note:
 - **Positive Integer Requirement:** Ensure that the number of iterations (`n_iterations`) is a positive integer. The code will raise a `ValueError` if this condition is not met.
@@ -51,7 +51,7 @@ To optimize tool performance with caching, define custom caching strategies usin
 @tool("Tool with Caching")
 def cached_tool(argument: str) -> str:
     """Tool functionality description."""
-    return "Cachable result"
+    return "Cacheable result"
 
 def my_cache_strategy(arguments: dict, result: str) -> bool:
     # Define custom caching logic
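The snippet in this hunk is truncated; a more complete sketch of the same idea, assuming the `@tool` decorator from `crewai_tools` and the tool's `cache_function` hook that this page describes, might look like the following. The caching rule itself is purely illustrative.

```python
from crewai_tools import tool

@tool("Tool with Caching")
def cached_tool(argument: str) -> str:
    """Tool functionality description."""
    return "Cacheable result"

def my_cache_strategy(arguments: dict, result: str) -> bool:
    # Illustrative rule: only cache results for one specific input value.
    return arguments.get("argument") == "cache me"

# Attach the custom caching strategy to the tool (assumed hook).
cached_tool.cache_function = my_cache_strategy
```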
@@ -80,4 +80,3 @@ manager = Agent(
 1. `allow_code_execution`: Enable or disable code execution capabilities for the agent (default is False).
 2. `max_execution_time`: Set a maximum execution time (in seconds) for the agent to complete a task.
 3. `function_calling_llm`: Specify a separate language model for function calling.
-4
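A small sketch of an agent using the attributes listed above; the role, goal, backstory and values are illustrative.

```python
from crewai import Agent

coder = Agent(
    role="Senior Python Developer",
    goal="Write and run code to answer data questions",
    backstory="A developer comfortable executing scripts safely.",
    allow_code_execution=True,  # default is False
    max_execution_time=300,     # seconds allowed per task
    # function_calling_llm could point at a separate model for tool calling,
    # as described in item 3 above.
)
```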
@@ -37,10 +37,9 @@ writer = Agent(
   backstory='A skilled writer with a talent for crafting compelling narratives'
 )
 
-# Define the tasks in sequence
-research_task = Task(description='Gather relevant data...', agent=researcher)
-analysis_task = Task(description='Analyze the data...', agent=analyst)
-writing_task = Task(description='Compose the report...', agent=writer)
+research_task = Task(description='Gather relevant data...', agent=researcher, expected_output='Raw Data')
+analysis_task = Task(description='Analyze the data...', agent=analyst, expected_output='Data Insights')
+writing_task = Task(description='Compose the report...', agent=writer, expected_output='Final Report')
 
 # Form the crew with a sequential process
 report_crew = Crew(
@@ -4,7 +4,7 @@
 We are still working on improving tools, so there might be unexpected behavior or changes in the future.
 
 ## Description
-The GithubSearchTool is a Read, Append, and Generate (RAG) tool specifically designed for conducting semantic searches within GitHub repositories. Utilizing advanced semantic search capabilities, it sifts through code, pull requests, issues, and repositories, making it an essential tool for developers, researchers, or anyone in need of precise information from GitHub.
+The GithubSearchTool is a Retrieval-Augmented Generation (RAG) tool specifically designed for conducting semantic searches within GitHub repositories. Utilizing advanced semantic search capabilities, it sifts through code, pull requests, issues, and repositories, making it an essential tool for developers, researchers, or anyone in need of precise information from GitHub.
 
 ## Installation
 To use the GithubSearchTool, first ensure the crewai_tools package is installed in your Python environment:
@@ -31,7 +31,7 @@ tool = TXTSearchTool(txt='path/to/text/file.txt')
 ```
 
 ## Arguments
-- `txt` (str): **Optinal**. The path to the text file you want to search. This argument is only required if the tool was not initialized with a specific text file; otherwise, the search will be conducted within the initially provided text file.
+- `txt` (str): **Optional**. The path to the text file you want to search. This argument is only required if the tool was not initialized with a specific text file; otherwise, the search will be conducted within the initially provided text file.
 
 ## Custom model and embeddings
 
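For completeness, a tiny usage sketch based on the constructor shown in this hunk's header; the import path is assumed from the crewai_tools installation note above, and the file path is illustrative.

```python
from crewai_tools import TXTSearchTool

# Initialize with a specific file so the `txt` argument becomes optional at search time.
tool = TXTSearchTool(txt="path/to/text/file.txt")
```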
poetry.lock (generated): 552 changes
@@ -1,5 +1,25 @@
 # This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand.
 
+[[package]]
+name = "agentops"
+version = "0.1.12"
+description = "Python SDK for developing AI agent evals and observability"
+optional = true
+python-versions = ">=3.7"
+files = [
+    {file = "agentops-0.1.12-py3-none-any.whl", hash = "sha256:b4b47c990638b74810cc1c38624ada162094b46e3fdd63883642a16bc5258386"},
+    {file = "agentops-0.1.12.tar.gz", hash = "sha256:c4f762482fb240fc3503907f52498f2d8d9e4f80236ee4a12bf039317a85fcd7"},
+]
+
+[package.dependencies]
+packaging = "23.2"
+psutil = "5.9.8"
+requests = "2.31.0"
+
+[package.extras]
+dev = ["pytest (==7.4.0)", "requests-mock (==1.11.0)"]
+langchain = ["langchain (>=1.19,<2.0)"]
+
 [[package]]
 name = "aiohttp"
 version = "3.9.5"
@@ -323,17 +343,17 @@ lxml = ["lxml"]
 
 [[package]]
 name = "boto3"
-version = "1.34.137"
+version = "1.34.140"
 description = "The AWS SDK for Python"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "boto3-1.34.137-py3-none-any.whl", hash = "sha256:7cb697d67fd138ceebc6f789919ae370c092a50c6b0ccc4ef483027935502eab"},
-    {file = "boto3-1.34.137.tar.gz", hash = "sha256:0b21b84db4619b3711a6f643d465a5a25e81231ee43615c55a20ff6b89c6cc3c"},
+    {file = "boto3-1.34.140-py3-none-any.whl", hash = "sha256:23ca8d8f7a30c3bbd989808056b5fc5d68ff5121c02c722c6167b6b1bb7f8726"},
+    {file = "boto3-1.34.140.tar.gz", hash = "sha256:578bbd5e356005719b6b610d03edff7ea1b0824d078afe62d3fb8bea72f83a87"},
 ]
 
 [package.dependencies]
-botocore = ">=1.34.137,<1.35.0"
+botocore = ">=1.34.140,<1.35.0"
 jmespath = ">=0.7.1,<2.0.0"
 s3transfer = ">=0.10.0,<0.11.0"
 
@@ -342,13 +362,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
 
 [[package]]
 name = "botocore"
-version = "1.34.137"
+version = "1.34.140"
 description = "Low-level, data-driven core of boto 3."
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "botocore-1.34.137-py3-none-any.whl", hash = "sha256:a980fa4adec4bfa23fff70a3512622e9412c69c791898a52cafc2458b0be6040"},
-    {file = "botocore-1.34.137.tar.gz", hash = "sha256:e29c8e9bfda0b20a1997792968e85868bfce42fefad9730f633a81adcff3f2ef"},
+    {file = "botocore-1.34.140-py3-none-any.whl", hash = "sha256:43940d3a67d946ba3301631ba4078476a75f1015d4fb0fb0272d0b754b2cf9de"},
+    {file = "botocore-1.34.140.tar.gz", hash = "sha256:86302b2226c743b9eec7915a4c6cfaffd338ae03989cd9ee181078ef39d1ab39"},
 ]
 
 [package.dependencies]
@@ -438,13 +458,13 @@ test = ["flake8", "isort", "pytest"]
 
 [[package]]
 name = "certifi"
-version = "2024.6.2"
+version = "2024.7.4"
 description = "Python package for providing Mozilla's CA Bundle."
 optional = false
 python-versions = ">=3.6"
 files = [
-    {file = "certifi-2024.6.2-py3-none-any.whl", hash = "sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56"},
-    {file = "certifi-2024.6.2.tar.gz", hash = "sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516"},
+    {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"},
+    {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"},
 ]
 
 [[package]]
@@ -727,13 +747,13 @@ all = ["pycocotools (==2.0.6)"]
 
 [[package]]
 name = "clarifai-grpc"
-version = "10.5.4"
+version = "10.6.1"
 description = "Clarifai gRPC API Client"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "clarifai_grpc-10.5.4-py3-none-any.whl", hash = "sha256:ae4c4d8985fdd2bf326cec27ee834571e44d0e989fb12686dd681f9b553ae218"},
-    {file = "clarifai_grpc-10.5.4.tar.gz", hash = "sha256:c67ce0dde186e8bab0d42a9923d28ddb4a05017b826c8e52ac7a86ec6df5f12a"},
+    {file = "clarifai_grpc-10.6.1-py3-none-any.whl", hash = "sha256:7f07c262f46042995b11af10cdd552718c4487e955db1b3f1253fcb0c2ab1ce1"},
+    {file = "clarifai_grpc-10.6.1.tar.gz", hash = "sha256:f692e3d6a051a1228ca371c3a9dc705cc9a61334eecc454d056f7af0b6f4dbad"},
 ]
 
 [package.dependencies]
@@ -820,13 +840,13 @@ files = [
 
 [[package]]
 name = "crewai-tools"
-version = "0.4.5"
+version = "0.4.7"
 description = "Set of tools for the crewAI framework"
 optional = false
 python-versions = "<=3.13,>=3.10"
 files = [
-    {file = "crewai_tools-0.4.5-py3-none-any.whl", hash = "sha256:d970a152a69d039eb23150755d4dc9e7c6ef9176b19a80348142619518e39b17"},
-    {file = "crewai_tools-0.4.5.tar.gz", hash = "sha256:1d7763f20afd95c6be70d31f9f0e9334cb42be127c17feddce368907077a6543"},
+    {file = "crewai_tools-0.4.7-py3-none-any.whl", hash = "sha256:3ff04b2da07d2c48e72f898511295b4a10038dd3e4fe859baa93fec1fb8baf8e"},
+    {file = "crewai_tools-0.4.7.tar.gz", hash = "sha256:4502a5e0ab94a7dae6638d000768f80049918909ca5338cdebc280351b3ce003"},
 ]
 
 [package.dependencies]
@@ -834,9 +854,9 @@ beautifulsoup4 = ">=4.12.3,<5.0.0"
 chromadb = ">=0.4.22,<0.5.0"
 docker = ">=7.1.0,<8.0.0"
 docx2txt = ">=0.8,<0.9"
-embedchain = ">=0.1.113,<0.2.0"
+embedchain = ">=0.1.114,<0.2.0"
 lancedb = ">=0.5.4,<0.6.0"
-langchain = ">=0.1.4,<0.2.0"
+langchain = ">0.2,<=0.3"
 openai = ">=1.12.0,<2.0.0"
 pydantic = ">=2.6.1,<3.0.0"
 pyright = ">=1.1.350,<2.0.0"
@@ -1034,13 +1054,13 @@ idna = ">=2.0.0"
 
 [[package]]
 name = "embedchain"
-version = "0.1.113"
+version = "0.1.115"
 description = "Simplest open source retrieval (RAG) framework"
 optional = false
 python-versions = "<=3.13,>=3.9"
 files = [
-    {file = "embedchain-0.1.113-py3-none-any.whl", hash = "sha256:f37b029d8f8509a5db99d1579168ab2ba7d5841c280289f6a2ae702601caf96f"},
-    {file = "embedchain-0.1.113.tar.gz", hash = "sha256:5477012d37912a0e89758263b1a8db4699b7d0dedd7f18ccc89f3381d6b9173d"},
+    {file = "embedchain-0.1.115-py3-none-any.whl", hash = "sha256:6f1842edea7780e6b1d21d073b72b2c916f440411c32e758165c88141b2ac1ec"},
+    {file = "embedchain-0.1.115.tar.gz", hash = "sha256:8b259a3307e022924c99cf82b17789f141d157f11e53beb3bc82bf49efa2e31e"},
 ]
 
 [package.dependencies]
@@ -1048,10 +1068,12 @@ alembic = ">=1.13.1,<2.0.0"
 beautifulsoup4 = ">=4.12.2,<5.0.0"
 chromadb = ">=0.4.24,<0.5.0"
 clarifai = ">=10.0.1,<11.0.0"
+cohere = ">=5.3,<6.0"
 google-cloud-aiplatform = ">=1.26.1,<2.0.0"
 gptcache = ">=0.1.43,<0.2.0"
-langchain = ">=0.1.4,<0.2.0"
+langchain = ">0.2,<=0.3"
 langchain-cohere = ">=0.1.4,<0.2.0"
+langchain-community = ">=0.2.6,<0.3.0"
 langchain-openai = ">=0.1.7,<0.2.0"
 openai = ">=1.1.1"
 posthog = ">=3.0.2,<4.0.0"
@@ -1065,7 +1087,6 @@ tiktoken = ">=0.7.0,<0.8.0"
 
 [package.extras]
 aws-bedrock = ["boto3 (>=1.34.20,<2.0.0)"]
-cohere = ["cohere (>=5.3,<6.0)"]
 dataloaders = ["docx2txt (>=0.8,<0.9)", "duckduckgo-search (>=6.1.5,<7.0.0)", "pytube (>=15.0.0,<16.0.0)", "sentence-transformers (>=2.2.2,<3.0.0)", "youtube-transcript-api (>=0.6.1,<0.7.0)"]
 discord = ["discord (>=2.3.2,<3.0.0)"]
 dropbox = ["dropbox (>=11.36.2,<12.0.0)"]
@@ -1078,7 +1099,7 @@ huggingface-hub = ["huggingface_hub (>=0.17.3,<0.18.0)"]
 lancedb = ["lancedb (>=0.6.2,<0.7.0)"]
 llama2 = ["replicate (>=0.15.4,<0.16.0)"]
 milvus = ["pymilvus (==2.4.3)"]
-mistralai = ["langchain-mistralai (>=0.0.3,<0.0.4)"]
+mistralai = ["langchain-mistralai (>=0.1.9,<0.2.0)"]
 modal = ["modal (>=0.56.4329,<0.57.0)"]
 mysql = ["mysql-connector-python (>=8.1.0,<9.0.0)"]
 opensearch = ["opensearch-py (==2.3.1)"]
@@ -1089,7 +1110,7 @@ qdrant = ["qdrant-client (>=1.6.3,<2.0.0)"]
 rss-feed = ["feedparser (>=6.0.10,<7.0.0)", "listparser (>=0.19,<0.20)", "newspaper3k (>=0.2.8,<0.3.0)"]
 slack = ["flask (>=2.3.3,<3.0.0)", "slack-sdk (==3.21.3)"]
 together = ["together (>=0.2.8,<0.3.0)"]
-vertexai = ["langchain-google-vertexai (>=0.0.5,<0.0.6)"]
+vertexai = ["langchain-google-vertexai (>=1.0.6,<2.0.0)"]
 weaviate = ["weaviate-client (>=3.24.1,<4.0.0)"]
 whatsapp = ["flask (>=2.3.3,<3.0.0)", "twilio (>=8.5.0,<9.0.0)"]
 youtube = ["youtube-transcript-api (>=0.6.1,<0.7.0)", "yt_dlp (>=2023.11.14,<2024.0.0)"]
@@ -1423,13 +1444,13 @@ requests = ["requests (>=2.20.0,<3.0.0.dev0)"]
 
 [[package]]
 name = "google-cloud-aiplatform"
-version = "1.57.0"
+version = "1.58.0"
 description = "Vertex AI API client library"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "google-cloud-aiplatform-1.57.0.tar.gz", hash = "sha256:113905f100cb0a9ad744a2445a7675f92f28600233ba499614aa704d11a809b7"},
-    {file = "google_cloud_aiplatform-1.57.0-py2.py3-none-any.whl", hash = "sha256:ca5391a56e0cc8f4ed39a2beb7be02f51936ff04fd5304775a72a86c345d0e47"},
+    {file = "google-cloud-aiplatform-1.58.0.tar.gz", hash = "sha256:7a05aceac4a6c7eaa26e684e9f202b829cc7e57f82bffe7281684275a553fcad"},
+    {file = "google_cloud_aiplatform-1.58.0-py2.py3-none-any.whl", hash = "sha256:21f1320860f4916183ec939fdf2ff3fc1d7fdde97fe5795974257ab21f9458ec"},
 ]
 
 [package.dependencies]
@@ -1450,7 +1471,7 @@ autologging = ["mlflow (>=1.27.0,<=2.1.1)"]
|
|||||||
cloud-profiler = ["tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "werkzeug (>=2.0.0,<2.1.0dev)"]
|
cloud-profiler = ["tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "werkzeug (>=2.0.0,<2.1.0dev)"]
|
||||||
datasets = ["pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)"]
|
datasets = ["pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)"]
|
||||||
endpoint = ["requests (>=2.28.1)"]
|
endpoint = ["requests (>=2.28.1)"]
|
||||||
full = ["cloudpickle (<3.0)", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<=0.109.1)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-cloud-logging (<4.0)", "google-vizier (>=0.1.6)", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.1.1)", "nest-asyncio (>=1.0.0,<1.6.0)", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pyyaml (>=5.3.1,<7)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "requests (>=2.28.1)", "setuptools (<70.0.0)", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug (>=2.0.0,<2.1.0dev)"]
|
full = ["cloudpickle (<3.0)", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<=0.109.1)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-cloud-logging (<4.0)", "google-vizier (>=0.1.6)", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.1.1)", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pyyaml (>=5.3.1,<7)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "requests (>=2.28.1)", "setuptools (<70.0.0)", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "tqdm (>=4.23.0)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug (>=2.0.0,<2.1.0dev)"]
|
||||||
langchain = ["langchain (>=0.1.16,<0.3)", "langchain-core (<0.2)", "langchain-google-vertexai (<2)", "openinference-instrumentation-langchain (>=0.1.19,<0.2)", "tenacity (<=8.3)"]
|
langchain = ["langchain (>=0.1.16,<0.3)", "langchain-core (<0.2)", "langchain-google-vertexai (<2)", "openinference-instrumentation-langchain (>=0.1.19,<0.2)", "tenacity (<=8.3)"]
|
||||||
langchain-testing = ["absl-py", "cloudpickle (>=3.0,<4.0)", "langchain (>=0.1.16,<0.3)", "langchain-core (<0.2)", "langchain-google-vertexai (<2)", "openinference-instrumentation-langchain (>=0.1.19,<0.2)", "opentelemetry-exporter-gcp-trace (<2)", "opentelemetry-sdk (<2)", "pydantic (>=2.6.3,<3)", "pytest-xdist", "tenacity (<=8.3)"]
|
langchain-testing = ["absl-py", "cloudpickle (>=3.0,<4.0)", "langchain (>=0.1.16,<0.3)", "langchain-core (<0.2)", "langchain-google-vertexai (<2)", "openinference-instrumentation-langchain (>=0.1.19,<0.2)", "opentelemetry-exporter-gcp-trace (<2)", "opentelemetry-sdk (<2)", "pydantic (>=2.6.3,<3)", "pytest-xdist", "tenacity (<=8.3)"]
|
||||||
lit = ["explainable-ai-sdk (>=1.0.0)", "lit-nlp (==0.4.0)", "pandas (>=1.0.0)", "tensorflow (>=2.3.0,<3.0.0dev)"]
|
lit = ["explainable-ai-sdk (>=1.0.0)", "lit-nlp (==0.4.0)", "pandas (>=1.0.0)", "tensorflow (>=2.3.0,<3.0.0dev)"]
|
||||||
@@ -1459,12 +1480,12 @@ pipelines = ["pyyaml (>=5.3.1,<7)"]
|
|||||||
prediction = ["docker (>=5.0.3)", "fastapi (>=0.71.0,<=0.109.1)", "httpx (>=0.23.0,<0.25.0)", "starlette (>=0.17.1)", "uvicorn[standard] (>=0.16.0)"]
|
prediction = ["docker (>=5.0.3)", "fastapi (>=0.71.0,<=0.109.1)", "httpx (>=0.23.0,<0.25.0)", "starlette (>=0.17.1)", "uvicorn[standard] (>=0.16.0)"]
|
||||||
preview = ["cloudpickle (<3.0)", "google-cloud-logging (<4.0)"]
|
preview = ["cloudpickle (<3.0)", "google-cloud-logging (<4.0)"]
|
||||||
private-endpoints = ["requests (>=2.28.1)", "urllib3 (>=1.21.1,<1.27)"]
|
private-endpoints = ["requests (>=2.28.1)", "urllib3 (>=1.21.1,<1.27)"]
|
||||||
rapid-evaluation = ["nest-asyncio (>=1.0.0,<1.6.0)", "pandas (>=1.0.0,<2.2.0)"]
|
rapid-evaluation = ["pandas (>=1.0.0,<2.2.0)", "tqdm (>=4.23.0)"]
|
||||||
ray = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "immutabledict", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "setuptools (<70.0.0)"]
|
ray = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "immutabledict", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "setuptools (<70.0.0)"]
|
||||||
ray-testing = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "immutabledict", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pytest-xdist", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "ray[train] (==2.9.3)", "scikit-learn", "setuptools (<70.0.0)", "tensorflow", "torch (>=2.0.0,<2.1.0)", "xgboost", "xgboost-ray"]
|
ray-testing = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "immutabledict", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pytest-xdist", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "ray[train] (==2.9.3)", "scikit-learn", "setuptools (<70.0.0)", "tensorflow", "torch (>=2.0.0,<2.1.0)", "xgboost", "xgboost-ray"]
|
||||||
reasoningengine = ["cloudpickle (>=3.0,<4.0)", "opentelemetry-exporter-gcp-trace (<2)", "opentelemetry-sdk (<2)", "pydantic (>=2.6.3,<3)"]
|
reasoningengine = ["cloudpickle (>=3.0,<4.0)", "opentelemetry-exporter-gcp-trace (<2)", "opentelemetry-sdk (<2)", "pydantic (>=2.6.3,<3)"]
|
||||||
tensorboard = ["tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "werkzeug (>=2.0.0,<2.1.0dev)"]
|
tensorboard = ["tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "werkzeug (>=2.0.0,<2.1.0dev)"]
|
||||||
testing = ["bigframes", "cloudpickle (<3.0)", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<=0.109.1)", "google-api-core (>=2.11,<3.0.0)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-cloud-logging (<4.0)", "google-vizier (>=0.1.6)", "grpcio-testing", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "ipython", "kfp (>=2.6.0,<3.0.0)", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.1.1)", "nest-asyncio (>=1.0.0,<1.6.0)", "nltk", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pyfakefs", "pytest-asyncio", "pytest-xdist", "pyyaml (>=5.3.1,<7)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "requests (>=2.28.1)", "requests-toolbelt (<1.0.0)", "scikit-learn", "sentencepiece (>=0.2.0)", "setuptools (<70.0.0)", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (==2.13.0)", "tensorflow (==2.16.1)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "torch (>=2.0.0,<2.1.0)", "torch (>=2.2.0)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug (>=2.0.0,<2.1.0dev)", "xgboost"]
|
testing = ["bigframes", "cloudpickle (<3.0)", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<=0.109.1)", "google-api-core (>=2.11,<3.0.0)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-cloud-logging (<4.0)", "google-vizier (>=0.1.6)", "grpcio-testing", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "ipython", "kfp (>=2.6.0,<3.0.0)", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.1.1)", "nltk", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pyfakefs", "pytest-asyncio", "pytest-xdist", "pyyaml (>=5.3.1,<7)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "requests (>=2.28.1)", "requests-toolbelt (<1.0.0)", "scikit-learn", "sentencepiece (>=0.2.0)", "setuptools (<70.0.0)", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (==2.13.0)", "tensorflow (==2.16.1)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "torch (>=2.0.0,<2.1.0)", "torch (>=2.2.0)", "tqdm (>=4.23.0)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug (>=2.0.0,<2.1.0dev)", "xgboost"]
|
||||||
tokenization = ["sentencepiece (>=0.2.0)"]
|
tokenization = ["sentencepiece (>=0.2.0)"]
|
||||||
vizier = ["google-vizier (>=0.1.6)"]
|
vizier = ["google-vizier (>=0.1.6)"]
|
||||||
xai = ["tensorflow (>=2.3.0,<3.0.0dev)"]
|
xai = ["tensorflow (>=2.3.0,<3.0.0dev)"]
|
||||||
@@ -2357,156 +2378,140 @@ tests = ["aiohttp", "duckdb", "pandas (>=1.4)", "polars (>=0.19)", "pytest", "py
 
 [[package]]
 name = "langchain"
-version = "0.1.20"
+version = "0.2.6"
 description = "Building applications with LLMs through composability"
 optional = false
 python-versions = "<4.0,>=3.8.1"
 files = [
-    {file = "langchain-0.1.20-py3-none-any.whl", hash = "sha256:09991999fbd6c3421a12db3c7d1f52d55601fc41d9b2a3ef51aab2e0e9c38da9"},
-    {file = "langchain-0.1.20.tar.gz", hash = "sha256:f35c95eed8c8375e02dce95a34f2fd4856a4c98269d6dc34547a23dba5beab7e"},
+    {file = "langchain-0.2.6-py3-none-any.whl", hash = "sha256:f86e8a7afd3e56f8eb5ba47f01dd00144fb9fc2f1db9873bd197347be2857aa4"},
+    {file = "langchain-0.2.6.tar.gz", hash = "sha256:867f6add370c1e3911b0e87d3dd0e36aec1e8f513bf06131340fe8f151d89dc5"},
 ]
 
 [package.dependencies]
 aiohttp = ">=3.8.3,<4.0.0"
 async-timeout = {version = ">=4.0.0,<5.0.0", markers = "python_version < \"3.11\""}
-dataclasses-json = ">=0.5.7,<0.7"
-langchain-community = ">=0.0.38,<0.1"
-langchain-core = ">=0.1.52,<0.2.0"
-langchain-text-splitters = ">=0.0.1,<0.1"
+langchain-core = ">=0.2.10,<0.3.0"
+langchain-text-splitters = ">=0.2.0,<0.3.0"
 langsmith = ">=0.1.17,<0.2.0"
-numpy = ">=1,<2"
+numpy = [
+    {version = ">=1.26.0,<2.0.0", markers = "python_version >= \"3.12\""},
+    {version = ">=1,<2", markers = "python_version < \"3.12\""},
+]
 pydantic = ">=1,<3"
 PyYAML = ">=5.3"
 requests = ">=2,<3"
 SQLAlchemy = ">=1.4,<3"
-tenacity = ">=8.1.0,<9.0.0"
+tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<9.0.0"
 
-[package.extras]
-azure = ["azure-ai-formrecognizer (>=3.2.1,<4.0.0)", "azure-ai-textanalytics (>=5.3.0,<6.0.0)", "azure-cognitiveservices-speech (>=1.28.0,<2.0.0)", "azure-core (>=1.26.4,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "azure-search-documents (==11.4.0b8)", "openai (<2)"]
-clarifai = ["clarifai (>=9.1.0)"]
-cli = ["typer (>=0.9.0,<0.10.0)"]
-cohere = ["cohere (>=4,<6)"]
-docarray = ["docarray[hnswlib] (>=0.32.0,<0.33.0)"]
-embeddings = ["sentence-transformers (>=2,<3)"]
extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cohere (>=4,<6)", "couchbase (>=4.1.9,<5.0.0)", "dashvector (>=1.0.1,<2.0.0)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "langchain-openai (>=0.0.2,<0.1)", "lxml (>=4.9.3,<6.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"]
-javascript = ["esprima (>=4.0.1,<5.0.0)"]
-llms = ["clarifai (>=9.1.0)", "cohere (>=4,<6)", "huggingface_hub (>=0,<1)", "manifest-ml (>=0.0.1,<0.0.2)", "nlpcloud (>=1,<2)", "openai (<2)", "openlm (>=0.0.5,<0.0.6)", "torch (>=1,<3)", "transformers (>=4,<5)"]
-openai = ["openai (<2)", "tiktoken (>=0.3.2,<0.6.0)"]
-qdrant = ["qdrant-client (>=1.3.1,<2.0.0)"]
-text-helpers = ["chardet (>=5.1.0,<6.0.0)"]
 
 [[package]]
 name = "langchain-cohere"
-version = "0.1.5"
+version = "0.1.8"
 description = "An integration package connecting Cohere and LangChain"
 optional = false
 python-versions = "<4.0,>=3.8.1"
 files = [
-    {file = "langchain_cohere-0.1.5-py3-none-any.whl", hash = "sha256:f07bd53fadbebf744b8de1eebf977353f340f2010156821623a0c6247032ab9b"},
-    {file = "langchain_cohere-0.1.5.tar.gz", hash = "sha256:d0be4e76079a74c4259fe4db2bab535d690efe0efac5e9e2fbf486476c0a85c8"},
+    {file = "langchain_cohere-0.1.8-py3-none-any.whl", hash = "sha256:d3ef73d5050513ff3ca0f07c8f3f73b7773eec182312aae92138d3a0ad33e631"},
+    {file = "langchain_cohere-0.1.8.tar.gz", hash = "sha256:edbeca8d041186d2831b495d9a392a0a94d15b0e2c98863e0a0cd001fc888842"},
 ]
 
 [package.dependencies]
-cohere = ">=5.5,<6.0"
+cohere = ">=5.5.6,<6.0"
-langchain-core = ">=0.1.42,<0.3"
+langchain-core = ">=0.2.0,<0.3"
 
 [[package]]
 name = "langchain-community"
-version = "0.0.38"
+version = "0.2.6"
 description = "Community contributed LangChain integrations."
 optional = false
 python-versions = "<4.0,>=3.8.1"
 files = [
-    {file = "langchain_community-0.0.38-py3-none-any.whl", hash = "sha256:ecb48660a70a08c90229be46b0cc5f6bc9f38f2833ee44c57dfab9bf3a2c121a"},
-    {file = "langchain_community-0.0.38.tar.gz", hash = "sha256:127fc4b75bc67b62fe827c66c02e715a730fef8fe69bd2023d466bab06b5810d"},
+    {file = "langchain_community-0.2.6-py3-none-any.whl", hash = "sha256:758cc800acfe5dd396bf8ba1b57c4792639ead0eab48ed0367f0732ec6ee1f68"},
+    {file = "langchain_community-0.2.6.tar.gz", hash = "sha256:40ce09a50ed798aa651ddb34c8978200fa8589b9813c7a28ce8af027bbf249f0"},
 ]
 
 [package.dependencies]
 aiohttp = ">=3.8.3,<4.0.0"
 dataclasses-json = ">=0.5.7,<0.7"
-langchain-core = ">=0.1.52,<0.2.0"
+langchain = ">=0.2.6,<0.3.0"
+langchain-core = ">=0.2.10,<0.3.0"
 langsmith = ">=0.1.0,<0.2.0"
-numpy = ">=1,<2"
+numpy = [
+    {version = ">=1.26.0,<2.0.0", markers = "python_version >= \"3.12\""},
+    {version = ">=1,<2", markers = "python_version < \"3.12\""},
+]
 PyYAML = ">=5.3"
 requests = ">=2,<3"
 SQLAlchemy = ">=1.4,<3"
-tenacity = ">=8.1.0,<9.0.0"
+tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<9.0.0"
 
-[package.extras]
-cli = ["typer (>=0.9.0,<0.10.0)"]
extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "azure-ai-documentintelligence (>=1.0.0b1,<2.0.0)", "azure-identity (>=1.15.0,<2.0.0)", "azure-search-documents (==11.4.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.6,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cloudpickle (>=2.0.0)", "cohere (>=4,<5)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "elasticsearch (>=8.12.0,<9.0.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "friendli-client (>=1.2.4,<2.0.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "gradientai (>=1.4.0,<2.0.0)", "hdbcli (>=2.19.21,<3.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "httpx (>=0.24.1,<0.25.0)", "httpx-sse (>=0.4.0,<0.5.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "lxml (>=4.9.3,<6.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "nvidia-riva-client (>=2.14.0,<3.0.0)", "oci (>=2.119.1,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "oracle-ads (>=2.9.1,<3.0.0)", "oracledb (>=2.2.0,<3.0.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "premai (>=0.3.25,<0.4.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pyjwt (>=2.8.0,<3.0.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "tidb-vector (>=0.0.3,<1.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "tree-sitter (>=0.20.2,<0.21.0)", "tree-sitter-languages (>=1.8.0,<2.0.0)", "upstash-redis (>=0.15.0,<0.16.0)", "vdms (>=0.0.20,<0.0.21)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "langchain-core"
|
name = "langchain-core"
|
||||||
version = "0.1.52"
|
version = "0.2.11"
|
||||||
description = "Building applications with LLMs through composability"
|
description = "Building applications with LLMs through composability"
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = "<4.0,>=3.8.1"
|
python-versions = "<4.0,>=3.8.1"
|
||||||
files = [
|
files = [
|
||||||
{file = "langchain_core-0.1.52-py3-none-any.whl", hash = "sha256:62566749c92e8a1181c255c788548dc16dbc319d896cd6b9c95dc17af9b2a6db"},
|
{file = "langchain_core-0.2.11-py3-none-any.whl", hash = "sha256:c7ca4dc4d88e3c69fd7916c95a7027c2b1a11c2db5a51141c3ceb8afac212208"},
|
||||||
{file = "langchain_core-0.1.52.tar.gz", hash = "sha256:084c3fc452f5a6966c28ab3ec5dbc8b8d26fc3f63378073928f4e29d90b6393f"},
|
{file = "langchain_core-0.2.11.tar.gz", hash = "sha256:7a4661b50604eeb20c3373fbfd8a4f1b74482a6ab4e0f9df11e96821ead8ef0c"},
|
||||||
]
|
]
|
||||||
|
|
||||||
[package.dependencies]
|
[package.dependencies]
|
||||||
jsonpatch = ">=1.33,<2.0"
|
jsonpatch = ">=1.33,<2.0"
|
||||||
langsmith = ">=0.1.0,<0.2.0"
|
langsmith = ">=0.1.75,<0.2.0"
|
||||||
packaging = ">=23.2,<24.0"
|
packaging = ">=23.2,<25"
|
||||||
pydantic = ">=1,<3"
|
pydantic = [
|
||||||
|
{version = ">=2.7.4,<3.0.0", markers = "python_full_version >= \"3.12.4\""},
|
||||||
|
{version = ">=1,<3", markers = "python_full_version < \"3.12.4\""},
|
||||||
|
]
|
||||||
PyYAML = ">=5.3"
|
PyYAML = ">=5.3"
|
||||||
tenacity = ">=8.1.0,<9.0.0"
|
tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<9.0.0"
|
||||||
|
|
||||||
[package.extras]
|
|
||||||
extended-testing = ["jinja2 (>=3,<4)"]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "langchain-openai"
|
name = "langchain-openai"
|
||||||
version = "0.1.7"
|
version = "0.1.14"
|
||||||
description = "An integration package connecting OpenAI and LangChain"
|
description = "An integration package connecting OpenAI and LangChain"
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = "<4.0,>=3.8.1"
|
python-versions = "<4.0,>=3.8.1"
|
||||||
files = [
|
files = [
|
||||||
{file = "langchain_openai-0.1.7-py3-none-any.whl", hash = "sha256:39c3cb22bb739900ae8294d4d9939a6138c0ca7ad11198e57038eb14c08d04ec"},
|
{file = "langchain_openai-0.1.14-py3-none-any.whl", hash = "sha256:fcd34cc5b5713798908a5828d364b4426e3b1afccbd564d344e5477acb86634a"},
|
||||||
{file = "langchain_openai-0.1.7.tar.gz", hash = "sha256:fd7e1c33ba8e2cab4b2154f3a2fd4a0d9cc6518b41cf49bb87255f9f732a4896"},
|
{file = "langchain_openai-0.1.14.tar.gz", hash = "sha256:1f13d6041e8bedddf6eb47ccad7416e05af38fa169324f7f1bdf4f385780f8d8"},
|
||||||
]
|
]
|
||||||
|
|
||||||
[package.dependencies]
|
[package.dependencies]
|
||||||
langchain-core = ">=0.1.46,<0.3"
|
langchain-core = ">=0.2.2,<0.3"
|
||||||
openai = ">=1.24.0,<2.0.0"
|
openai = ">=1.32.0,<2.0.0"
|
||||||
tiktoken = ">=0.7,<1"
|
tiktoken = ">=0.7,<1"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "langchain-text-splitters"
|
name = "langchain-text-splitters"
|
||||||
version = "0.0.2"
|
version = "0.2.2"
|
||||||
description = "LangChain text splitting utilities"
|
description = "LangChain text splitting utilities"
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = "<4.0,>=3.8.1"
|
python-versions = "<4.0,>=3.8.1"
|
||||||
files = [
|
files = [
|
||||||
{file = "langchain_text_splitters-0.0.2-py3-none-any.whl", hash = "sha256:13887f32705862c1e1454213cb7834a63aae57c26fcd80346703a1d09c46168d"},
|
{file = "langchain_text_splitters-0.2.2-py3-none-any.whl", hash = "sha256:1c80d4b11b55e2995f02d2a326c0323ee1eeff24507329bb22924e420c782dff"},
|
||||||
{file = "langchain_text_splitters-0.0.2.tar.gz", hash = "sha256:ac8927dc0ba08eba702f6961c9ed7df7cead8de19a9f7101ab2b5ea34201b3c1"},
|
{file = "langchain_text_splitters-0.2.2.tar.gz", hash = "sha256:a1e45de10919fa6fb080ef0525deab56557e9552083600455cb9fa4238076140"},
|
||||||
]
|
]
|
||||||
|
|
||||||
[package.dependencies]
|
[package.dependencies]
|
||||||
langchain-core = ">=0.1.28,<0.3"
|
langchain-core = ">=0.2.10,<0.3.0"
|
||||||
|
|
||||||
[package.extras]
|
|
||||||
extended-testing = ["beautifulsoup4 (>=4.12.3,<5.0.0)", "lxml (>=4.9.3,<6.0)"]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "langsmith"
|
name = "langsmith"
|
||||||
version = "0.1.82"
|
version = "0.1.83"
|
||||||
description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform."
|
description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform."
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = "<4.0,>=3.8.1"
|
python-versions = "<4.0,>=3.8.1"
|
||||||
files = [
|
files = [
|
||||||
{file = "langsmith-0.1.82-py3-none-any.whl", hash = "sha256:9b3653e7d316036b0c60bf0bc3e280662d660f485a4ebd8e5c9d84f9831ae79c"},
|
{file = "langsmith-0.1.83-py3-none-any.whl", hash = "sha256:f54d8cd8479b648b6339f3f735d19292c3516d080f680933ecdca3eab4b67ed3"},
|
||||||
{file = "langsmith-0.1.82.tar.gz", hash = "sha256:c02e2bbc488c10c13b52c69d271eb40bd38da078d37b6ae7ae04a18bd48140be"},
|
{file = "langsmith-0.1.83.tar.gz", hash = "sha256:5cdd947212c8ad19adb992c06471c860185a777daa6859bb47150f90daf64bf3"},
|
||||||
]
|
]
|
||||||
|
|
||||||
[package.dependencies]
|
[package.dependencies]
|
||||||
orjson = ">=3.9.14,<4.0.0"
|
orjson = ">=3.9.14,<4.0.0"
|
||||||
pydantic = [
|
pydantic = [
|
||||||
{version = ">=1,<3", markers = "python_full_version < \"3.12.4\""},
|
|
||||||
{version = ">=2.7.4,<3.0.0", markers = "python_full_version >= \"3.12.4\""},
|
{version = ">=2.7.4,<3.0.0", markers = "python_full_version >= \"3.12.4\""},
|
||||||
|
{version = ">=1,<3", markers = "python_full_version < \"3.12.4\""},
|
||||||
]
|
]
|
||||||
requests = ">=2,<3"
|
requests = ">=2,<3"
|
||||||
|
|
||||||
@@ -2742,13 +2747,13 @@ pyyaml = ">=5.1"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "mkdocs-material"
|
name = "mkdocs-material"
|
||||||
version = "9.5.27"
|
version = "9.5.28"
|
||||||
description = "Documentation that simply works"
|
description = "Documentation that simply works"
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=3.8"
|
python-versions = ">=3.8"
|
||||||
files = [
|
files = [
|
||||||
{file = "mkdocs_material-9.5.27-py3-none-any.whl", hash = "sha256:af8cc263fafa98bb79e9e15a8c966204abf15164987569bd1175fd66a7705182"},
|
{file = "mkdocs_material-9.5.28-py3-none-any.whl", hash = "sha256:ff48b11b2a9f705dd210409ec3b418ab443dd36d96915bcba45a41f10ea27bfd"},
|
||||||
{file = "mkdocs_material-9.5.27.tar.gz", hash = "sha256:a7d4a35f6d4a62b0c43a0cfe7e987da0980c13587b5bc3c26e690ad494427ec0"},
|
{file = "mkdocs_material-9.5.28.tar.gz", hash = "sha256:9cba305283ad1600e3d0a67abe72d7a058b54793b47be39930911a588fe0336b"},
|
||||||
]
|
]
|
||||||
|
|
||||||
[package.dependencies]
|
[package.dependencies]
|
||||||
@@ -3214,13 +3219,13 @@ sympy = "*"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "openai"
|
name = "openai"
|
||||||
version = "1.35.7"
|
version = "1.35.10"
|
||||||
description = "The official Python library for the openai API"
|
description = "The official Python library for the openai API"
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=3.7.1"
|
python-versions = ">=3.7.1"
|
||||||
files = [
|
files = [
|
||||||
{file = "openai-1.35.7-py3-none-any.whl", hash = "sha256:3d1e0b0aac9b0db69a972d36dc7efa7563f8e8d65550b27a48f2a0c2ec207e80"},
|
{file = "openai-1.35.10-py3-none-any.whl", hash = "sha256:962cb5c23224b5cbd16078308dabab97a08b0a5ad736a4fdb3dc2ffc44ac974f"},
|
||||||
{file = "openai-1.35.7.tar.gz", hash = "sha256:009bfa1504c9c7ef64d87be55936d142325656bbc6d98c68b669d6472e4beb09"},
|
{file = "openai-1.35.10.tar.gz", hash = "sha256:85966949f4f960f3e4b239a659f9fd64d3a97ecc43c44dc0a044b5c7f11cccc6"},
|
||||||
]
|
]
|
||||||
|
|
||||||
[package.dependencies]
|
[package.dependencies]
|
||||||
@@ -3419,57 +3424,62 @@ files = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "orjson"
|
name = "orjson"
|
||||||
version = "3.10.5"
|
version = "3.10.6"
|
||||||
description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy"
|
description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy"
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=3.8"
|
python-versions = ">=3.8"
|
||||||
files = [
|
files = [
|
||||||
{file = "orjson-3.10.5-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:545d493c1f560d5ccfc134803ceb8955a14c3fcb47bbb4b2fee0232646d0b932"},
|
{file = "orjson-3.10.6-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:fb0ee33124db6eaa517d00890fc1a55c3bfe1cf78ba4a8899d71a06f2d6ff5c7"},
|
||||||
{file = "orjson-3.10.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4324929c2dd917598212bfd554757feca3e5e0fa60da08be11b4aa8b90013c1"},
|
{file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c1c4b53b24a4c06547ce43e5fee6ec4e0d8fe2d597f4647fc033fd205707365"},
|
||||||
{file = "orjson-3.10.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c13ca5e2ddded0ce6a927ea5a9f27cae77eee4c75547b4297252cb20c4d30e6"},
|
{file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eadc8fd310edb4bdbd333374f2c8fec6794bbbae99b592f448d8214a5e4050c0"},
|
||||||
{file = "orjson-3.10.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b6c8e30adfa52c025f042a87f450a6b9ea29649d828e0fec4858ed5e6caecf63"},
|
{file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:61272a5aec2b2661f4fa2b37c907ce9701e821b2c1285d5c3ab0207ebd358d38"},
|
||||||
{file = "orjson-3.10.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:338fd4f071b242f26e9ca802f443edc588fa4ab60bfa81f38beaedf42eda226c"},
|
{file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57985ee7e91d6214c837936dc1608f40f330a6b88bb13f5a57ce5257807da143"},
|
||||||
{file = "orjson-3.10.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6970ed7a3126cfed873c5d21ece1cd5d6f83ca6c9afb71bbae21a0b034588d96"},
|
{file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:633a3b31d9d7c9f02d49c4ab4d0a86065c4a6f6adc297d63d272e043472acab5"},
|
||||||
{file = "orjson-3.10.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:235dadefb793ad12f7fa11e98a480db1f7c6469ff9e3da5e73c7809c700d746b"},
|
{file = "orjson-3.10.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:1c680b269d33ec444afe2bdc647c9eb73166fa47a16d9a75ee56a374f4a45f43"},
|
||||||
{file = "orjson-3.10.5-cp310-none-win32.whl", hash = "sha256:be79e2393679eda6a590638abda16d167754393f5d0850dcbca2d0c3735cebe2"},
|
{file = "orjson-3.10.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f759503a97a6ace19e55461395ab0d618b5a117e8d0fbb20e70cfd68a47327f2"},
|
||||||
{file = "orjson-3.10.5-cp310-none-win_amd64.whl", hash = "sha256:c4a65310ccb5c9910c47b078ba78e2787cb3878cdded1702ac3d0da71ddc5228"},
|
{file = "orjson-3.10.6-cp310-none-win32.whl", hash = "sha256:95a0cce17f969fb5391762e5719575217bd10ac5a189d1979442ee54456393f3"},
|
||||||
{file = "orjson-3.10.5-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:cdf7365063e80899ae3a697def1277c17a7df7ccfc979990a403dfe77bb54d40"},
|
{file = "orjson-3.10.6-cp310-none-win_amd64.whl", hash = "sha256:df25d9271270ba2133cc88ee83c318372bdc0f2cd6f32e7a450809a111efc45c"},
|
||||||
{file = "orjson-3.10.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b68742c469745d0e6ca5724506858f75e2f1e5b59a4315861f9e2b1df77775a"},
|
{file = "orjson-3.10.6-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:b1ec490e10d2a77c345def52599311849fc063ae0e67cf4f84528073152bb2ba"},
|
||||||
{file = "orjson-3.10.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7d10cc1b594951522e35a3463da19e899abe6ca95f3c84c69e9e901e0bd93d38"},
|
{file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55d43d3feb8f19d07e9f01e5b9be4f28801cf7c60d0fa0d279951b18fae1932b"},
|
||||||
{file = "orjson-3.10.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dcbe82b35d1ac43b0d84072408330fd3295c2896973112d495e7234f7e3da2e1"},
|
{file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac3045267e98fe749408eee1593a142e02357c5c99be0802185ef2170086a863"},
|
||||||
{file = "orjson-3.10.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c0eb7e0c75e1e486c7563fe231b40fdd658a035ae125c6ba651ca3b07936f5"},
|
{file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c27bc6a28ae95923350ab382c57113abd38f3928af3c80be6f2ba7eb8d8db0b0"},
|
||||||
{file = "orjson-3.10.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:53ed1c879b10de56f35daf06dbc4a0d9a5db98f6ee853c2dbd3ee9d13e6f302f"},
|
{file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d27456491ca79532d11e507cadca37fb8c9324a3976294f68fb1eff2dc6ced5a"},
|
||||||
{file = "orjson-3.10.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:099e81a5975237fda3100f918839af95f42f981447ba8f47adb7b6a3cdb078fa"},
|
{file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05ac3d3916023745aa3b3b388e91b9166be1ca02b7c7e41045da6d12985685f0"},
|
||||||
{file = "orjson-3.10.5-cp311-none-win32.whl", hash = "sha256:1146bf85ea37ac421594107195db8bc77104f74bc83e8ee21a2e58596bfb2f04"},
|
{file = "orjson-3.10.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1335d4ef59ab85cab66fe73fd7a4e881c298ee7f63ede918b7faa1b27cbe5212"},
|
||||||
{file = "orjson-3.10.5-cp311-none-win_amd64.whl", hash = "sha256:36a10f43c5f3a55c2f680efe07aa93ef4a342d2960dd2b1b7ea2dd764fe4a37c"},
|
{file = "orjson-3.10.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4bbc6d0af24c1575edc79994c20e1b29e6fb3c6a570371306db0993ecf144dc5"},
|
||||||
{file = "orjson-3.10.5-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:68f85ecae7af14a585a563ac741b0547a3f291de81cd1e20903e79f25170458f"},
|
{file = "orjson-3.10.6-cp311-none-win32.whl", hash = "sha256:450e39ab1f7694465060a0550b3f6d328d20297bf2e06aa947b97c21e5241fbd"},
|
||||||
{file = "orjson-3.10.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28afa96f496474ce60d3340fe8d9a263aa93ea01201cd2bad844c45cd21f5268"},
|
{file = "orjson-3.10.6-cp311-none-win_amd64.whl", hash = "sha256:227df19441372610b20e05bdb906e1742ec2ad7a66ac8350dcfd29a63014a83b"},
|
||||||
{file = "orjson-3.10.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9cd684927af3e11b6e754df80b9ffafd9fb6adcaa9d3e8fdd5891be5a5cad51e"},
|
{file = "orjson-3.10.6-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:ea2977b21f8d5d9b758bb3f344a75e55ca78e3ff85595d248eee813ae23ecdfb"},
|
||||||
{file = "orjson-3.10.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d21b9983da032505f7050795e98b5d9eee0df903258951566ecc358f6696969"},
|
{file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b6f3d167d13a16ed263b52dbfedff52c962bfd3d270b46b7518365bcc2121eed"},
|
||||||
{file = "orjson-3.10.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ad1de7fef79736dde8c3554e75361ec351158a906d747bd901a52a5c9c8d24b"},
|
{file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f710f346e4c44a4e8bdf23daa974faede58f83334289df80bc9cd12fe82573c7"},
|
||||||
{file = "orjson-3.10.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2d97531cdfe9bdd76d492e69800afd97e5930cb0da6a825646667b2c6c6c0211"},
|
{file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7275664f84e027dcb1ad5200b8b18373e9c669b2a9ec33d410c40f5ccf4b257e"},
|
||||||
{file = "orjson-3.10.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d69858c32f09c3e1ce44b617b3ebba1aba030e777000ebdf72b0d8e365d0b2b3"},
|
{file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0943e4c701196b23c240b3d10ed8ecd674f03089198cf503105b474a4f77f21f"},
|
||||||
{file = "orjson-3.10.5-cp312-none-win32.whl", hash = "sha256:64c9cc089f127e5875901ac05e5c25aa13cfa5dbbbd9602bda51e5c611d6e3e2"},
|
{file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:446dee5a491b5bc7d8f825d80d9637e7af43f86a331207b9c9610e2f93fee22a"},
|
||||||
{file = "orjson-3.10.5-cp312-none-win_amd64.whl", hash = "sha256:b2efbd67feff8c1f7728937c0d7f6ca8c25ec81373dc8db4ef394c1d93d13dc5"},
|
{file = "orjson-3.10.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:64c81456d2a050d380786413786b057983892db105516639cb5d3ee3c7fd5148"},
|
||||||
{file = "orjson-3.10.5-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:03b565c3b93f5d6e001db48b747d31ea3819b89abf041ee10ac6988886d18e01"},
|
{file = "orjson-3.10.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:960db0e31c4e52fa0fc3ecbaea5b2d3b58f379e32a95ae6b0ebeaa25b93dfd34"},
|
||||||
{file = "orjson-3.10.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:584c902ec19ab7928fd5add1783c909094cc53f31ac7acfada817b0847975f26"},
|
{file = "orjson-3.10.6-cp312-none-win32.whl", hash = "sha256:a6ea7afb5b30b2317e0bee03c8d34c8181bc5a36f2afd4d0952f378972c4efd5"},
|
||||||
{file = "orjson-3.10.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a35455cc0b0b3a1eaf67224035f5388591ec72b9b6136d66b49a553ce9eb1e6"},
|
{file = "orjson-3.10.6-cp312-none-win_amd64.whl", hash = "sha256:874ce88264b7e655dde4aeaacdc8fd772a7962faadfb41abe63e2a4861abc3dc"},
|
||||||
{file = "orjson-3.10.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1670fe88b116c2745a3a30b0f099b699a02bb3482c2591514baf5433819e4f4d"},
|
{file = "orjson-3.10.6-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:66680eae4c4e7fc193d91cfc1353ad6d01b4801ae9b5314f17e11ba55e934183"},
|
||||||
{file = "orjson-3.10.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:185c394ef45b18b9a7d8e8f333606e2e8194a50c6e3c664215aae8cf42c5385e"},
|
{file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:caff75b425db5ef8e8f23af93c80f072f97b4fb3afd4af44482905c9f588da28"},
|
||||||
{file = "orjson-3.10.5-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ca0b3a94ac8d3886c9581b9f9de3ce858263865fdaa383fbc31c310b9eac07c9"},
|
{file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3722fddb821b6036fd2a3c814f6bd9b57a89dc6337b9924ecd614ebce3271394"},
|
||||||
{file = "orjson-3.10.5-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:dfc91d4720d48e2a709e9c368d5125b4b5899dced34b5400c3837dadc7d6271b"},
|
{file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2c116072a8533f2fec435fde4d134610f806bdac20188c7bd2081f3e9e0133f"},
|
||||||
{file = "orjson-3.10.5-cp38-none-win32.whl", hash = "sha256:c05f16701ab2a4ca146d0bca950af254cb7c02f3c01fca8efbbad82d23b3d9d4"},
|
{file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6eeb13218c8cf34c61912e9df2de2853f1d009de0e46ea09ccdf3d757896af0a"},
|
||||||
{file = "orjson-3.10.5-cp38-none-win_amd64.whl", hash = "sha256:8a11d459338f96a9aa7f232ba95679fc0c7cedbd1b990d736467894210205c09"},
|
{file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:965a916373382674e323c957d560b953d81d7a8603fbeee26f7b8248638bd48b"},
|
||||||
{file = "orjson-3.10.5-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:85c89131d7b3218db1b24c4abecea92fd6c7f9fab87441cfc342d3acc725d807"},
|
{file = "orjson-3.10.6-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:03c95484d53ed8e479cade8628c9cea00fd9d67f5554764a1110e0d5aa2de96e"},
|
||||||
{file = "orjson-3.10.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb66215277a230c456f9038d5e2d84778141643207f85336ef8d2a9da26bd7ca"},
|
{file = "orjson-3.10.6-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:e060748a04cccf1e0a6f2358dffea9c080b849a4a68c28b1b907f272b5127e9b"},
|
||||||
{file = "orjson-3.10.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:51bbcdea96cdefa4a9b4461e690c75ad4e33796530d182bdd5c38980202c134a"},
|
{file = "orjson-3.10.6-cp38-none-win32.whl", hash = "sha256:738dbe3ef909c4b019d69afc19caf6b5ed0e2f1c786b5d6215fbb7539246e4c6"},
|
||||||
{file = "orjson-3.10.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbead71dbe65f959b7bd8cf91e0e11d5338033eba34c114f69078d59827ee139"},
|
{file = "orjson-3.10.6-cp38-none-win_amd64.whl", hash = "sha256:d40f839dddf6a7d77114fe6b8a70218556408c71d4d6e29413bb5f150a692ff7"},
|
||||||
{file = "orjson-3.10.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5df58d206e78c40da118a8c14fc189207fffdcb1f21b3b4c9c0c18e839b5a214"},
|
{file = "orjson-3.10.6-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:697a35a083c4f834807a6232b3e62c8b280f7a44ad0b759fd4dce748951e70db"},
|
||||||
{file = "orjson-3.10.5-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c4057c3b511bb8aef605616bd3f1f002a697c7e4da6adf095ca5b84c0fd43595"},
|
{file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd502f96bf5ea9a61cbc0b2b5900d0dd68aa0da197179042bdd2be67e51a1e4b"},
|
||||||
{file = "orjson-3.10.5-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b39e006b00c57125ab974362e740c14a0c6a66ff695bff44615dcf4a70ce2b86"},
|
{file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f215789fb1667cdc874c1b8af6a84dc939fd802bf293a8334fce185c79cd359b"},
|
||||||
{file = "orjson-3.10.5-cp39-none-win32.whl", hash = "sha256:eded5138cc565a9d618e111c6d5c2547bbdd951114eb822f7f6309e04db0fb47"},
|
{file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a2debd8ddce948a8c0938c8c93ade191d2f4ba4649a54302a7da905a81f00b56"},
|
||||||
{file = "orjson-3.10.5-cp39-none-win_amd64.whl", hash = "sha256:cc28e90a7cae7fcba2493953cff61da5a52950e78dc2dacfe931a317ee3d8de7"},
|
{file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5410111d7b6681d4b0d65e0f58a13be588d01b473822483f77f513c7f93bd3b2"},
|
||||||
{file = "orjson-3.10.5.tar.gz", hash = "sha256:7a5baef8a4284405d96c90c7c62b755e9ef1ada84c2406c24a9ebec86b89f46d"},
|
{file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb1f28a137337fdc18384079fa5726810681055b32b92253fa15ae5656e1dddb"},
|
||||||
|
{file = "orjson-3.10.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:bf2fbbce5fe7cd1aa177ea3eab2b8e6a6bc6e8592e4279ed3db2d62e57c0e1b2"},
|
||||||
|
{file = "orjson-3.10.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:79b9b9e33bd4c517445a62b90ca0cc279b0f1f3970655c3df9e608bc3f91741a"},
|
||||||
|
{file = "orjson-3.10.6-cp39-none-win32.whl", hash = "sha256:30b0a09a2014e621b1adf66a4f705f0809358350a757508ee80209b2d8dae219"},
|
||||||
|
{file = "orjson-3.10.6-cp39-none-win_amd64.whl", hash = "sha256:49e3bc615652617d463069f91b867a4458114c5b104e13b7ae6872e5f79d0844"},
|
||||||
|
{file = "orjson-3.10.6.tar.gz", hash = "sha256:e54b63d0a7c6c54a5f5f726bc93a2078111ef060fec4ecbf34c5db800ca3b3a7"},
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@@ -3777,6 +3787,34 @@ files = [
|
|||||||
{file = "protobuf-4.25.3.tar.gz", hash = "sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c"},
|
{file = "protobuf-4.25.3.tar.gz", hash = "sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c"},
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "psutil"
|
||||||
|
version = "5.9.8"
|
||||||
|
description = "Cross-platform lib for process and system monitoring in Python."
|
||||||
|
optional = true
|
||||||
|
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
|
||||||
|
files = [
|
||||||
|
{file = "psutil-5.9.8-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:26bd09967ae00920df88e0352a91cff1a78f8d69b3ecabbfe733610c0af486c8"},
|
||||||
|
{file = "psutil-5.9.8-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:05806de88103b25903dff19bb6692bd2e714ccf9e668d050d144012055cbca73"},
|
||||||
|
{file = "psutil-5.9.8-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:611052c4bc70432ec770d5d54f64206aa7203a101ec273a0cd82418c86503bb7"},
|
||||||
|
{file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:50187900d73c1381ba1454cf40308c2bf6f34268518b3f36a9b663ca87e65e36"},
|
||||||
|
{file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:02615ed8c5ea222323408ceba16c60e99c3f91639b07da6373fb7e6539abc56d"},
|
||||||
|
{file = "psutil-5.9.8-cp27-none-win32.whl", hash = "sha256:36f435891adb138ed3c9e58c6af3e2e6ca9ac2f365efe1f9cfef2794e6c93b4e"},
|
||||||
|
{file = "psutil-5.9.8-cp27-none-win_amd64.whl", hash = "sha256:bd1184ceb3f87651a67b2708d4c3338e9b10c5df903f2e3776b62303b26cb631"},
|
||||||
|
{file = "psutil-5.9.8-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:aee678c8720623dc456fa20659af736241f575d79429a0e5e9cf88ae0605cc81"},
|
||||||
|
{file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cb6403ce6d8e047495a701dc7c5bd788add903f8986d523e3e20b98b733e421"},
|
||||||
|
{file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d06016f7f8625a1825ba3732081d77c94589dca78b7a3fc072194851e88461a4"},
|
||||||
|
{file = "psutil-5.9.8-cp36-cp36m-win32.whl", hash = "sha256:7d79560ad97af658a0f6adfef8b834b53f64746d45b403f225b85c5c2c140eee"},
|
||||||
|
{file = "psutil-5.9.8-cp36-cp36m-win_amd64.whl", hash = "sha256:27cc40c3493bb10de1be4b3f07cae4c010ce715290a5be22b98493509c6299e2"},
|
||||||
|
{file = "psutil-5.9.8-cp37-abi3-win32.whl", hash = "sha256:bc56c2a1b0d15aa3eaa5a60c9f3f8e3e565303b465dbf57a1b730e7a2b9844e0"},
|
||||||
|
{file = "psutil-5.9.8-cp37-abi3-win_amd64.whl", hash = "sha256:8db4c1b57507eef143a15a6884ca10f7c73876cdf5d51e713151c1236a0e68cf"},
|
||||||
|
{file = "psutil-5.9.8-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:d16bbddf0693323b8c6123dd804100241da461e41d6e332fb0ba6058f630f8c8"},
|
||||||
|
{file = "psutil-5.9.8.tar.gz", hash = "sha256:6be126e3225486dff286a8fb9a06246a5253f4c7c53b475ea5f5ac934e64194c"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.extras]
|
||||||
|
test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pulsar-client"
|
name = "pulsar-client"
|
||||||
version = "3.5.0"
|
version = "3.5.0"
|
||||||
@@ -3921,21 +3959,21 @@ files = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pydantic"
|
name = "pydantic"
|
||||||
version = "2.8.0"
|
version = "2.8.2"
|
||||||
description = "Data validation using Python type hints"
|
description = "Data validation using Python type hints"
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=3.8"
|
python-versions = ">=3.8"
|
||||||
files = [
|
files = [
|
||||||
{file = "pydantic-2.8.0-py3-none-any.whl", hash = "sha256:ead4f3a1e92386a734ca1411cb25d94147cf8778ed5be6b56749047676d6364e"},
|
{file = "pydantic-2.8.2-py3-none-any.whl", hash = "sha256:73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8"},
|
||||||
{file = "pydantic-2.8.0.tar.gz", hash = "sha256:d970ffb9d030b710795878940bd0489842c638e7252fc4a19c3ae2f7da4d6141"},
|
{file = "pydantic-2.8.2.tar.gz", hash = "sha256:6f62c13d067b0755ad1c21a34bdd06c0c12625a22b0fc09c6b149816604f7c2a"},
|
||||||
]
|
]
|
||||||
|
|
||||||
[package.dependencies]
|
[package.dependencies]
|
||||||
annotated-types = ">=0.4.0"
|
annotated-types = ">=0.4.0"
|
||||||
pydantic-core = "2.20.0"
|
pydantic-core = "2.20.1"
|
||||||
typing-extensions = [
|
typing-extensions = [
|
||||||
{version = ">=4.6.1", markers = "python_version < \"3.13\""},
|
|
||||||
{version = ">=4.12.2", markers = "python_version >= \"3.13\""},
|
{version = ">=4.12.2", markers = "python_version >= \"3.13\""},
|
||||||
|
{version = ">=4.6.1", markers = "python_version < \"3.13\""},
|
||||||
]
|
]
|
||||||
|
|
||||||
[package.extras]
|
[package.extras]
|
||||||
@@ -3943,99 +3981,100 @@ email = ["email-validator (>=2.0.0)"]
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pydantic-core"
|
name = "pydantic-core"
|
||||||
version = "2.20.0"
|
version = "2.20.1"
|
||||||
description = "Core functionality for Pydantic validation and serialization"
|
description = "Core functionality for Pydantic validation and serialization"
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=3.8"
|
python-versions = ">=3.8"
|
||||||
files = [
|
files = [
|
||||||
{file = "pydantic_core-2.20.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:e9dcd7fb34f7bfb239b5fa420033642fff0ad676b765559c3737b91f664d4fa9"},
|
{file = "pydantic_core-2.20.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3acae97ffd19bf091c72df4d726d552c473f3576409b2a7ca36b2f535ffff4a3"},
|
||||||
{file = "pydantic_core-2.20.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:649a764d9b0da29816889424697b2a3746963ad36d3e0968784ceed6e40c6355"},
|
{file = "pydantic_core-2.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:41f4c96227a67a013e7de5ff8f20fb496ce573893b7f4f2707d065907bffdbd6"},
|
||||||
{file = "pydantic_core-2.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7701df088d0b05f3460f7ba15aec81ac8b0fb5690367dfd072a6c38cf5b7fdb5"},
|
{file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f239eb799a2081495ea659d8d4a43a8f42cd1fe9ff2e7e436295c38a10c286a"},
|
||||||
{file = "pydantic_core-2.20.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ab760f17c3e792225cdaef31ca23c0aea45c14ce80d8eff62503f86a5ab76bff"},
|
{file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53e431da3fc53360db73eedf6f7124d1076e1b4ee4276b36fb25514544ceb4a3"},
|
||||||
{file = "pydantic_core-2.20.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cb1ad5b4d73cde784cf64580166568074f5ccd2548d765e690546cff3d80937d"},
|
{file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1f62b2413c3a0e846c3b838b2ecd6c7a19ec6793b2a522745b0869e37ab5bc1"},
|
||||||
{file = "pydantic_core-2.20.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b81ec2efc04fc1dbf400647d4357d64fb25543bae38d2d19787d69360aad21c9"},
|
{file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d41e6daee2813ecceea8eda38062d69e280b39df793f5a942fa515b8ed67953"},
|
||||||
{file = "pydantic_core-2.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4a9732a5cad764ba37f3aa873dccb41b584f69c347a57323eda0930deec8e10"},
|
{file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d482efec8b7dc6bfaedc0f166b2ce349df0011f5d2f1f25537ced4cfc34fd98"},
|
||||||
{file = "pydantic_core-2.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6dc85b9e10cc21d9c1055f15684f76fa4facadddcb6cd63abab702eb93c98943"},
|
{file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e93e1a4b4b33daed65d781a57a522ff153dcf748dee70b40c7258c5861e1768a"},
|
||||||
{file = "pydantic_core-2.20.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:21d9f7e24f63fdc7118e6cc49defaab8c1d27570782f7e5256169d77498cf7c7"},
|
{file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7c4ea22b6739b162c9ecaaa41d718dfad48a244909fe7ef4b54c0b530effc5a"},
|
||||||
{file = "pydantic_core-2.20.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8b315685832ab9287e6124b5d74fc12dda31e6421d7f6b08525791452844bc2d"},
|
{file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4f2790949cf385d985a31984907fecb3896999329103df4e4983a4a41e13e840"},
|
||||||
{file = "pydantic_core-2.20.0-cp310-none-win32.whl", hash = "sha256:c3dc8ec8b87c7ad534c75b8855168a08a7036fdb9deeeed5705ba9410721c84d"},
|
{file = "pydantic_core-2.20.1-cp310-none-win32.whl", hash = "sha256:5e999ba8dd90e93d57410c5e67ebb67ffcaadcea0ad973240fdfd3a135506250"},
|
||||||
{file = "pydantic_core-2.20.0-cp310-none-win_amd64.whl", hash = "sha256:85770b4b37bb36ef93a6122601795231225641003e0318d23c6233c59b424279"},
|
{file = "pydantic_core-2.20.1-cp310-none-win_amd64.whl", hash = "sha256:512ecfbefef6dac7bc5eaaf46177b2de58cdf7acac8793fe033b24ece0b9566c"},
|
||||||
{file = "pydantic_core-2.20.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:58e251bb5a5998f7226dc90b0b753eeffa720bd66664eba51927c2a7a2d5f32c"},
|
{file = "pydantic_core-2.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d2a8fa9d6d6f891f3deec72f5cc668e6f66b188ab14bb1ab52422fe8e644f312"},
|
||||||
{file = "pydantic_core-2.20.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:78d584caac52c24240ef9ecd75de64c760bbd0e20dbf6973631815e3ef16ef8b"},
|
{file = "pydantic_core-2.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:175873691124f3d0da55aeea1d90660a6ea7a3cfea137c38afa0a5ffabe37b88"},
|
||||||
{file = "pydantic_core-2.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5084ec9721f82bef5ff7c4d1ee65e1626783abb585f8c0993833490b63fe1792"},
|
{file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37eee5b638f0e0dcd18d21f59b679686bbd18917b87db0193ae36f9c23c355fc"},
|
||||||
{file = "pydantic_core-2.20.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6d0f52684868db7c218437d260e14d37948b094493f2646f22d3dda7229bbe3f"},
|
{file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25e9185e2d06c16ee438ed39bf62935ec436474a6ac4f9358524220f1b236e43"},
|
||||||
{file = "pydantic_core-2.20.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1def125d59a87fe451212a72ab9ed34c118ff771e5473fef4f2f95d8ede26d75"},
|
{file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:150906b40ff188a3260cbee25380e7494ee85048584998c1e66df0c7a11c17a6"},
|
||||||
{file = "pydantic_core-2.20.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b34480fd6778ab356abf1e9086a4ced95002a1e195e8d2fd182b0def9d944d11"},
|
{file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ad4aeb3e9a97286573c03df758fc7627aecdd02f1da04516a86dc159bf70121"},
|
||||||
{file = "pydantic_core-2.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d42669d319db366cb567c3b444f43caa7ffb779bf9530692c6f244fc635a41eb"},
|
{file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3f3ed29cd9f978c604708511a1f9c2fdcb6c38b9aae36a51905b8811ee5cbf1"},
|
||||||
{file = "pydantic_core-2.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:53b06aea7a48919a254b32107647be9128c066aaa6ee6d5d08222325f25ef175"},
|
{file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0dae11d8f5ded51699c74d9548dcc5938e0804cc8298ec0aa0da95c21fff57b"},
|
||||||
{file = "pydantic_core-2.20.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1f038156b696a1c39d763b2080aeefa87ddb4162c10aa9fabfefffc3dd8180fa"},
|
{file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:faa6b09ee09433b87992fb5a2859efd1c264ddc37280d2dd5db502126d0e7f27"},
|
||||||
{file = "pydantic_core-2.20.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3f0f3a4a23717280a5ee3ac4fb1f81d6fde604c9ec5100f7f6f987716bb8c137"},
|
{file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9dc1b507c12eb0481d071f3c1808f0529ad41dc415d0ca11f7ebfc666e66a18b"},
|
||||||
{file = "pydantic_core-2.20.0-cp311-none-win32.whl", hash = "sha256:316fe7c3fec017affd916a0c83d6f1ec697cbbbdf1124769fa73328e7907cc2e"},
|
{file = "pydantic_core-2.20.1-cp311-none-win32.whl", hash = "sha256:fa2fddcb7107e0d1808086ca306dcade7df60a13a6c347a7acf1ec139aa6789a"},
|
||||||
{file = "pydantic_core-2.20.0-cp311-none-win_amd64.whl", hash = "sha256:2d06a7fa437f93782e3f32d739c3ec189f82fca74336c08255f9e20cea1ed378"},
|
{file = "pydantic_core-2.20.1-cp311-none-win_amd64.whl", hash = "sha256:40a783fb7ee353c50bd3853e626f15677ea527ae556429453685ae32280c19c2"},
|
||||||
{file = "pydantic_core-2.20.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:d6f8c49657f3eb7720ed4c9b26624063da14937fc94d1812f1e04a2204db3e17"},
|
{file = "pydantic_core-2.20.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:595ba5be69b35777474fa07f80fc260ea71255656191adb22a8c53aba4479231"},
|
||||||
{file = "pydantic_core-2.20.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ad1bd2f377f56fec11d5cfd0977c30061cd19f4fa199bf138b200ec0d5e27eeb"},
|
{file = "pydantic_core-2.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a4f55095ad087474999ee28d3398bae183a66be4823f753cd7d67dd0153427c9"},
|
||||||
{file = "pydantic_core-2.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed741183719a5271f97d93bbcc45ed64619fa38068aaa6e90027d1d17e30dc8d"},
|
{file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9aa05d09ecf4c75157197f27cdc9cfaeb7c5f15021c6373932bf3e124af029f"},
|
||||||
{file = "pydantic_core-2.20.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d82e5ed3a05f2dcb89c6ead2fd0dbff7ac09bc02c1b4028ece2d3a3854d049ce"},
|
{file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e97fdf088d4b31ff4ba35db26d9cc472ac7ef4a2ff2badeabf8d727b3377fc52"},
|
||||||
{file = "pydantic_core-2.20.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2ba34a099576234671f2e4274e5bc6813b22e28778c216d680eabd0db3f7dad"},
|
{file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc633a9fe1eb87e250b5c57d389cf28998e4292336926b0b6cdaee353f89a237"},
|
||||||
{file = "pydantic_core-2.20.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:879ae6bb08a063b3e1b7ac8c860096d8fd6b48dd9b2690b7f2738b8c835e744b"},
|
{file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d573faf8eb7e6b1cbbcb4f5b247c60ca8be39fe2c674495df0eb4318303137fe"},
|
||||||
{file = "pydantic_core-2.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b0eefc7633a04c0694340aad91fbfd1986fe1a1e0c63a22793ba40a18fcbdc8"},
|
{file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26dc97754b57d2fd00ac2b24dfa341abffc380b823211994c4efac7f13b9e90e"},
|
||||||
{file = "pydantic_core-2.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:73deadd6fd8a23e2f40b412b3ac617a112143c8989a4fe265050fd91ba5c0608"},
|
{file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:33499e85e739a4b60c9dac710c20a08dc73cb3240c9a0e22325e671b27b70d24"},
|
||||||
{file = "pydantic_core-2.20.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:35681445dc85446fb105943d81ae7569aa7e89de80d1ca4ac3229e05c311bdb1"},
|
{file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bebb4d6715c814597f85297c332297c6ce81e29436125ca59d1159b07f423eb1"},
|
||||||
{file = "pydantic_core-2.20.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0f6dd3612a3b9f91f2e63924ea18a4476656c6d01843ca20a4c09e00422195af"},
|
{file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:516d9227919612425c8ef1c9b869bbbee249bc91912c8aaffb66116c0b447ebd"},
|
||||||
{file = "pydantic_core-2.20.0-cp312-none-win32.whl", hash = "sha256:7e37b6bb6e90c2b8412b06373c6978d9d81e7199a40e24a6ef480e8acdeaf918"},
|
{file = "pydantic_core-2.20.1-cp312-none-win32.whl", hash = "sha256:469f29f9093c9d834432034d33f5fe45699e664f12a13bf38c04967ce233d688"},
|
||||||
{file = "pydantic_core-2.20.0-cp312-none-win_amd64.whl", hash = "sha256:7d4df13d1c55e84351fab51383520b84f490740a9f1fec905362aa64590b7a5d"},
|
{file = "pydantic_core-2.20.1-cp312-none-win_amd64.whl", hash = "sha256:035ede2e16da7281041f0e626459bcae33ed998cca6a0a007a5ebb73414ac72d"},
|
||||||
{file = "pydantic_core-2.20.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:d43e7ab3b65e4dc35a7612cfff7b0fd62dce5bc11a7cd198310b57f39847fd6c"},
|
{file = "pydantic_core-2.20.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:0827505a5c87e8aa285dc31e9ec7f4a17c81a813d45f70b1d9164e03a813a686"},
|
||||||
{file = "pydantic_core-2.20.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b6a24d7b5893392f2b8e3b7a0031ae3b14c6c1942a4615f0d8794fdeeefb08b"},
|
{file = "pydantic_core-2.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:19c0fa39fa154e7e0b7f82f88ef85faa2a4c23cc65aae2f5aea625e3c13c735a"},
|
||||||
{file = "pydantic_core-2.20.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b2f13c3e955a087c3ec86f97661d9f72a76e221281b2262956af381224cfc243"},
|
{file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa223cd1e36b642092c326d694d8bf59b71ddddc94cdb752bbbb1c5c91d833b"},
|
||||||
{file = "pydantic_core-2.20.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:72432fd6e868c8d0a6849869e004b8bcae233a3c56383954c228316694920b38"},
|
{file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c336a6d235522a62fef872c6295a42ecb0c4e1d0f1a3e500fe949415761b8a19"},
|
||||||
{file = "pydantic_core-2.20.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d70a8ff2d4953afb4cbe6211f17268ad29c0b47e73d3372f40e7775904bc28fc"},
|
{file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7eb6a0587eded33aeefea9f916899d42b1799b7b14b8f8ff2753c0ac1741edac"},
|
||||||
{file = "pydantic_core-2.20.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e49524917b8d3c2f42cd0d2df61178e08e50f5f029f9af1f402b3ee64574392"},
|
{file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70c8daf4faca8da5a6d655f9af86faf6ec2e1768f4b8b9d0226c02f3d6209703"},
|
||||||
{file = "pydantic_core-2.20.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a4f0f71653b1c1bad0350bc0b4cc057ab87b438ff18fa6392533811ebd01439c"},
|
{file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9fa4c9bf273ca41f940bceb86922a7667cd5bf90e95dbb157cbb8441008482c"},
|
||||||
{file = "pydantic_core-2.20.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:16197e6f4fdecb9892ed2436e507e44f0a1aa2cff3b9306d1c879ea2f9200997"},
|
{file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:11b71d67b4725e7e2a9f6e9c0ac1239bbc0c48cce3dc59f98635efc57d6dac83"},
|
||||||
{file = "pydantic_core-2.20.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:763602504bf640b3ded3bba3f8ed8a1cc2fc6a87b8d55c1c5689f428c49c947e"},
|
{file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:270755f15174fb983890c49881e93f8f1b80f0b5e3a3cc1394a255706cabd203"},
|
||||||
{file = "pydantic_core-2.20.0-cp313-none-win32.whl", hash = "sha256:a3f243f318bd9523277fa123b3163f4c005a3e8619d4b867064de02f287a564d"},
|
{file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c81131869240e3e568916ef4c307f8b99583efaa60a8112ef27a366eefba8ef0"},
|
||||||
{file = "pydantic_core-2.20.0-cp313-none-win_amd64.whl", hash = "sha256:03aceaf6a5adaad3bec2233edc5a7905026553916615888e53154807e404545c"},
|
{file = "pydantic_core-2.20.1-cp313-none-win32.whl", hash = "sha256:b91ced227c41aa29c672814f50dbb05ec93536abf8f43cd14ec9521ea09afe4e"},
|
||||||
{file = "pydantic_core-2.20.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d6f2d8b8da1f03f577243b07bbdd3412eee3d37d1f2fd71d1513cbc76a8c1239"},
|
{file = "pydantic_core-2.20.1-cp313-none-win_amd64.whl", hash = "sha256:65db0f2eefcaad1a3950f498aabb4875c8890438bc80b19362cf633b87a8ab20"},
|
||||||
{file = "pydantic_core-2.20.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a272785a226869416c6b3c1b7e450506152d3844207331f02f27173562c917e0"},
|
{file = "pydantic_core-2.20.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:4745f4ac52cc6686390c40eaa01d48b18997cb130833154801a442323cc78f91"},
|
||||||
{file = "pydantic_core-2.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:efbb412d55a4ffe73963fed95c09ccb83647ec63b711c4b3752be10a56f0090b"},
|
{file = "pydantic_core-2.20.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a8ad4c766d3f33ba8fd692f9aa297c9058970530a32c728a2c4bfd2616d3358b"},
|
||||||
{file = "pydantic_core-2.20.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1e4f46189d8740561b43655263a41aac75ff0388febcb2c9ec4f1b60a0ec12f3"},
|
{file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41e81317dd6a0127cabce83c0c9c3fbecceae981c8391e6f1dec88a77c8a569a"},
|
||||||
{file = "pydantic_core-2.20.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87d3df115f4a3c8c5e4d5acf067d399c6466d7e604fc9ee9acbe6f0c88a0c3cf"},
|
{file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04024d270cf63f586ad41fff13fde4311c4fc13ea74676962c876d9577bcc78f"},
|
||||||
{file = "pydantic_core-2.20.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a340d2bdebe819d08f605e9705ed551c3feb97e4fd71822d7147c1e4bdbb9508"},
|
{file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eaad4ff2de1c3823fddf82f41121bdf453d922e9a238642b1dedb33c4e4f98ad"},
|
||||||
{file = "pydantic_core-2.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:616b9c2f882393d422ba11b40e72382fe975e806ad693095e9a3b67c59ea6150"},
|
{file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26ab812fa0c845df815e506be30337e2df27e88399b985d0bb4e3ecfe72df31c"},
|
||||||
{file = "pydantic_core-2.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:25c46bb2ff6084859bbcfdf4f1a63004b98e88b6d04053e8bf324e115398e9e7"},
|
{file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c5ebac750d9d5f2706654c638c041635c385596caf68f81342011ddfa1e5598"},
|
||||||
{file = "pydantic_core-2.20.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:23425eccef8f2c342f78d3a238c824623836c6c874d93c726673dbf7e56c78c0"},
|
{file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2aafc5a503855ea5885559eae883978c9b6d8c8993d67766ee73d82e841300dd"},
|
||||||
{file = "pydantic_core-2.20.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:52527e8f223ba29608d999d65b204676398009725007c9336651c2ec2d93cffc"},
|
{file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4868f6bd7c9d98904b748a2653031fc9c2f85b6237009d475b1008bfaeb0a5aa"},
|
||||||
{file = "pydantic_core-2.20.0-cp38-none-win32.whl", hash = "sha256:1c3c5b7f70dd19a6845292b0775295ea81c61540f68671ae06bfe4421b3222c2"},
|
{file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa2f457b4af386254372dfa78a2eda2563680d982422641a85f271c859df1987"},
|
||||||
{file = "pydantic_core-2.20.0-cp38-none-win_amd64.whl", hash = "sha256:8093473d7b9e908af1cef30025609afc8f5fd2a16ff07f97440fd911421e4432"},
|
{file = "pydantic_core-2.20.1-cp38-none-win32.whl", hash = "sha256:225b67a1f6d602de0ce7f6c1c3ae89a4aa25d3de9be857999e9124f15dab486a"},
|
||||||
{file = "pydantic_core-2.20.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:ee7785938e407418795e4399b2bf5b5f3cf6cf728077a7f26973220d58d885cf"},
|
{file = "pydantic_core-2.20.1-cp38-none-win_amd64.whl", hash = "sha256:6b507132dcfc0dea440cce23ee2182c0ce7aba7054576efc65634f080dbe9434"},
|
||||||
{file = "pydantic_core-2.20.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0e75794883d635071cf6b4ed2a5d7a1e50672ab7a051454c76446ef1ebcdcc91"},
|
{file = "pydantic_core-2.20.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b03f7941783b4c4a26051846dea594628b38f6940a2fdc0df00b221aed39314c"},
|
||||||
{file = "pydantic_core-2.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:344e352c96e53b4f56b53d24728217c69399b8129c16789f70236083c6ceb2ac"},
|
{file = "pydantic_core-2.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1eedfeb6089ed3fad42e81a67755846ad4dcc14d73698c120a82e4ccf0f1f9f6"},
|
||||||
{file = "pydantic_core-2.20.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:978d4123ad1e605daf1ba5e01d4f235bcf7b6e340ef07e7122e8e9cfe3eb61ab"},
|
{file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:635fee4e041ab9c479e31edda27fcf966ea9614fff1317e280d99eb3e5ab6fe2"},
|
||||||
{file = "pydantic_core-2.20.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3c05eaf6c863781eb834ab41f5963604ab92855822a2062897958089d1335dad"},
|
{file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:77bf3ac639c1ff567ae3b47f8d4cc3dc20f9966a2a6dd2311dcc055d3d04fb8a"},
|
||||||
{file = "pydantic_core-2.20.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bc7e43b4a528ffca8c9151b6a2ca34482c2fdc05e6aa24a84b7f475c896fc51d"},
|
{file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ed1b0132f24beeec5a78b67d9388656d03e6a7c837394f99257e2d55b461611"},
|
||||||
{file = "pydantic_core-2.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:658287a29351166510ebbe0a75c373600cc4367a3d9337b964dada8d38bcc0f4"},
|
{file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6514f963b023aeee506678a1cf821fe31159b925c4b76fe2afa94cc70b3222b"},
|
||||||
{file = "pydantic_core-2.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1dacf660d6de692fe351e8c806e7efccf09ee5184865893afbe8e59be4920b4a"},
|
{file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10d4204d8ca33146e761c79f83cc861df20e7ae9f6487ca290a97702daf56006"},
|
||||||
{file = "pydantic_core-2.20.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3e147fc6e27b9a487320d78515c5f29798b539179f7777018cedf51b7749e4f4"},
|
{file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d036c7187b9422ae5b262badb87a20a49eb6c5238b2004e96d4da1231badef1"},
|
||||||
{file = "pydantic_core-2.20.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c867230d715a3dd1d962c8d9bef0d3168994ed663e21bf748b6e3a529a129aab"},
|
{file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9ebfef07dbe1d93efb94b4700f2d278494e9162565a54f124c404a5656d7ff09"},
|
||||||
{file = "pydantic_core-2.20.0-cp39-none-win32.whl", hash = "sha256:22b813baf0dbf612752d8143a2dbf8e33ccb850656b7850e009bad2e101fc377"},
|
{file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6b9d9bb600328a1ce523ab4f454859e9d439150abb0906c5a1983c146580ebab"},
|
||||||
{file = "pydantic_core-2.20.0-cp39-none-win_amd64.whl", hash = "sha256:3a7235b46c1bbe201f09b6f0f5e6c36b16bad3d0532a10493742f91fbdc8035f"},
|
{file = "pydantic_core-2.20.1-cp39-none-win32.whl", hash = "sha256:784c1214cb6dd1e3b15dd8b91b9a53852aed16671cc3fbe4786f4f1db07089e2"},
|
||||||
{file = "pydantic_core-2.20.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cafde15a6f7feaec2f570646e2ffc5b73412295d29134a29067e70740ec6ee20"},
|
{file = "pydantic_core-2.20.1-cp39-none-win_amd64.whl", hash = "sha256:d2fe69c5434391727efa54b47a1e7986bb0186e72a41b203df8f5b0a19a4f669"},
|
||||||
{file = "pydantic_core-2.20.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:2aec8eeea0b08fd6bc2213d8e86811a07491849fd3d79955b62d83e32fa2ad5f"},
|
{file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a45f84b09ac9c3d35dfcf6a27fd0634d30d183205230a0ebe8373a0e8cfa0906"},
|
||||||
{file = "pydantic_core-2.20.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:840200827984f1c4e114008abc2f5ede362d6e11ed0b5931681884dd41852ff1"},
|
{file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d02a72df14dfdbaf228424573a07af10637bd490f0901cee872c4f434a735b94"},
|
||||||
{file = "pydantic_core-2.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8ea1d8b7df522e5ced34993c423c3bf3735c53df8b2a15688a2f03a7d678800"},
|
{file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2b27e6af28f07e2f195552b37d7d66b150adbaa39a6d327766ffd695799780f"},
|
||||||
{file = "pydantic_core-2.20.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d5b8376a867047bf08910573deb95d3c8dfb976eb014ee24f3b5a61ccc5bee1b"},
|
{file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:084659fac3c83fd674596612aeff6041a18402f1e1bc19ca39e417d554468482"},
|
||||||
{file = "pydantic_core-2.20.0-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d08264b4460326cefacc179fc1411304d5af388a79910832835e6f641512358b"},
|
{file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:242b8feb3c493ab78be289c034a1f659e8826e2233786e36f2893a950a719bb6"},
|
||||||
{file = "pydantic_core-2.20.0-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:7a3639011c2e8a9628466f616ed7fb413f30032b891898e10895a0a8b5857d6c"},
|
{file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc"},
|
||||||
{file = "pydantic_core-2.20.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:05e83ce2f7eba29e627dd8066aa6c4c0269b2d4f889c0eba157233a353053cea"},
|
{file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99"},
|
||||||
{file = "pydantic_core-2.20.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:603a843fea76a595c8f661cd4da4d2281dff1e38c4a836a928eac1a2f8fe88e4"},
|
{file = "pydantic_core-2.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6"},
|
||||||
{file = "pydantic_core-2.20.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:ac76f30d5d3454f4c28826d891fe74d25121a346c69523c9810ebba43f3b1cec"},
|
{file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:407653af5617f0757261ae249d3fba09504d7a71ab36ac057c938572d1bc9331"},
|
||||||
{file = "pydantic_core-2.20.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e3b1d4b1b3f6082849f9b28427ef147a5b46a6132a3dbaf9ca1baa40c88609"},
|
{file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c693e916709c2465b02ca0ad7b387c4f8423d1db7b4649c551f27a529181c5ad"},
|
||||||
{file = "pydantic_core-2.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2761f71faed820e25ec62eacba670d1b5c2709bb131a19fcdbfbb09884593e5a"},
|
{file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b5ff4911aea936a47d9376fd3ab17e970cc543d1b68921886e7f64bd28308d1"},
|
||||||
{file = "pydantic_core-2.20.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a0586cddbf4380e24569b8a05f234e7305717cc8323f50114dfb2051fcbce2a3"},
|
{file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f55a886d74f1808763976ac4efd29b7ed15c69f4d838bbd74d9d09cf6fa86"},
|
||||||
{file = "pydantic_core-2.20.0-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:b8c46a8cf53e849eea7090f331ae2202cd0f1ceb090b00f5902c423bd1e11805"},
|
{file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:964faa8a861d2664f0c7ab0c181af0bea66098b1919439815ca8803ef136fc4e"},
|
||||||
{file = "pydantic_core-2.20.0-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b4a085bd04af7245e140d1b95619fe8abb445a3d7fdf219b3f80c940853268ef"},
|
{file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4dd484681c15e6b9a977c785a345d3e378d72678fd5f1f3c0509608da24f2ac0"},
|
||||||
{file = "pydantic_core-2.20.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:116b326ac82c8b315e7348390f6d30bcfe6e688a7d3f1de50ff7bcc2042a23c2"},
|
{file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a"},
|
||||||
{file = "pydantic_core-2.20.0.tar.gz", hash = "sha256:366be8e64e0cb63d87cf79b4e1765c0703dd6313c729b22e7b9e378db6b96877"},
|
{file = "pydantic_core-2.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7"},
|
||||||
|
{file = "pydantic_core-2.20.1.tar.gz", hash = "sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4"},
|
||||||
]
|
]
|
||||||
|
|
||||||
[package.dependencies]
|
[package.dependencies]
|
||||||
@@ -4163,13 +4202,13 @@ files = [
 
 [[package]]
 name = "pyright"
-version = "1.1.369"
+version = "1.1.370"
 description = "Command line wrapper for pyright"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "pyright-1.1.369-py3-none-any.whl", hash = "sha256:06d5167a8d7be62523ced0265c5d2f1e022e110caf57a25d92f50fb2d07bcda0"},
-    {file = "pyright-1.1.369.tar.gz", hash = "sha256:ad290710072d021e213b98cc7a2f90ae3a48609ef5b978f749346d1a47eb9af8"},
+    {file = "pyright-1.1.370-py3-none-any.whl", hash = "sha256:fc721601e480a69989775bfc210534a6ca0110ebd0c065244a8d3a151294fc61"},
+    {file = "pyright-1.1.370.tar.gz", hash = "sha256:d0d559d506fc41e3297f721aaa05a1b9f06beda5acc9ac64ca371ce94c28f960"},
 ]
 
 [package.dependencies]
@@ -4588,13 +4627,13 @@ files = [
 
 [[package]]
 name = "requests"
-version = "2.32.3"
+version = "2.31.0"
 description = "Python HTTP for Humans."
 optional = false
-python-versions = ">=3.8"
+python-versions = ">=3.7"
 files = [
-    {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"},
-    {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"},
+    {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"},
+    {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"},
 ]
 
 [package.dependencies]
@@ -4994,13 +5033,13 @@ widechars = ["wcwidth"]
 
 [[package]]
 name = "tenacity"
-version = "8.4.2"
+version = "8.5.0"
 description = "Retry code until it succeeds"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "tenacity-8.4.2-py3-none-any.whl", hash = "sha256:9e6f7cf7da729125c7437222f8a522279751cdfbe6b67bfe64f75d3a348661b2"},
-    {file = "tenacity-8.4.2.tar.gz", hash = "sha256:cd80a53a79336edba8489e767f729e4f391c896956b57140b5d7511a64bbd3ef"},
+    {file = "tenacity-8.5.0-py3-none-any.whl", hash = "sha256:b594c2a5945830c267ce6b79a166228323ed52718f30302c1359836112346687"},
+    {file = "tenacity-8.5.0.tar.gz", hash = "sha256:8bc6c0c8a09b31e6cad13c47afbed1a567518250a9a171418582ed8d9c20ca78"},
 ]
 
 [package.extras]
@@ -5227,13 +5266,13 @@ telegram = ["requests"]
 
 [[package]]
 name = "trio"
-version = "0.25.1"
+version = "0.26.0"
 description = "A friendly Python library for async concurrency and I/O"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "trio-0.25.1-py3-none-any.whl", hash = "sha256:e42617ba091e7b2e50c899052e83a3c403101841de925187f61e7b7eaebdf3fb"},
-    {file = "trio-0.25.1.tar.gz", hash = "sha256:9f5314f014ea3af489e77b001861c535005c3858d38ec46b6b071ebfa339d7fb"},
+    {file = "trio-0.26.0-py3-none-any.whl", hash = "sha256:bb9c1b259591af941fccfbabbdc65bc7ed764bd2db76428454c894cd5e3d2032"},
+    {file = "trio-0.26.0.tar.gz", hash = "sha256:67c5ec3265dd4abc7b1d1ab9ca4fe4c25b896f9c93dac73713778adab487f9c4"},
 ]
 
 [package.dependencies]
@@ -6028,9 +6067,10 @@ doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linke
 test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"]
 
 [extras]
+agentops = ["agentops"]
 tools = ["crewai-tools"]
 
 [metadata]
 lock-version = "2.0"
 python-versions = ">=3.10,<=3.13"
-content-hash = "d4ea0d71723ecc2bad629c387dd786b6a96c553b1e3e298516fdfcd2059d1019"
+content-hash = "4f3e5fddb5f0fc8fd143a8abe947ecac443213d595bd0eeed745ccb82dac2312"
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "crewai"
-version = "0.35.7"
+version = "0.36.1"
 description = "Cutting-edge framework for orchestrating role-playing, autonomous AI agents. By fostering collaborative intelligence, CrewAI empowers agents to work together seamlessly, tackling complex tasks."
 authors = ["Joao Moura <joao@crewai.com>"]
 readme = "README.md"
@@ -14,22 +14,24 @@ Repository = "https://github.com/joaomdmoura/crewai"
 [tool.poetry.dependencies]
 python = ">=3.10,<=3.13"
 pydantic = "^2.4.2"
-langchain = ">=0.1.4,<0.2.0"
+langchain = ">0.2,<=0.3"
 openai = "^1.13.3"
 opentelemetry-api = "^1.22.0"
 opentelemetry-sdk = "^1.22.0"
 opentelemetry-exporter-otlp-proto-http = "^1.22.0"
 instructor = "1.3.3"
 regex = "^2023.12.25"
-crewai-tools = { version = "^0.4.5", optional = true }
+crewai-tools = { version = "^0.4.7", optional = true }
 click = "^8.1.7"
 python-dotenv = "^1.0.0"
 appdirs = "^1.4.4"
 jsonref = "^1.1.0"
-embedchain = "^0.1.113"
+agentops = { version = "^0.1.9", optional = true }
+embedchain = "^0.1.114"
 
 [tool.poetry.extras]
 tools = ["crewai-tools"]
+agentops = ["agentops"]
 
 [tool.poetry.group.dev.dependencies]
 isort = "^5.13.2"
@@ -43,7 +45,7 @@ mkdocs-material = { extras = ["imaging"], version = "^9.5.7" }
 mkdocs-material-extensions = "^1.3.1"
 pillow = "^10.2.0"
 cairosvg = "^2.7.1"
-crewai-tools = "^0.4.5"
+crewai-tools = "^0.4.7"
 
 [tool.poetry.group.test.dependencies]
 pytest = "^8.0.0"
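As a side note on the pyproject.toml hunk above: agentops becomes an optional extra, so it is only present when installed explicitly. A minimal, hypothetical sketch of how downstream code can depend on it without making it a hard requirement (the extra name comes from the hunk; the init() call is assumed to match the installed agentops version):

# Hypothetical consumer-side guard; pip install "crewai[agentops]" pulls the optional extra.
try:
    import agentops  # only importable when the "agentops" extra is installed
except ImportError:
    agentops = None

def maybe_start_session() -> None:
    """Initialize AgentOps only when the optional dependency is importable."""
    if agentops is not None:
        agentops.init()  # assumption: the installed agentops exposes init()
    else:
        print("agentops extra not installed; instrumentation disabled")

maybe_start_session()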
@@ -7,19 +7,31 @@ from langchain.tools.render import render_text_description
|
|||||||
from langchain_core.agents import AgentAction
|
from langchain_core.agents import AgentAction
|
||||||
from langchain_core.callbacks import BaseCallbackHandler
|
from langchain_core.callbacks import BaseCallbackHandler
|
||||||
from langchain_openai import ChatOpenAI
|
from langchain_openai import ChatOpenAI
|
||||||
|
|
||||||
from pydantic import Field, InstanceOf, model_validator
|
from pydantic import Field, InstanceOf, model_validator
|
||||||
|
|
||||||
from crewai.agents import CacheHandler, CrewAgentExecutor, CrewAgentParser
|
from crewai.agents import CacheHandler, CrewAgentExecutor, CrewAgentParser
|
||||||
|
from crewai.agents.agent_builder.base_agent import BaseAgent
|
||||||
from crewai.memory.contextual.contextual_memory import ContextualMemory
|
from crewai.memory.contextual.contextual_memory import ContextualMemory
|
||||||
from crewai.tools.agent_tools import AgentTools
|
from crewai.tools.agent_tools import AgentTools
|
||||||
from crewai.utilities import Prompts, Converter
|
from crewai.utilities import Converter, Prompts
|
||||||
from crewai.utilities.constants import TRAINED_AGENTS_DATA_FILE, TRAINING_DATA_FILE
|
from crewai.utilities.constants import TRAINED_AGENTS_DATA_FILE, TRAINING_DATA_FILE
|
||||||
from crewai.utilities.token_counter_callback import TokenCalcHandler
|
from crewai.utilities.token_counter_callback import TokenCalcHandler
|
||||||
from crewai.agents.agent_builder.base_agent import BaseAgent
|
|
||||||
from crewai.utilities.training_handler import CrewTrainingHandler
|
from crewai.utilities.training_handler import CrewTrainingHandler
|
||||||
|
|
||||||
|
agentops = None
|
||||||
|
try:
|
||||||
|
import agentops # type: ignore # Name "agentops" already defined on line 21
|
||||||
|
from agentops import track_agent
|
||||||
|
except ImportError:
|
||||||
|
|
||||||
|
def track_agent():
|
||||||
|
def noop(f):
|
||||||
|
return f
|
||||||
|
|
||||||
|
return noop
|
||||||
|
|
||||||
|
|
||||||
|
@track_agent()
|
||||||
class Agent(BaseAgent):
|
class Agent(BaseAgent):
|
||||||
"""Represents an agent in a system.
|
"""Represents an agent in a system.
|
||||||
|
|
||||||
@@ -48,6 +60,8 @@ class Agent(BaseAgent):
|
|||||||
default=None,
|
default=None,
|
||||||
description="Maximum execution time for an agent to execute a task",
|
description="Maximum execution time for an agent to execute a task",
|
||||||
)
|
)
|
||||||
|
agent_ops_agent_name: str = None # type: ignore # Incompatible types in assignment (expression has type "None", variable has type "str")
|
||||||
|
agent_ops_agent_id: str = None # type: ignore # Incompatible types in assignment (expression has type "None", variable has type "str")
|
||||||
cache_handler: InstanceOf[CacheHandler] = Field(
|
cache_handler: InstanceOf[CacheHandler] = Field(
|
||||||
default=None, description="An instance of the CacheHandler class."
|
default=None, description="An instance of the CacheHandler class."
|
||||||
)
|
)
|
||||||
@@ -76,7 +90,9 @@ class Agent(BaseAgent):
|
|||||||
response_template: Optional[str] = Field(
|
response_template: Optional[str] = Field(
|
||||||
default=None, description="Response format for the agent."
|
default=None, description="Response format for the agent."
|
||||||
)
|
)
|
||||||
|
tools_results: Optional[List[Any]] = Field(
|
||||||
|
default=[], description="Results of the tools used by the agent."
|
||||||
|
)
|
||||||
allow_code_execution: Optional[bool] = Field(
|
allow_code_execution: Optional[bool] = Field(
|
||||||
default=False, description="Enable code execution for the agent."
|
default=False, description="Enable code execution for the agent."
|
||||||
)
|
)
|
||||||
@@ -84,10 +100,11 @@ class Agent(BaseAgent):
|
|||||||
def __init__(__pydantic_self__, **data):
|
def __init__(__pydantic_self__, **data):
|
||||||
config = data.pop("config", {})
|
config = data.pop("config", {})
|
||||||
super().__init__(**config, **data)
|
super().__init__(**config, **data)
|
||||||
|
__pydantic_self__.agent_ops_agent_name = __pydantic_self__.role
|
||||||
|
|
||||||
@model_validator(mode="after")
|
@model_validator(mode="after")
|
||||||
def set_agent_executor(self) -> "Agent":
|
def set_agent_executor(self) -> "Agent":
|
||||||
"""Ensure agent executor and token process is set."""
|
"""Ensure agent executor and token process are set."""
|
||||||
if hasattr(self.llm, "model_name"):
|
if hasattr(self.llm, "model_name"):
|
||||||
token_handler = TokenCalcHandler(self.llm.model_name, self._token_process)
|
token_handler = TokenCalcHandler(self.llm.model_name, self._token_process)
|
||||||
|
|
||||||
@@ -101,6 +118,13 @@ class Agent(BaseAgent):
|
|||||||
):
|
):
|
||||||
self.llm.callbacks.append(token_handler)
|
self.llm.callbacks.append(token_handler)
|
||||||
|
|
||||||
|
if agentops and not any(
|
||||||
|
isinstance(handler, agentops.LangchainCallbackHandler)
|
||||||
|
for handler in self.llm.callbacks
|
||||||
|
):
|
||||||
|
agentops.stop_instrumenting()
|
||||||
|
self.llm.callbacks.append(agentops.LangchainCallbackHandler())
|
||||||
|
|
||||||
if not self.agent_executor:
|
if not self.agent_executor:
|
||||||
if not self.cache_handler:
|
if not self.cache_handler:
|
||||||
self.cache_handler = CacheHandler()
|
self.cache_handler = CacheHandler()
|
||||||
@@ -124,8 +148,7 @@ class Agent(BaseAgent):
|
|||||||
Output of the agent
|
Output of the agent
|
||||||
"""
|
"""
|
||||||
if self.tools_handler:
|
if self.tools_handler:
|
||||||
# type: ignore # Incompatible types in assignment (expression has type "dict[Never, Never]", variable has type "ToolCalling")
|
self.tools_handler.last_used_tool = {} # type: ignore # Incompatible types in assignment (expression has type "dict[Never, Never]", variable has type "ToolCalling")
|
||||||
self.tools_handler.last_used_tool = {}
|
|
||||||
|
|
||||||
task_prompt = task.prompt()
|
task_prompt = task.prompt()
|
||||||
|
|
||||||
@@ -145,8 +168,8 @@ class Agent(BaseAgent):
|
|||||||
task_prompt += self.i18n.slice("memory").format(memory=memory)
|
task_prompt += self.i18n.slice("memory").format(memory=memory)
|
||||||
|
|
||||||
tools = tools or self.tools
|
tools = tools or self.tools
|
||||||
# type: ignore # Argument 1 to "_parse_tools" of "Agent" has incompatible type "list[Any] | None"; expected "list[Any]"
|
|
||||||
parsed_tools = self._parse_tools(tools or [])
|
parsed_tools = self._parse_tools(tools or []) # type: ignore # Argument 1 to "_parse_tools" of "Agent" has incompatible type "list[Any] | None"; expected "list[Any]"
|
||||||
self.create_agent_executor(tools=tools)
|
self.create_agent_executor(tools=tools)
|
||||||
self.agent_executor.tools = parsed_tools
|
self.agent_executor.tools = parsed_tools
|
||||||
self.agent_executor.task = task
|
self.agent_executor.task = task
|
||||||
@@ -168,6 +191,14 @@ class Agent(BaseAgent):
|
|||||||
)["output"]
|
)["output"]
|
||||||
if self.max_rpm:
|
if self.max_rpm:
|
||||||
self._rpm_controller.stop_rpm_counter()
|
self._rpm_controller.stop_rpm_counter()
|
||||||
|
|
||||||
|
# If there was any tool in self.tools_results that had result_as_answer
|
||||||
|
# set to True, return the results of the last tool that had
|
||||||
|
# result_as_answer set to True
|
||||||
|
for tool_result in self.tools_results: # type: ignore # Item "None" of "list[Any] | None" has no attribute "__iter__" (not iterable)
|
||||||
|
if tool_result.get("result_as_answer", False):
|
||||||
|
result = tool_result["result"]
|
||||||
|
|
||||||
return result
|
return result
|
||||||
|
|
||||||
def format_log_to_str(
|
def format_log_to_str(
|
||||||
@@ -268,7 +299,7 @@ class Agent(BaseAgent):
|
|||||||
def get_output_converter(self, llm, text, model, instructions):
|
def get_output_converter(self, llm, text, model, instructions):
|
||||||
return Converter(llm=llm, text=text, model=model, instructions=instructions)
|
return Converter(llm=llm, text=text, model=model, instructions=instructions)
|
||||||
|
|
||||||
def _parse_tools(self, tools: List[Any]) -> List[LangChainTool]:
|
def _parse_tools(self, tools: List[Any]) -> List[LangChainTool]: # type: ignore # Function "langchain_core.tools.tool" is not valid as a type
|
||||||
"""Parse tools to be used for the task."""
|
"""Parse tools to be used for the task."""
|
||||||
tools_list = []
|
tools_list = []
|
||||||
try:
|
try:
|
||||||
|
|||||||
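The agent.py hunk above guards the agentops import and falls back to a no-op decorator when the package is absent. A self-contained sketch of that same pattern (the names mirror the diff, but nothing here depends on crewAI or agentops being installed):

from typing import Any, Callable

agentops = None
try:
    import agentops  # type: ignore  # optional dependency
    from agentops import track_agent
except ImportError:
    def track_agent() -> Callable[[Any], Any]:
        """Fallback when agentops is missing: return a decorator that does nothing."""
        def noop(cls: Any) -> Any:
            return cls
        return noop

@track_agent()
class DemoAgent:
    """Decorated either by agentops.track_agent or by the no-op fallback."""
    pass

print(DemoAgent)  # the class is usable either way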
@@ -1,22 +1,26 @@
|
|||||||
from copy import deepcopy
|
|
||||||
import uuid
|
import uuid
|
||||||
from typing import Any, Dict, List, Optional
|
|
||||||
from abc import ABC, abstractmethod
|
from abc import ABC, abstractmethod
|
||||||
|
from copy import copy as shallow_copy
|
||||||
|
from typing import Any, Dict, List, Optional, TypeVar
|
||||||
|
|
||||||
from pydantic import (
|
from pydantic import (
|
||||||
UUID4,
|
UUID4,
|
||||||
BaseModel,
|
BaseModel,
|
||||||
|
ConfigDict,
|
||||||
Field,
|
Field,
|
||||||
InstanceOf,
|
InstanceOf,
|
||||||
|
PrivateAttr,
|
||||||
field_validator,
|
field_validator,
|
||||||
model_validator,
|
model_validator,
|
||||||
ConfigDict,
|
|
||||||
PrivateAttr,
|
|
||||||
)
|
)
|
||||||
from pydantic_core import PydanticCustomError
|
from pydantic_core import PydanticCustomError
|
||||||
|
|
||||||
from crewai.utilities import I18N, RPMController, Logger
|
from crewai.agents.agent_builder.utilities.base_token_process import TokenProcess
|
||||||
from crewai.agents import CacheHandler, ToolsHandler
|
from crewai.agents.cache.cache_handler import CacheHandler
|
||||||
from crewai.utilities.token_counter_callback import TokenProcess
|
from crewai.agents.tools_handler import ToolsHandler
|
||||||
|
from crewai.utilities import I18N, Logger, RPMController
|
||||||
|
|
||||||
|
T = TypeVar("T", bound="BaseAgent")
|
||||||
|
|
||||||
|
|
||||||
class BaseAgent(ABC, BaseModel):
|
class BaseAgent(ABC, BaseModel):
|
||||||
@@ -187,6 +191,31 @@ class BaseAgent(ABC, BaseModel):
|
|||||||
"""Get the converter class for the agent to create json/pydantic outputs."""
|
"""Get the converter class for the agent to create json/pydantic outputs."""
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
def copy(self: T) -> T: # type: ignore # Signature of "copy" incompatible with supertype "BaseModel"
|
||||||
|
"""Create a deep copy of the Agent."""
|
||||||
|
exclude = {
|
||||||
|
"id",
|
||||||
|
"_logger",
|
||||||
|
"_rpm_controller",
|
||||||
|
"_request_within_rpm_limit",
|
||||||
|
"_token_process",
|
||||||
|
"agent_executor",
|
||||||
|
"tools",
|
||||||
|
"tools_handler",
|
||||||
|
"cache_handler",
|
||||||
|
"llm",
|
||||||
|
}
|
||||||
|
|
||||||
|
# Copy llm and clear callbacks
|
||||||
|
existing_llm = shallow_copy(self.llm)
|
||||||
|
existing_llm.callbacks = []
|
||||||
|
copied_data = self.model_dump(exclude=exclude)
|
||||||
|
copied_data = {k: v for k, v in copied_data.items() if v is not None}
|
||||||
|
|
||||||
|
copied_agent = type(self)(**copied_data, llm=existing_llm, tools=self.tools)
|
||||||
|
|
||||||
|
return copied_agent
|
||||||
|
|
||||||
def interpolate_inputs(self, inputs: Dict[str, Any]) -> None:
|
def interpolate_inputs(self, inputs: Dict[str, Any]) -> None:
|
||||||
"""Interpolate inputs into the agent description and backstory."""
|
"""Interpolate inputs into the agent description and backstory."""
|
||||||
if self._original_role is None:
|
if self._original_role is None:
|
||||||
@@ -216,35 +245,6 @@ class BaseAgent(ABC, BaseModel):
|
|||||||
def increment_formatting_errors(self) -> None:
|
def increment_formatting_errors(self) -> None:
|
||||||
self.formatting_errors += 1
|
self.formatting_errors += 1
|
||||||
|
|
||||||
def copy(self):
|
|
||||||
exclude = {
|
|
||||||
"id",
|
|
||||||
"_logger",
|
|
||||||
"_rpm_controller",
|
|
||||||
"_request_within_rpm_limit",
|
|
||||||
"token_process",
|
|
||||||
"agent_executor",
|
|
||||||
"tools",
|
|
||||||
"tools_handler",
|
|
||||||
"cache_handler",
|
|
||||||
"crew",
|
|
||||||
"llm",
|
|
||||||
}
|
|
||||||
|
|
||||||
copied_data = self.model_dump(exclude=exclude, exclude_unset=True)
|
|
||||||
copied_agent = self.__class__(**copied_data)
|
|
||||||
|
|
||||||
# Copy mutable attributes separately
|
|
||||||
copied_agent.tools = deepcopy(self.tools)
|
|
||||||
copied_agent.config = deepcopy(self.config)
|
|
||||||
|
|
||||||
# Preserve original values for interpolation
|
|
||||||
copied_agent._original_role = self._original_role
|
|
||||||
copied_agent._original_goal = self._original_goal
|
|
||||||
copied_agent._original_backstory = self._original_backstory
|
|
||||||
|
|
||||||
return copied_agent
|
|
||||||
|
|
||||||
def set_rpm_controller(self, rpm_controller: RPMController) -> None:
|
def set_rpm_controller(self, rpm_controller: RPMController) -> None:
|
||||||
"""Set the rpm controller for the agent.
|
"""Set the rpm controller for the agent.
|
||||||
|
|
||||||
|
|||||||
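The base_agent.py hunk above replaces the old copy() with one that excludes runtime-only state (logger, RPM controller, executor, cache, the LLM) before rebuilding the model. A reduced pydantic sketch of that exclude-then-reconstruct approach, with invented field names purely for illustration:

from copy import copy as shallow_copy
from typing import Any, Optional
from pydantic import BaseModel, Field

class DummyLLM:
    """Stand-in for a chat-model object; only used for the demo."""
    def __init__(self) -> None:
        self.callbacks: list = []

class MiniAgent(BaseModel):
    role: str
    goal: str
    llm: Optional[Any] = Field(default=None)              # runtime object, handled separately
    agent_executor: Optional[Any] = Field(default=None)   # runtime object, rebuilt lazily

    def copy(self) -> "MiniAgent":  # type: ignore[override]
        # Exclude non-serializable runtime state, then reconstruct from the dumped data.
        exclude = {"llm", "agent_executor"}
        data = {k: v for k, v in self.model_dump(exclude=exclude).items() if v is not None}
        cloned_llm = shallow_copy(self.llm) if self.llm is not None else None
        return type(self)(**data, llm=cloned_llm)

a = MiniAgent(role="Researcher", goal="Summarize findings", llm=DummyLLM())
b = a.copy()
print(b.role, b.llm is a.llm)  # same role, distinct shallow-copied llm object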
@@ -1,24 +1,44 @@
|
|||||||
import time
|
import time
|
||||||
|
from typing import TYPE_CHECKING, Optional
|
||||||
|
|
||||||
from crewai.memory.entity.entity_memory_item import EntityMemoryItem
|
from crewai.memory.entity.entity_memory_item import EntityMemoryItem
|
||||||
from crewai.memory.long_term.long_term_memory_item import LongTermMemoryItem
|
from crewai.memory.long_term.long_term_memory_item import LongTermMemoryItem
|
||||||
from crewai.memory.short_term.short_term_memory_item import ShortTermMemoryItem
|
from crewai.memory.short_term.short_term_memory_item import ShortTermMemoryItem
|
||||||
from crewai.utilities.converter import ConverterError
|
from crewai.utilities.converter import ConverterError
|
||||||
from crewai.utilities.evaluators.task_evaluator import TaskEvaluator
|
from crewai.utilities.evaluators.task_evaluator import TaskEvaluator
|
||||||
|
from crewai.utilities import I18N
|
||||||
|
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from crewai.crew import Crew
|
||||||
|
from crewai.task import Task
|
||||||
|
from crewai.agents.agent_builder.base_agent import BaseAgent
|
||||||
|
|
||||||
|
|
||||||
class CrewAgentExecutorMixin:
|
class CrewAgentExecutorMixin:
|
||||||
|
crew: Optional["Crew"]
|
||||||
|
crew_agent: Optional["BaseAgent"]
|
||||||
|
task: Optional["Task"]
|
||||||
|
iterations: int
|
||||||
|
force_answer_max_iterations: int
|
||||||
|
have_forced_answer: bool
|
||||||
|
_i18n: I18N
|
||||||
|
|
||||||
def _should_force_answer(self) -> bool:
|
def _should_force_answer(self) -> bool:
|
||||||
|
"""Determine if a forced answer is required based on iteration count."""
|
||||||
return (
|
return (
|
||||||
self.iterations == self.force_answer_max_iterations
|
self.iterations == self.force_answer_max_iterations
|
||||||
) and not self.have_forced_answer
|
) and not self.have_forced_answer
|
||||||
|
|
||||||
def _create_short_term_memory(self, output) -> None:
|
def _create_short_term_memory(self, output) -> None:
|
||||||
|
"""Create and save a short-term memory item if conditions are met."""
|
||||||
if (
|
if (
|
||||||
self.crew
|
self.crew
|
||||||
and self.crew.memory
|
and self.crew_agent
|
||||||
|
and self.task
|
||||||
and "Action: Delegate work to coworker" not in output.log
|
and "Action: Delegate work to coworker" not in output.log
|
||||||
):
|
):
|
||||||
|
try:
|
||||||
memory = ShortTermMemoryItem(
|
memory = ShortTermMemoryItem(
|
||||||
data=output.log,
|
data=output.log,
|
||||||
agent=self.crew_agent.role,
|
agent=self.crew_agent.role,
|
||||||
@@ -26,10 +46,26 @@ class CrewAgentExecutorMixin:
|
|||||||
"observation": self.task.description,
|
"observation": self.task.description,
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
if (
|
||||||
|
hasattr(self.crew, "_short_term_memory")
|
||||||
|
and self.crew._short_term_memory
|
||||||
|
):
|
||||||
self.crew._short_term_memory.save(memory)
|
self.crew._short_term_memory.save(memory)
|
||||||
|
except Exception as e:
|
||||||
|
print(f"Failed to add to short term memory: {e}")
|
||||||
|
pass
|
||||||
|
|
||||||
def _create_long_term_memory(self, output) -> None:
|
def _create_long_term_memory(self, output) -> None:
|
||||||
if self.crew and self.crew.memory:
|
"""Create and save long-term and entity memory items based on evaluation."""
|
||||||
|
if (
|
||||||
|
self.crew
|
||||||
|
and self.crew.memory
|
||||||
|
and self.crew._long_term_memory
|
||||||
|
and self.crew._entity_memory
|
||||||
|
and self.task
|
||||||
|
and self.crew_agent
|
||||||
|
):
|
||||||
|
try:
|
||||||
ltm_agent = TaskEvaluator(self.crew_agent)
|
ltm_agent = TaskEvaluator(self.crew_agent)
|
||||||
evaluation = ltm_agent.evaluate(self.task, output.log)
|
evaluation = ltm_agent.evaluate(self.task, output.log)
|
||||||
|
|
||||||
@@ -54,12 +90,20 @@ class CrewAgentExecutorMixin:
|
|||||||
name=entity.name,
|
name=entity.name,
|
||||||
type=entity.type,
|
type=entity.type,
|
||||||
description=entity.description,
|
description=entity.description,
|
||||||
relationships="\n".join([f"- {r}" for r in entity.relationships]),
|
relationships="\n".join(
|
||||||
|
[f"- {r}" for r in entity.relationships]
|
||||||
|
),
|
||||||
)
|
)
|
||||||
self.crew._entity_memory.save(entity_memory)
|
self.crew._entity_memory.save(entity_memory)
|
||||||
|
except AttributeError as e:
|
||||||
|
print(f"Missing attributes for long term memory: {e}")
|
||||||
|
pass
|
||||||
|
except Exception as e:
|
||||||
|
print(f"Failed to add to long term memory: {e}")
|
||||||
|
pass
|
||||||
|
|
||||||
def _ask_human_input(self, final_answer: dict) -> str:
|
def _ask_human_input(self, final_answer: dict) -> str:
|
||||||
"""Get human input."""
|
"""Prompt human input for final decision making."""
|
||||||
return input(
|
return input(
|
||||||
self._i18n.slice("getting_input").format(final_answer=final_answer)
|
self._i18n.slice("getting_input").format(final_answer=final_answer)
|
||||||
)
|
)
|
||||||
|
|||||||
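The executor-mixin hunk above adds precondition checks and try/except guards around the short-term and long-term memory writes. A generic sketch of that defensive-save shape, using stand-in classes rather than the crewAI memory types:

from typing import Optional

class MemoryStore:
    """Stand-in for a short- or long-term memory backend."""
    def save(self, item: str) -> None:
        print(f"saved: {item}")

class ExecutorSketch:
    def __init__(self, memory: Optional[MemoryStore], task: Optional[str]) -> None:
        self.memory = memory
        self.task = task

    def record(self, log: str) -> None:
        # Only attempt the write when every collaborator is present, and never
        # let a memory failure break the main execution path.
        if self.memory and self.task and "Delegate work" not in log:
            try:
                self.memory.save(log)
            except Exception as exc:  # broad guard, mirroring the diff's intent
                print(f"Failed to add to memory: {exc}")

ExecutorSketch(MemoryStore(), task="demo").record("final answer")
ExecutorSketch(None, task="demo").record("skipped silently")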
@@ -1,6 +1,8 @@
|
|||||||
from abc import ABC, abstractmethod
|
from abc import ABC, abstractmethod
|
||||||
from typing import List, Optional, Union
|
from typing import List, Optional, Union
|
||||||
|
|
||||||
from pydantic import BaseModel, Field
|
from pydantic import BaseModel, Field
|
||||||
|
|
||||||
from crewai.agents.agent_builder.base_agent import BaseAgent
|
from crewai.agents.agent_builder.base_agent import BaseAgent
|
||||||
from crewai.task import Task
|
from crewai.task import Task
|
||||||
from crewai.utilities import I18N
|
from crewai.utilities import I18N
|
||||||
@@ -53,7 +55,7 @@ class BaseAgentTools(BaseModel, ABC):
|
|||||||
# {"task": "....", "coworker": "...."}
|
# {"task": "....", "coworker": "...."}
|
||||||
agent_name = agent.casefold().replace('"', "").replace("\n", "")
|
agent_name = agent.casefold().replace('"', "").replace("\n", "")
|
||||||
|
|
||||||
agent = [
|
agent = [ # type: ignore # Incompatible types in assignment (expression has type "list[BaseAgent]", variable has type "str | None")
|
||||||
available_agent
|
available_agent
|
||||||
for available_agent in self.agents
|
for available_agent in self.agents
|
||||||
if available_agent.role.casefold().replace("\n", "") == agent_name
|
if available_agent.role.casefold().replace("\n", "") == agent_name
|
||||||
@@ -73,9 +75,9 @@ class BaseAgentTools(BaseModel, ABC):
|
|||||||
)
|
)
|
||||||
|
|
||||||
agent = agent[0]
|
agent = agent[0]
|
||||||
task = Task(
|
task = Task( # type: ignore # Incompatible types in assignment (expression has type "Task", variable has type "str")
|
||||||
description=task,
|
description=task,
|
||||||
agent=agent,
|
agent=agent,
|
||||||
expected_output="Your best answer to your coworker asking you this, accounting for the context shared.",
|
expected_output="Your best answer to your coworker asking you this, accounting for the context shared.",
|
||||||
)
|
)
|
||||||
return agent.execute_task(task, context)
|
return agent.execute_task(task, context) # type: ignore # "str" has no attribute "execute_task"
|
||||||
|
|||||||
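For the delegation-tool hunk above: the coworker name produced by the LLM is normalized (casefold, quotes and newlines stripped) before being matched against agent roles. A small stand-alone sketch of just that matching step; the role strings are made up:

from typing import List, Optional

def find_coworker(requested: str, roles: List[str]) -> Optional[str]:
    """Match a requested coworker against known roles, tolerating case and stray quotes."""
    wanted = requested.casefold().replace('"', "").replace("\n", "")
    matches = [r for r in roles if r.casefold().replace("\n", "") == wanted]
    return matches[0] if matches else None

print(find_coworker('"Senior Researcher"\n', ["Senior Researcher", "Writer"]))  # Senior Researcher
print(find_coworker("unknown", ["Senior Researcher", "Writer"]))                # None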
@@ -1,7 +1,6 @@
|
|||||||
from abc import ABC, abstractmethod
|
from abc import ABC, abstractmethod
|
||||||
from typing import Any, Optional
|
from typing import Any, Optional
|
||||||
|
|
||||||
|
|
||||||
from pydantic import BaseModel, Field, PrivateAttr
|
from pydantic import BaseModel, Field, PrivateAttr
|
||||||
|
|
||||||
|
|
||||||
@@ -27,8 +26,8 @@ class OutputConverter(BaseModel, ABC):
|
|||||||
llm: Any = Field(description="The language model to be used to convert the text.")
|
llm: Any = Field(description="The language model to be used to convert the text.")
|
||||||
model: Any = Field(description="The model to be used to convert the text.")
|
model: Any = Field(description="The model to be used to convert the text.")
|
||||||
instructions: str = Field(description="Conversion instructions to the LLM.")
|
instructions: str = Field(description="Conversion instructions to the LLM.")
|
||||||
max_attemps: Optional[int] = Field(
|
max_attempts: Optional[int] = Field(
|
||||||
description="Max number of attemps to try to get the output formated.",
|
description="Max number of attempts to try to get the output formated.",
|
||||||
default=3,
|
default=3,
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -42,7 +41,7 @@ class OutputConverter(BaseModel, ABC):
|
|||||||
"""Convert text to json."""
|
"""Convert text to json."""
|
||||||
pass
|
pass
|
||||||
|
|
||||||
@abstractmethod
|
@abstractmethod # type: ignore # Name "_is_gpt" already defined on line 25
|
||||||
def _is_gpt(self, llm):
|
def _is_gpt(self, llm): # type: ignore # Name "_is_gpt" already defined on line 25
|
||||||
"""Return if llm provided is of gpt from openai."""
|
"""Return if llm provided is of gpt from openai."""
|
||||||
pass
|
pass
|
||||||
|
|||||||
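The converter hunk above renames max_attemps to max_attempts. As an aside, a generic sketch of how such a bound is typically honoured by a retry loop; this loop is illustrative only, not the crewAI Converter implementation:

import json
from typing import Any, Callable

def convert_with_retries(produce: Callable[[], str], max_attempts: int = 3) -> Any:
    """Call produce() until its output parses as JSON or the attempt budget is spent."""
    last_error: Exception | None = None
    for attempt in range(1, max_attempts + 1):
        try:
            return json.loads(produce())
        except json.JSONDecodeError as exc:
            last_error = exc
            print(f"attempt {attempt} failed: {exc}")
    raise ValueError(f"no valid output after {max_attempts} attempts") from last_error

print(convert_with_retries(lambda: '{"ok": true}'))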
@@ -1,18 +1,27 @@
|
|||||||
import threading
|
import threading
|
||||||
import time
|
import time
|
||||||
from typing import Any, Dict, Iterator, List, Optional, Tuple, Union
|
from typing import (
|
||||||
|
Any,
|
||||||
|
Dict,
|
||||||
|
Iterator,
|
||||||
|
List,
|
||||||
|
Optional,
|
||||||
|
Tuple,
|
||||||
|
Union,
|
||||||
|
)
|
||||||
|
|
||||||
from langchain.agents import AgentExecutor
|
from langchain.agents import AgentExecutor
|
||||||
from langchain.agents.agent import ExceptionTool
|
from langchain.agents.agent import ExceptionTool
|
||||||
from langchain.callbacks.manager import CallbackManagerForChainRun
|
from langchain.callbacks.manager import CallbackManagerForChainRun
|
||||||
from langchain_core.agents import AgentAction, AgentFinish, AgentStep
|
from langchain_core.agents import AgentAction, AgentFinish, AgentStep
|
||||||
from langchain_core.exceptions import OutputParserException
|
from langchain_core.exceptions import OutputParserException
|
||||||
|
|
||||||
from langchain_core.tools import BaseTool
|
from langchain_core.tools import BaseTool
|
||||||
from langchain_core.utils.input import get_color_mapping
|
from langchain_core.utils.input import get_color_mapping
|
||||||
from pydantic import InstanceOf
|
from pydantic import InstanceOf
|
||||||
from crewai.agents.agent_builder.base_agent_executor_mixin import CrewAgentExecutorMixin
|
|
||||||
|
|
||||||
|
from crewai.agents.agent_builder.base_agent_executor_mixin import (
|
||||||
|
CrewAgentExecutorMixin,
|
||||||
|
)
|
||||||
from crewai.agents.tools_handler import ToolsHandler
|
from crewai.agents.tools_handler import ToolsHandler
|
||||||
from crewai.tools.tool_usage import ToolUsage, ToolUsageErrorException
|
from crewai.tools.tool_usage import ToolUsage, ToolUsageErrorException
|
||||||
from crewai.utilities import I18N
|
from crewai.utilities import I18N
|
||||||
@@ -36,7 +45,7 @@ class CrewAgentExecutor(AgentExecutor, CrewAgentExecutorMixin):
|
|||||||
tools_handler: Optional[InstanceOf[ToolsHandler]] = None
|
tools_handler: Optional[InstanceOf[ToolsHandler]] = None
|
||||||
max_iterations: Optional[int] = 15
|
max_iterations: Optional[int] = 15
|
||||||
have_forced_answer: bool = False
|
have_forced_answer: bool = False
|
||||||
force_answer_max_iterations: Optional[int] = None
|
force_answer_max_iterations: Optional[int] = None # type: ignore # Incompatible types in assignment (expression has type "int | None", base class "CrewAgentExecutorMixin" defined the type as "int")
|
||||||
step_callback: Optional[Any] = None
|
step_callback: Optional[Any] = None
|
||||||
system_template: Optional[str] = None
|
system_template: Optional[str] = None
|
||||||
prompt_template: Optional[str] = None
|
prompt_template: Optional[str] = None
|
||||||
@@ -233,6 +242,7 @@ class CrewAgentExecutor(AgentExecutor, CrewAgentExecutorMixin):
|
|||||||
tools_names=self.tools_names,
|
tools_names=self.tools_names,
|
||||||
function_calling_llm=self.function_calling_llm,
|
function_calling_llm=self.function_calling_llm,
|
||||||
task=self.task,
|
task=self.task,
|
||||||
|
agent=self.crew_agent,
|
||||||
action=agent_action,
|
action=agent_action,
|
||||||
)
|
)
|
||||||
tool_calling = tool_usage.parse(agent_action.log)
|
tool_calling = tool_usage.parse(agent_action.log)
|
||||||
|
|||||||
@@ -6,7 +6,7 @@ authors = ["Your Name <you@example.com>"]
 
 [tool.poetry.dependencies]
 python = ">=3.10,<=3.13"
-crewai = { extras = ["tools"], version = "^0.35.7" }
+crewai = { extras = ["tools"], version = "^0.36.1" }
 
 [tool.poetry.scripts]
 {{folder_name}} = "{{folder_name}}.main:run"
@@ -1,7 +1,7 @@
|
|||||||
import asyncio
|
import asyncio
|
||||||
import json
|
import json
|
||||||
import uuid
|
import uuid
|
||||||
from typing import Any, Dict, List, Optional, Union
|
from typing import Any, Dict, List, Optional, Tuple, Union
|
||||||
|
|
||||||
from langchain_core.callbacks import BaseCallbackHandler
|
from langchain_core.callbacks import BaseCallbackHandler
|
||||||
from pydantic import (
|
from pydantic import (
|
||||||
@@ -28,9 +28,15 @@ from crewai.task import Task
|
|||||||
from crewai.telemetry import Telemetry
|
from crewai.telemetry import Telemetry
|
||||||
from crewai.tools.agent_tools import AgentTools
|
from crewai.tools.agent_tools import AgentTools
|
||||||
from crewai.utilities import I18N, FileHandler, Logger, RPMController
|
from crewai.utilities import I18N, FileHandler, Logger, RPMController
|
||||||
|
from crewai.utilities.constants import TRAINED_AGENTS_DATA_FILE, TRAINING_DATA_FILE
|
||||||
from crewai.utilities.evaluators.task_evaluator import TaskEvaluator
|
from crewai.utilities.evaluators.task_evaluator import TaskEvaluator
|
||||||
from crewai.utilities.training_handler import CrewTrainingHandler
|
from crewai.utilities.training_handler import CrewTrainingHandler
|
||||||
|
|
||||||
|
try:
|
||||||
|
import agentops
|
||||||
|
except ImportError:
|
||||||
|
agentops = None
|
||||||
|
|
||||||
|
|
||||||
class Crew(BaseModel):
|
class Crew(BaseModel):
|
||||||
"""
|
"""
|
||||||
@@ -219,6 +225,33 @@ class Crew(BaseModel):
|
|||||||
agent.set_rpm_controller(self._rpm_controller)
|
agent.set_rpm_controller(self._rpm_controller)
|
||||||
return self
|
return self
|
||||||
|
|
||||||
|
@model_validator(mode="after")
|
||||||
|
def validate_tasks(self):
|
||||||
|
if self.process == Process.sequential:
|
||||||
|
for task in self.tasks:
|
||||||
|
if task.agent is None:
|
||||||
|
raise PydanticCustomError(
|
||||||
|
"missing_agent_in_task",
|
||||||
|
f"Sequential process error: Agent is missing in the task with the following description: {task.description}", # type: ignore # Argument of type "str" cannot be assigned to parameter "message_template" of type "LiteralString"
|
||||||
|
{},
|
||||||
|
)
|
||||||
|
|
||||||
|
return self
|
||||||
|
|
||||||
|
@model_validator(mode="after")
|
||||||
|
def check_tasks_in_hierarchical_process_not_async(self):
|
||||||
|
"""Validates that the tasks in hierarchical process are not flagged with async_execution."""
|
||||||
|
if self.process == Process.hierarchical:
|
||||||
|
for task in self.tasks:
|
||||||
|
if task.async_execution:
|
||||||
|
raise PydanticCustomError(
|
||||||
|
"async_execution_in_hierarchical_process",
|
||||||
|
"Hierarchical process error: Tasks cannot be flagged with async_execution.",
|
||||||
|
{},
|
||||||
|
)
|
||||||
|
|
||||||
|
return self
|
||||||
|
|
||||||
def _setup_from_config(self):
|
def _setup_from_config(self):
|
||||||
assert self.config is not None, "Config should not be None."
|
assert self.config is not None, "Config should not be None."
|
||||||
|
|
||||||
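The hunk above adds model validators to Crew: sequential crews reject tasks that have no agent, and hierarchical crews reject async tasks. A reduced pydantic sketch of the first rule, with toy models standing in for the real Crew and Task classes:

from typing import List, Optional
from pydantic import BaseModel, ValidationError, model_validator
from pydantic_core import PydanticCustomError

class ToyTask(BaseModel):
    description: str
    agent: Optional[str] = None

class ToyCrew(BaseModel):
    process: str = "sequential"
    tasks: List[ToyTask] = []

    @model_validator(mode="after")
    def validate_tasks(self) -> "ToyCrew":
        # Sequential execution needs an owner for every task.
        if self.process == "sequential":
            for task in self.tasks:
                if task.agent is None:
                    raise PydanticCustomError(
                        "missing_agent_in_task",
                        "Sequential process error: Agent is missing in the task: {description}",
                        {"description": task.description},
                    )
        return self

try:
    ToyCrew(tasks=[ToyTask(description="write report")])
except ValidationError as exc:
    print(exc.errors()[0]["type"])  # missing_agent_in_task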
@@ -257,6 +290,9 @@ class Crew(BaseModel):
|
|||||||
for agent in self.agents:
|
for agent in self.agents:
|
||||||
agent.allow_delegation = False
|
agent.allow_delegation = False
|
||||||
|
|
||||||
|
CrewTrainingHandler(TRAINING_DATA_FILE).initialize_file()
|
||||||
|
CrewTrainingHandler(TRAINED_AGENTS_DATA_FILE).initialize_file()
|
||||||
|
|
||||||
def train(self, n_iterations: int, inputs: Optional[Dict[str, Any]] = {}) -> None:
|
def train(self, n_iterations: int, inputs: Optional[Dict[str, Any]] = {}) -> None:
|
||||||
"""Trains the crew for a given number of iterations."""
|
"""Trains the crew for a given number of iterations."""
|
||||||
self._setup_for_training()
|
self._setup_for_training()
|
||||||
@@ -265,14 +301,14 @@ class Crew(BaseModel):
|
|||||||
self._train_iteration = n_iteration
|
self._train_iteration = n_iteration
|
||||||
self.kickoff(inputs=inputs)
|
self.kickoff(inputs=inputs)
|
||||||
|
|
||||||
training_data = CrewTrainingHandler("training_data.pkl").load()
|
training_data = CrewTrainingHandler(TRAINING_DATA_FILE).load()
|
||||||
|
|
||||||
for agent in self.agents:
|
for agent in self.agents:
|
||||||
result = TaskEvaluator(agent).evaluate_training_data(
|
result = TaskEvaluator(agent).evaluate_training_data(
|
||||||
training_data=training_data, agent_id=str(agent.id)
|
training_data=training_data, agent_id=str(agent.id)
|
||||||
)
|
)
|
||||||
|
|
||||||
CrewTrainingHandler("trained_agents_data.pkl").save_trained_data(
|
CrewTrainingHandler(TRAINED_AGENTS_DATA_FILE).save_trained_data(
|
||||||
agent_id=str(agent.role), trained_data=result.model_dump()
|
agent_id=str(agent.role), trained_data=result.model_dump()
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -282,29 +318,25 @@ class Crew(BaseModel):
|
|||||||
) -> Union[str, Dict[str, Any]]:
|
) -> Union[str, Dict[str, Any]]:
|
||||||
"""Starts the crew to work on its assigned tasks."""
|
"""Starts the crew to work on its assigned tasks."""
|
||||||
self._execution_span = self._telemetry.crew_execution_span(self, inputs)
|
self._execution_span = self._telemetry.crew_execution_span(self, inputs)
|
||||||
# type: ignore # Argument 1 to "_interpolate_inputs" of "Crew" has incompatible type "dict[str, Any] | None"; expected "dict[str, Any]"
|
|
||||||
self._interpolate_inputs(inputs)
|
self._interpolate_inputs(inputs) # type: ignore # Argument 1 to "_interpolate_inputs" of "Crew" has incompatible type "dict[str, Any] | None"; expected "dict[str, Any]"
|
||||||
self._set_tasks_callbacks()
|
self._set_tasks_callbacks()
|
||||||
|
|
||||||
i18n = I18N(prompt_file=self.prompt_file)
|
i18n = I18N(prompt_file=self.prompt_file)
|
||||||
|
|
||||||
for agent in self.agents:
|
for agent in self.agents:
|
||||||
# type: ignore # Argument 1 to "_interpolate_inputs" of "Crew" has incompatible type "dict[str, Any] | None"; expected "dict[str, Any]"
|
|
||||||
agent.i18n = i18n
|
agent.i18n = i18n
|
||||||
# type: ignore[attr-defined] # Argument 1 to "_interpolate_inputs" of "Crew" has incompatible type "dict[str, Any] | None"; expected "dict[str, Any]"
|
# type: ignore[attr-defined] # Argument 1 to "_interpolate_inputs" of "Crew" has incompatible type "dict[str, Any] | None"; expected "dict[str, Any]"
|
||||||
agent.crew = self # type: ignore[attr-defined]
|
agent.crew = self # type: ignore[attr-defined]
|
||||||
# TODO: Create an AgentFunctionCalling protocol for future refactoring
|
# TODO: Create an AgentFunctionCalling protocol for future refactoring
|
||||||
if (
|
if not agent.function_calling_llm: # type: ignore # "BaseAgent" has no attribute "function_calling_llm"
|
||||||
hasattr(agent, "function_calling_llm")
|
agent.function_calling_llm = self.function_calling_llm # type: ignore # "BaseAgent" has no attribute "function_calling_llm"
|
||||||
and not agent.function_calling_llm
|
|
||||||
):
|
|
||||||
agent.function_calling_llm = self.function_calling_llm
|
|
||||||
|
|
||||||
if hasattr(agent, "allow_code_execution") and agent.allow_code_execution:
|
if agent.allow_code_execution: # type: ignore # BaseAgent" has no attribute "allow_code_execution"
|
||||||
agent.tools += agent.get_code_execution_tools()
|
agent.tools += agent.get_code_execution_tools() # type: ignore # "BaseAgent" has no attribute "get_code_execution_tools"; maybe "get_delegation_tools"?
|
||||||
|
|
||||||
if hasattr(agent, "step_callback") and not agent.step_callback:
|
if not agent.step_callback: # type: ignore # "BaseAgent" has no attribute "step_callback"
|
||||||
agent.step_callback = self.step_callback
|
agent.step_callback = self.step_callback # type: ignore # "BaseAgent" has no attribute "step_callback"
|
||||||
|
|
||||||
agent.create_agent_executor()
|
agent.create_agent_executor()
|
||||||
|
|
||||||
@@ -313,17 +345,13 @@ class Crew(BaseModel):
|
|||||||
if self.process == Process.sequential:
|
if self.process == Process.sequential:
|
||||||
result = self._run_sequential_process()
|
result = self._run_sequential_process()
|
||||||
elif self.process == Process.hierarchical:
|
elif self.process == Process.hierarchical:
|
||||||
# type: ignore # Unpacking a string is disallowed
|
result, manager_metrics = self._run_hierarchical_process() # type: ignore # Incompatible types in assignment (expression has type "str | dict[str, Any]", variable has type "str")
|
||||||
result, manager_metrics = self._run_hierarchical_process()
|
|
||||||
# type: ignore # Cannot determine type of "manager_metrics"
|
|
||||||
metrics.append(manager_metrics)
|
metrics.append(manager_metrics)
|
||||||
else:
|
else:
|
||||||
raise NotImplementedError(
|
raise NotImplementedError(
|
||||||
f"The process '{self.process}' is not implemented yet."
|
f"The process '{self.process}' is not implemented yet."
|
||||||
)
|
)
|
||||||
metrics = metrics + [
|
metrics += [agent._token_process.get_summary() for agent in self.agents]
|
||||||
agent._token_process.get_summary() for agent in self.agents
|
|
||||||
]
|
|
||||||
|
|
||||||
self.usage_metrics = {
|
self.usage_metrics = {
|
||||||
key: sum([m[key] for m in metrics if m is not None]) for key in metrics[0]
|
key: sum([m[key] for m in metrics if m is not None]) for key in metrics[0]
|
||||||
@@ -331,21 +359,32 @@ class Crew(BaseModel):
|
|||||||
|
|
||||||
return result
|
return result
|
||||||
|
|
||||||
def kickoff_for_each(self, inputs: List[Dict[str, Any]]) -> List:
|
def kickoff_for_each(
|
||||||
|
self, inputs: List[Dict[str, Any]]
|
||||||
|
) -> List[Union[str, Dict[str, Any]]]:
|
||||||
"""Executes the Crew's workflow for each input in the list and aggregates results."""
|
"""Executes the Crew's workflow for each input in the list and aggregates results."""
|
||||||
results = []
|
results = []
|
||||||
|
|
||||||
|
# Initialize the parent crew's usage metrics
|
||||||
|
total_usage_metrics = {
|
||||||
|
"total_tokens": 0,
|
||||||
|
"prompt_tokens": 0,
|
||||||
|
"completion_tokens": 0,
|
||||||
|
"successful_requests": 0,
|
||||||
|
}
|
||||||
|
|
||||||
for input_data in inputs:
|
for input_data in inputs:
|
||||||
crew = self.copy()
|
crew = self.copy()
|
||||||
|
|
||||||
for task in crew.tasks:
|
output = crew.kickoff(inputs=input_data)
|
||||||
task.interpolate_inputs(input_data)
|
|
||||||
for agent in crew.agents:
|
if crew.usage_metrics:
|
||||||
agent.interpolate_inputs(input_data)
|
for key in total_usage_metrics:
|
||||||
|
total_usage_metrics[key] += crew.usage_metrics.get(key, 0)
|
||||||
|
|
||||||
output = crew.kickoff()
|
|
||||||
results.append(output)
|
results.append(output)
|
||||||
|
|
||||||
|
self.usage_metrics = total_usage_metrics
|
||||||
return results
|
return results
|
||||||
|
|
||||||
async def kickoff_async(
|
async def kickoff_async(
|
||||||
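The kickoff_for_each hunk above accumulates each copied crew's usage_metrics into a single parent total. The accumulation itself is just a keyed sum; a tiny stand-alone sketch with made-up numbers:

from typing import Dict, Iterable

def aggregate_usage(per_run: Iterable[Dict[str, int]]) -> Dict[str, int]:
    """Sum token-usage dictionaries key by key, treating missing keys as zero."""
    totals = {
        "total_tokens": 0,
        "prompt_tokens": 0,
        "completion_tokens": 0,
        "successful_requests": 0,
    }
    for metrics in per_run:
        for key in totals:
            totals[key] += metrics.get(key, 0)
    return totals

runs = [
    {"total_tokens": 120, "prompt_tokens": 80, "completion_tokens": 40, "successful_requests": 2},
    {"total_tokens": 60, "prompt_tokens": 35, "completion_tokens": 25, "successful_requests": 1},
]
print(aggregate_usage(runs))  # total_tokens 180, prompt_tokens 115, ...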
@@ -355,28 +394,39 @@ class Crew(BaseModel):
|
|||||||
return await asyncio.to_thread(self.kickoff, inputs)
|
return await asyncio.to_thread(self.kickoff, inputs)
|
||||||
|
|
||||||
async def kickoff_for_each_async(self, inputs: List[Dict]) -> List[Any]:
|
async def kickoff_for_each_async(self, inputs: List[Dict]) -> List[Any]:
|
||||||
async def run_crew(input_data):
|
crew_copies = [self.copy() for _ in inputs]
|
||||||
crew = self.copy()
|
|
||||||
|
|
||||||
for task in crew.tasks:
|
async def run_crew(crew, input_data):
|
||||||
task.interpolate_inputs(input_data)
|
return await crew.kickoff_async(inputs=input_data)
|
||||||
for agent in crew.agents:
|
|
||||||
agent.interpolate_inputs(input_data)
|
|
||||||
|
|
||||||
return await crew.kickoff_async()
|
tasks = [
|
||||||
|
asyncio.create_task(run_crew(crew_copies[i], inputs[i]))
|
||||||
tasks = [asyncio.create_task(run_crew(input_data)) for input_data in inputs]
|
for i in range(len(inputs))
|
||||||
|
]
|
||||||
|
|
||||||
results = await asyncio.gather(*tasks)
|
results = await asyncio.gather(*tasks)
|
||||||
|
|
||||||
|
total_usage_metrics = {
|
||||||
|
"total_tokens": 0,
|
||||||
|
"prompt_tokens": 0,
|
||||||
|
"completion_tokens": 0,
|
||||||
|
"successful_requests": 0,
|
||||||
|
}
|
||||||
|
for crew in crew_copies:
|
||||||
|
if crew.usage_metrics:
|
||||||
|
for key in total_usage_metrics:
|
||||||
|
total_usage_metrics[key] += crew.usage_metrics.get(key, 0)
|
||||||
|
|
||||||
|
self.usage_metrics = total_usage_metrics
|
||||||
|
|
||||||
return results
|
return results
|
||||||
|
|
||||||
def _run_sequential_process(self) -> str:
|
def _run_sequential_process(self) -> Union[str, Dict[str, Any]]:
|
||||||
"""Executes tasks sequentially and returns the final output."""
|
"""Executes tasks sequentially and returns the final output."""
|
||||||
task_output = ""
|
task_output = None
|
||||||
token_usage = []
|
|
||||||
for task in self.tasks:
|
for task in self.tasks:
|
||||||
if task.agent.allow_delegation: # type: ignore # Item "None" of "Agent | None" has no attribute "allow_delegation"
|
if task.agent and task.agent.allow_delegation:
|
||||||
agents_for_delegation = [
|
agents_for_delegation = [
|
||||||
agent for agent in self.agents if agent != task.agent
|
agent for agent in self.agents if agent != task.agent
|
||||||
]
|
]
|
||||||
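The kickoff_for_each_async hunk above pre-builds one crew copy per input and awaits them together with asyncio.gather. A generic asyncio sketch of that fan-out shape, with a dummy runner standing in for a crew copy:

import asyncio
from typing import Any, Dict, List

class DummyRunner:
    """Stand-in for an independent crew copy."""
    async def kickoff_async(self, inputs: Dict[str, Any]) -> str:
        await asyncio.sleep(0)  # pretend to do work
        return f"handled {inputs['topic']}"

async def run_all(inputs: List[Dict[str, Any]]) -> List[str]:
    runners = [DummyRunner() for _ in inputs]  # one isolated copy per input
    tasks = [asyncio.create_task(r.kickoff_async(i)) for r, i in zip(runners, inputs)]
    return list(await asyncio.gather(*tasks))

print(asyncio.run(run_all([{"topic": "AI"}, {"topic": "biotech"}])))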
@@ -400,27 +450,26 @@ class Crew(BaseModel):
|
|||||||
|
|
||||||
role = task.agent.role if task.agent is not None else "None"
|
role = task.agent.role if task.agent is not None else "None"
|
||||||
self._logger.log("debug", f"== [{role}] Task output: {task_output}\n\n")
|
self._logger.log("debug", f"== [{role}] Task output: {task_output}\n\n")
|
||||||
token_summ = task.agent._token_process.get_summary()
|
|
||||||
|
|
||||||
token_usage.append(token_summ)
|
|
||||||
|
|
||||||
if self.output_log_file:
|
if self.output_log_file:
|
||||||
self._file_handler.log(agent=role, task=task_output, status="completed")
|
self._file_handler.log(agent=role, task=task_output, status="completed")
|
||||||
|
|
||||||
token_usage_formatted = self.aggregate_token_usage(token_usage)
|
|
||||||
self._finish_execution(task_output)
|
self._finish_execution(task_output)
|
||||||
|
|
||||||
# type: ignore # Incompatible return value type (got "tuple[str, Any]", expected "str")
|
token_usage = self.calculate_usage_metrics()
|
||||||
return self._format_output(task_output, token_usage_formatted)
|
|
||||||
|
|
||||||
def _run_hierarchical_process(self) -> Union[str, Dict[str, Any]]:
|
return self._format_output(task_output if task_output else "", token_usage)
|
||||||
|
|
||||||
|
def _run_hierarchical_process(
|
||||||
|
self,
|
||||||
|
) -> Tuple[Union[str, Dict[str, Any]], Dict[str, Any]]:
|
||||||
"""Creates and assigns a manager agent to make sure the crew completes the tasks."""
|
"""Creates and assigns a manager agent to make sure the crew completes the tasks."""
|
||||||
|
|
||||||
i18n = I18N(prompt_file=self.prompt_file)
|
i18n = I18N(prompt_file=self.prompt_file)
|
||||||
if self.manager_agent is not None:
|
if self.manager_agent is not None:
|
||||||
self.manager_agent.allow_delegation = True
|
self.manager_agent.allow_delegation = True
|
||||||
manager = self.manager_agent
|
manager = self.manager_agent
|
||||||
if len(manager.tools) > 0:
|
if manager.tools is not None and len(manager.tools) > 0:
|
||||||
raise Exception("Manager agent should not have tools")
|
raise Exception("Manager agent should not have tools")
|
||||||
manager.tools = self.manager_agent.get_delegation_tools(self.agents)
|
manager.tools = self.manager_agent.get_delegation_tools(self.agents)
|
||||||
else:
|
else:
|
||||||
@@ -432,9 +481,10 @@ class Crew(BaseModel):
                llm=self.manager_llm,
                verbose=self.verbose,
            )
            self.manager_agent = manager

        task_output = ""
        token_usage = []
        task_output = None

        for task in self.tasks:
            self._logger.log("debug", f"Working Agent: {manager.role}")
            self._logger.log("info", f"Starting Task: {task.description}")
@@ -444,14 +494,15 @@ class Crew(BaseModel):
                    agent=manager.role, task=task.description, status="started"
                )

            if task.agent:
                manager.tools = task.agent.get_delegation_tools([task.agent])
            else:
                manager.tools = manager.get_delegation_tools(self.agents)
            task_output = task.execute(
                agent=manager, context=task_output, tools=manager.tools
            )

            self._logger.log("debug", f"[{manager.role}] Task output: {task_output}")
            if hasattr(task, "agent._token_process"):
                token_summ = task.agent._token_process.get_summary()
                token_usage.append(token_summ)
            if self.output_log_file:
                self._file_handler.log(
                    agent=manager.role, task=task_output, status="completed"
@@ -459,14 +510,11 @@ class Crew(BaseModel):

        self._finish_execution(task_output)

        # type: ignore # Incompatible return value type (got "tuple[str, Any]", expected "str")
        token_usage = self.calculate_usage_metrics()
        manager_token_usage = manager._token_process.get_summary()
        token_usage.append(manager_token_usage)
        token_usage_formatted = self.aggregate_token_usage(token_usage)

        return self._format_output(
            task_output, token_usage_formatted
            task_output if task_output else "", token_usage
        ), manager_token_usage
        ), token_usage

    def copy(self):
        """Create a deep copy of the Crew."""
@@ -481,12 +529,13 @@ class Crew(BaseModel):
            "_short_term_memory",
            "_long_term_memory",
            "_entity_memory",
            "_telemetry",
            "agents",
            "tasks",
        }

        cloned_agents = [agent.copy() for agent in self.agents]
        cloned_tasks = [task.copy() for task in self.tasks]
        cloned_tasks = [task.copy(cloned_agents) for task in self.tasks]

        copied_data = self.model_dump(exclude=exclude)
        copied_data = {k: v for k, v in copied_data.items() if v is not None}
@@ -524,8 +573,9 @@ class Crew(BaseModel):
        Formats the output of the crew execution.
        If full_output is True, then returned data type will be a dictionary else returned outputs are string
        """

        if self.full_output:
            return {  # type: ignore # Incompatible return value type (got "dict[str, Sequence[str | TaskOutput | None]]", expected "str")
            return {
                "final_output": output,
                "tasks_outputs": [task.output for task in self.tasks if task],
                "usage_metrics": token_usage,
@@ -536,13 +586,33 @@ class Crew(BaseModel):
    def _finish_execution(self, output) -> None:
        if self.max_rpm:
            self._rpm_controller.stop_rpm_counter()
        if agentops:
            agentops.end_session(
                end_state="Success", end_state_reason="Finished Execution"
            )
        self._telemetry.end_crew(self, output)

    def calculate_usage_metrics(self) -> Dict[str, int]:
        """Calculates and returns the usage metrics."""
        total_usage_metrics = {
            "total_tokens": 0,
            "prompt_tokens": 0,
            "completion_tokens": 0,
            "successful_requests": 0,
        }

        for agent in self.agents:
            if hasattr(agent, "_token_process"):
                token_sum = agent._token_process.get_summary()
                for key in total_usage_metrics:
                    total_usage_metrics[key] += token_sum.get(key, 0)

        if self.manager_agent and hasattr(self.manager_agent, "_token_process"):
            token_sum = self.manager_agent._token_process.get_summary()
            for key in total_usage_metrics:
                total_usage_metrics[key] += token_sum.get(key, 0)

        return total_usage_metrics

    def __repr__(self):
        return f"Crew(id={self.id}, process={self.process}, number_of_agents={len(self.agents)}, number_of_tasks={len(self.tasks)})"

    def aggregate_token_usage(self, token_usage_list: List[Dict[str, Any]]):
        return {
            key: sum([m[key] for m in token_usage_list if m is not None])
            for key in token_usage_list[0]
        }
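The new calculate_usage_metrics above replaces the per-run aggregate_token_usage helper with a single summing pass over agent token summaries. A minimal standalone sketch of that summing pattern follows; the dictionaries and numbers are invented examples, not real crew output.

from typing import Dict, List

def sum_usage(summaries: List[Dict[str, int]]) -> Dict[str, int]:
    # Same key set as the totals dict built by calculate_usage_metrics above.
    totals = {
        "total_tokens": 0,
        "prompt_tokens": 0,
        "completion_tokens": 0,
        "successful_requests": 0,
    }
    for summary in summaries:
        for key in totals:
            totals[key] += summary.get(key, 0)
    return totals

# Example with made-up per-agent summaries:
print(sum_usage([
    {"total_tokens": 120, "prompt_tokens": 90, "completion_tokens": 30, "successful_requests": 1},
    {"total_tokens": 80, "prompt_tokens": 50, "completion_tokens": 30, "successful_requests": 1},
]))
# -> {'total_tokens': 200, 'prompt_tokens': 140, 'completion_tokens': 60, 'successful_requests': 2}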
@@ -1,9 +1,9 @@
import os
import re
import threading
import uuid
from copy import deepcopy
from typing import Any, Dict, List, Optional, Type
from concurrent.futures import Future, ThreadPoolExecutor
from copy import copy
from typing import Any, Dict, List, Optional, Type, Union

from langchain_openai import ChatOpenAI
from opentelemetry.trace import Span
@@ -13,7 +13,10 @@ from pydantic_core import PydanticCustomError
from crewai.agents.agent_builder.base_agent import BaseAgent
from crewai.tasks.task_output import TaskOutput
from crewai.telemetry.telemetry import Telemetry
from crewai.utilities import I18N, ConverterError, Printer
from crewai.utilities.converter import ConverterError
from crewai.utilities.converter import Converter
from crewai.utilities.i18n import I18N
from crewai.utilities.printer import Printer
from crewai.utilities.pydantic_schema_parser import PydanticSchemaParser
@@ -95,12 +98,16 @@ class Task(BaseModel):
        description="Whether the task should have a human review the final answer of the agent",
        default=False,
    )
    converter_cls: Optional[Type[Converter]] = Field(
        description="A converter class used to export structured output",
        default=None,
    )

    _telemetry: Telemetry
    _execution_span: Span | None = None
    _original_description: str | None = None
    _original_expected_output: str | None = None
    _thread: threading.Thread | None = None
    _future: Future | None = None

    def __init__(__pydantic_self__, **data):
        config = data.pop("config", {})
@@ -159,20 +166,20 @@ class Task(BaseModel):
        """Wait for asynchronous task completion and return the output."""
        assert self.async_execution, "Task is not set to be executed asynchronously."

        if self._thread:
            self._thread.join()
            self._thread = None
        if self._future:
            self._future.result()  # Wait for the future to complete
            self._future = None

        assert self.output, "Task output is not set."

        return self.output.exported_output

    def execute(  # type: ignore # Missing return statement
    def execute(
        self,
        agent: BaseAgent | None = None,
        context: Optional[str] = None,
        tools: Optional[List[Any]] = None,
    ) -> str:
    ) -> str | None:
        """Execute the task.

        Returns:
@@ -184,29 +191,28 @@ class Task(BaseModel):
        agent = agent or self.agent
        if not agent:
            raise Exception(
                f"The task '{self.description}' has no agent assigned, therefore it can't be executed directly and should be executed in a Crew using a specific process that support that, like hierarchical."
                f"The task '{self.description}' has no agent assigned, therefore it can't be executed directly "
                "and should be executed in a Crew using a specific process that support that, like hierarchical."
            )

        if self.context:
            # type: ignore # Incompatible types in assignment (expression has type "list[Never]", variable has type "str | None")
            context = []
            internal_context = []
            for task in self.context:
                if task.async_execution:
                    task.wait_for_completion()
                if task.output:
                    # type: ignore # Item "str" of "str | None" has no attribute "append"
                    context.append(task.output.raw_output)
                    internal_context.append(task.output.raw_output)
            # type: ignore # Argument 1 to "join" of "str" has incompatible type "str | None"; expected "Iterable[str]"
            context = "\n".join(context)
            context = "\n".join(internal_context)

        self.prompt_context = context
        tools = tools or self.tools

        if self.async_execution:
            self._thread = threading.Thread(
                target=self._execute, args=(agent, self, context, tools)
            )
            self._thread.start()
            with ThreadPoolExecutor() as executor:
                self._future = executor.submit(
                    self._execute, agent, self, context, tools
                )
            return None
        else:
            result = self._execute(
                task=self,
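The hunk above moves Task's asynchronous execution from threading.Thread to a ThreadPoolExecutor Future, with wait_for_completion now calling future.result(). A minimal sketch of that submit/result pattern; run_task is a stand-in for the real _execute, not the crewAI implementation.

from concurrent.futures import Future, ThreadPoolExecutor

def run_task(name: str) -> str:
    # Stand-in for the real task execution work.
    return f"output of {name}"

executor = ThreadPoolExecutor(max_workers=1)
future: Future = executor.submit(run_task, "research task")  # start in the background
result = future.result()  # block until done, mirroring wait_for_completion()
executor.shutdown()
print(result)  # output of research task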
@@ -216,7 +222,7 @@ class Task(BaseModel):
            )
            return result

    def _execute(self, agent, task, context, tools):
    def _execute(self, agent: "BaseAgent", task, context, tools) -> str | None:
        result = agent.execute_task(
            task=task,
            context=context,
@@ -224,7 +230,6 @@ class Task(BaseModel):
        )
        exported_output = self._export_output(result)

        # type: ignore # the responses are usually str but need to figure out a more elegant solution here
        self.output = TaskOutput(
            description=self.description,
            exported_output=exported_output,
@@ -274,7 +279,7 @@ class Task(BaseModel):
        """Increment the delegations counter."""
        self.delegations += 1

    def copy(self):
    def copy(self, agents: Optional[List["BaseAgent"]] = None) -> "Task":  # type: ignore # Signature of "copy" incompatible with supertype "BaseModel"
        """Create a deep copy of the Task."""
        exclude = {
            "id",
@@ -289,8 +294,12 @@ class Task(BaseModel):
        cloned_context = (
            [task.copy() for task in self.context] if self.context else None
        )
        cloned_agent = self.agent.copy() if self.agent else None
        cloned_tools = deepcopy(self.tools) if self.tools else []

        def get_agent_by_role(role: str) -> Union["BaseAgent", None]:
            return next((agent for agent in agents if agent.role == role), None)  # type: ignore # Item "None" of "list[BaseAgent] | None" has no attribute "__iter__" (not iterable)

        cloned_agent = get_agent_by_role(self.agent.role) if self.agent else None
        cloned_tools = copy(self.tools) if self.tools else []

        copied_task = Task(
            **copied_data,
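Task.copy now reuses an already-cloned agent by matching on role instead of deep-copying the agent again. A small sketch of that lookup, using a stand-in Agent dataclass rather than the crewAI class.

from dataclasses import dataclass
from typing import List, Optional

@dataclass
class Agent:
    role: str

def get_agent_by_role(agents: List[Agent], role: str) -> Optional[Agent]:
    # Return the first agent whose role matches, or None if absent.
    return next((agent for agent in agents if agent.role == role), None)

cloned_agents = [Agent(role="Researcher"), Agent(role="Writer")]
print(get_agent_by_role(cloned_agents, "Writer"))    # Agent(role='Writer')
print(get_agent_by_role(cloned_agents, "Reviewer"))  # None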
@@ -298,8 +307,19 @@ class Task(BaseModel):
            agent=cloned_agent,
            tools=cloned_tools,
        )

        return copied_task

    def _create_converter(self, *args, **kwargs) -> Converter:  # type: ignore
        converter = self.agent.get_output_converter(  # type: ignore # Item "None" of "BaseAgent | None" has no attribute "get_output_converter"
            *args, **kwargs
        )
        if self.converter_cls:
            converter = self.converter_cls(  # type: ignore # Item "None" of "BaseAgent | None" has no attribute "get_output_converter"
                *args, **kwargs
            )
        return converter

    def _export_output(self, result: str) -> Any:
        exported_result = result
        instructions = "I'm gonna convert this raw text into valid JSON."
@@ -309,34 +329,28 @@ class Task(BaseModel):

            # try to convert task_output directly to pydantic/json
            try:
                # type: ignore # Item "None" of "type[BaseModel] | None" has no attribute "model_validate_json"
                exported_result = model.model_validate_json(result)
                exported_result = model.model_validate_json(result)  # type: ignore # Item "None" of "type[BaseModel] | None" has no attribute "model_validate_json"
                if self.output_json:
                    # type: ignore # "str" has no attribute "model_dump"
                    return exported_result.model_dump()
                    return exported_result.model_dump()  # type: ignore # "str" has no attribute "model_dump"
                return exported_result
            except Exception:
                # sometimes the response contains valid JSON in the middle of text
                match = re.search(r"({.*})", result, re.DOTALL)
                if match:
                    try:
                        # type: ignore # Item "None" of "type[BaseModel] | None" has no attribute "model_validate_json"
                        exported_result = model.model_validate_json(match.group(0))
                        exported_result = model.model_validate_json(match.group(0))  # type: ignore # Item "None" of "type[BaseModel] | None" has no attribute "model_validate_json"
                        if self.output_json:
                            # type: ignore # "str" has no attribute "model_dump"
                            return exported_result.model_dump()
                            return exported_result.model_dump()  # type: ignore # "str" has no attribute "model_dump"
                        return exported_result
                    except Exception:
                        pass

            # type: ignore # Item "None" of "Agent | None" has no attribute "function_calling_llm"
            llm = getattr(self.agent, "function_calling_llm", None) or self.agent.llm
            llm = getattr(self.agent, "function_calling_llm", None) or self.agent.llm  # type: ignore # Item "None" of "BaseAgent | None" has no attribute "function_calling_llm"
            if not self._is_gpt(llm):
                # type: ignore # Argument "model" to "PydanticSchemaParser" has incompatible type "type[BaseModel] | None"; expected "type[BaseModel]"
                model_schema = PydanticSchemaParser(model=model).get_schema()
                model_schema = PydanticSchemaParser(model=model).get_schema()  # type: ignore # Argument "model" to "PydanticSchemaParser" has incompatible type "type[BaseModel] | None"; expected "type[BaseModel]"
                instructions = f"{instructions}\n\nThe json should have the following structure, with the following keys:\n{model_schema}"

            converter = self.agent.get_output_converter(
            converter = self._create_converter(  # type: ignore # Item "None" of "BaseAgent | None" has no attribute "get_output_converter"
                llm=llm, text=result, model=model, instructions=instructions
            )
@@ -354,8 +368,9 @@ class Task(BaseModel):

        if self.output_file:
            content = (
                # type: ignore # "str" has no attribute "json"
                exported_result if not self.output_pydantic else exported_result.json()
                exported_result
                if not self.output_pydantic
                else exported_result.model_dump_json()  # type: ignore # "str" has no attribute "json"
            )
            self._save_file(content)
@@ -365,14 +380,12 @@ class Task(BaseModel):
        return isinstance(llm, ChatOpenAI) and llm.openai_api_base is None

    def _save_file(self, result: Any) -> None:
        # type: ignore # Value of type variable "AnyOrLiteralStr" of "dirname" cannot be "str | None"
        directory = os.path.dirname(self.output_file)
        directory = os.path.dirname(self.output_file)  # type: ignore # Value of type variable "AnyOrLiteralStr" of "dirname" cannot be "str | None"

        if directory and not os.path.exists(directory):
            os.makedirs(directory)

        # type: ignore # Argument 1 to "open" has incompatible type "str | None"; expected "int | str | bytes | PathLike[str] | PathLike[bytes]"
        with open(self.output_file, "w", encoding="utf-8") as file:
        with open(self.output_file, "w", encoding="utf-8") as file:  # type: ignore # Argument 1 to "open" has incompatible type "str | None"; expected "int | str | bytes | PathLike[str] | PathLike[bytes]"
            file.write(result)
        return None
@@ -11,6 +11,11 @@ from crewai.telemetry import Telemetry
from crewai.tools.tool_calling import InstructorToolCalling, ToolCalling
from crewai.utilities import I18N, Converter, ConverterError, Printer

try:
    import agentops
except ImportError:
    agentops = None

OPENAI_BIGGER_MODELS = ["gpt-4"]
@@ -45,6 +50,7 @@ class ToolUsage:
        tools_names: str,
        task: Any,
        function_calling_llm: Any,
        agent: Any,
        action: Any,
    ) -> None:
        self._i18n: I18N = I18N()
@@ -53,6 +59,7 @@ class ToolUsage:
        self._run_attempts: int = 1
        self._max_parsing_attempts: int = 3
        self._remember_format_after_usages: int = 3
        self.agent = agent
        self.tools_description = tools_description
        self.tools_names = tools_names
        self.tools_handler = tools_handler
@@ -98,7 +105,8 @@ class ToolUsage:
        tool_string: str,
        tool: BaseTool,
        calling: Union[ToolCalling, InstructorToolCalling],
    ) -> str:  # TODO: Fix this return type --> finecwg : I updated return type to str
    ) -> str:  # TODO: Fix this return type
        tool_event = agentops.ToolEvent(name=calling.tool_name) if agentops else None
        if self._check_tool_repeated_usage(calling=calling):  # type: ignore # _check_tool_repeated_usage of "ToolUsage" does not return a value (it only ever returns None)
            try:
                result = self._i18n.errors("task_repeated_usage").format(
@@ -123,6 +131,10 @@ class ToolUsage:
                    tool=calling.tool_name, input=calling.arguments
                )

        original_tool = next(
            (ot for ot in self.original_tools if ot.name == tool.name), None
        )

        if result is None:  #! finecwg: if not result --> if result is None
            try:
                if calling.tool_name in [
@@ -139,16 +151,12 @@ class ToolUsage:
                        for k, v in calling.arguments.items()
                        if k in acceptable_args
                    }
                    result = tool._run(**arguments)
                    result = tool.invoke(input=arguments)
                except Exception:
                    if tool.args_schema:
                        arguments = calling.arguments
                        result = tool._run(**arguments)
                        result = tool.invoke(input=arguments)
                    else:
                        arguments = calling.arguments.values()  # type: ignore # Incompatible types in assignment (expression has type "dict_values[str, Any]", variable has type "dict[str, Any]")
                        result = tool._run(*arguments)
                else:
                    result = tool._run()
                    result = tool.invoke(input={})
            except Exception as e:
                self._run_attempts += 1
                if self._run_attempts > self._max_parsing_attempts:
@@ -164,13 +172,14 @@ class ToolUsage:
                    return error  # type: ignore # No return value expected

                self.task.increment_tools_errors()
                if agentops:
                    agentops.record(
                        agentops.ErrorEvent(exception=e, trigger_event=tool_event)
                    )
                return self.use(calling=calling, tool_string=tool_string)  # type: ignore # No return value expected

            if self.tools_handler:
                should_cache = True
                original_tool = next(
                    (ot for ot in self.original_tools if ot.name == tool.name), None
                )
                if (
                    hasattr(original_tool, "cache_function")
                    and original_tool.cache_function  # type: ignore # Item "None" of "Any | None" has no attribute "cache_function"
@@ -184,12 +193,29 @@ class ToolUsage:
                )

        self._printer.print(content=f"\n\n{result}\n", color="purple")
        if agentops:
            agentops.record(tool_event)
        self._telemetry.tool_usage(
            llm=self.function_calling_llm,
            tool_name=tool.name,
            attempts=self._run_attempts,
        )
        result = self._format_result(result=result)  # type: ignore # "_format_result" of "ToolUsage" does not return a value (it only ever returns None)
        data = {
            "result": result,
            "tool_name": tool.name,
            "tool_args": calling.arguments,
        }

        if (
            hasattr(original_tool, "result_as_answer")
            and original_tool.result_as_answer  # type: ignore # Item "None" of "Any | None" has no attribute "cache_function"
        ):
            result_as_answer = original_tool.result_as_answer  # type: ignore # Item "None" of "Any | None" has no attribute "result_as_answer"
            data["result_as_answer"] = result_as_answer

        self.agent.tools_results.append(data)

        return result  # type: ignore # No return value expected

    def _format_result(self, result: Any) -> None:
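Each tool call is now recorded on the agent as a small dict, optionally flagged with result_as_answer. A sketch of that record's shape; the example values are taken from the test added later in this diff, and tools_results stands in for agent.tools_results.

tools_results = []  # stand-in for agent.tools_results

result_as_answer = True  # set when the tool was declared with result_as_answer=True
record = {
    "result": "Howdy!",
    "tool_name": "Decide Greetings",
    "tool_args": {},
}
if result_as_answer:
    record["result_as_answer"] = result_as_answer

tools_results.append(record)
print(tools_results)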
@@ -290,7 +316,7 @@ class ToolUsage:
                Example:
                {"tool_name": "tool name", "arguments": {"arg_name1": "value", "arg_name2": 2}}""",
            ),
            max_attemps=1,
            max_attempts=1,
        )
        calling = converter.to_pydantic()
@@ -17,7 +17,7 @@
    "task_with_context": "{task}\n\nThis is the context you're working with:\n{context}",
    "expected_output": "\nThis is the expect criteria for your final answer: {expected_output} \n you MUST return the actual complete content as the final answer, not a summary.",
    "human_feedback": "You got human feedback on your work, re-avaluate it and give a new Final Answer when ready.\n {human_feedback}",
    "getting_input": "This is the agent final answer: {final_answer}\nPlease provide a feedback: "
    "getting_input": "This is the agent's final answer: {final_answer}\nPlease provide feedback: "
  },
  "errors": {
    "force_final_answer": "Tool won't be use because it's time to give your final answer. Don't use tools and just your absolute BEST Final answer.",
@@ -32,7 +32,7 @@ class Converter(OutputConverter):
            else:
                return self._create_chain().invoke({})
        except Exception as e:
            if current_attempt < self.max_attemps:
            if current_attempt < self.max_attempts:
                return self.to_pydantic(current_attempt + 1)
            return ConverterError(
                f"Failed to convert text into a pydantic model due to the following error: {e}"
@@ -46,7 +46,7 @@ class Converter(OutputConverter):
            else:
                return json.dumps(self._create_chain().invoke({}).model_dump())
        except Exception:
            if current_attempt < self.max_attemps:
            if current_attempt < self.max_attempts:
                return self.to_json(current_attempt + 1)
            return ConverterError("Failed to convert text into JSON.")
@@ -56,7 +56,7 @@ class Converter(OutputConverter):

        inst = Instructor(
            llm=self.llm,
            max_attemps=self.max_attemps,
            max_attempts=self.max_attempts,
            model=self.model,
            content=self.text,
            instructions=self.instructions,
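The rename from max_attemps to max_attempts touches the Converter's bounded-retry logic. A minimal sketch of that retry shape under the same cap; convert_once is a stand-in for the real chain invocation, not a crewAI API.

from typing import Callable, TypeVar

T = TypeVar("T")

def with_retries(convert_once: Callable[[], T], max_attempts: int = 3) -> T:
    # Retry the conversion up to max_attempts times, re-raising the last failure.
    for attempt in range(1, max_attempts + 1):
        try:
            return convert_once()
        except Exception:
            if attempt == max_attempts:
                raise
    raise RuntimeError("unreachable")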
@@ -6,6 +6,17 @@ from pydantic import BaseModel, Field
from crewai.utilities import Converter
from crewai.utilities.pydantic_schema_parser import PydanticSchemaParser

agentops = None
try:
    from agentops import track_agent
except ImportError:

    def track_agent(name):
        def noop(f):
            return f

        return noop


class Entity(BaseModel):
    name: str = Field(description="The name of the entity.")
@@ -38,6 +49,7 @@ class TrainingTaskEvaluation(BaseModel):
    )


@track_agent(name="Task Evaluator")
class TaskEvaluator:
    def __init__(self, original_agent):
        self.llm = original_agent.llm
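The evaluator above imports agentops optionally and falls back to a no-op decorator when the package is absent. A self-contained sketch of that pattern; ExampleEvaluator is a hypothetical class used only to show the decorated class still works without agentops installed.

try:
    from agentops import track_agent
except ImportError:
    def track_agent(name):
        def noop(cls):
            # Without agentops, decoration leaves the class unchanged.
            return cls
        return noop

@track_agent(name="Example Evaluator")
class ExampleEvaluator:
    pass

print(ExampleEvaluator)  # usable whether or not agentops is available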
@@ -31,9 +31,8 @@ class PickleHandler:
        - file_name (str): The name of the file for saving and loading data.
        """
        self.file_path = os.path.join(os.getcwd(), file_name)
        self._initialize_file()

    def _initialize_file(self) -> None:
    def initialize_file(self) -> None:
        """
        Initialize the file with an empty dictionary if it does not exist or is empty.
        """
@@ -8,18 +8,18 @@ from crewai.agents.agent_builder.utilities.base_token_process import TokenProces


class TokenCalcHandler(BaseCallbackHandler):
    model: str = ""
    model_name: str = ""
    token_cost_process: TokenProcess

    def __init__(self, model, token_cost_process):
    def __init__(self, model_name, token_cost_process):
        self.model = model
        self.model_name = model_name
        self.token_cost_process = token_cost_process

    def on_llm_start(
        self, serialized: Dict[str, Any], prompts: List[str], **kwargs: Any
    ) -> None:
        try:
            encoding = tiktoken.encoding_for_model(self.model)
            encoding = tiktoken.encoding_for_model(self.model_name)
        except KeyError:
            encoding = tiktoken.get_encoding("cl100k_base")
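TokenCalcHandler now keys the tiktoken lookup off model_name and falls back to cl100k_base for model names tiktoken does not recognize. A short sketch of that fallback, assuming the tiktoken package is installed.

import tiktoken

def encoding_for(model_name: str):
    try:
        return tiktoken.encoding_for_model(model_name)
    except KeyError:
        # Unknown model names fall back to a generic encoding instead of failing.
        return tiktoken.get_encoding("cl100k_base")

enc = encoding_for("gpt-4o")
print(len(enc.encode("Hello, crew!")))  # rough token count for this string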
@@ -12,7 +12,6 @@ from crewai import Agent, Crew, Task
from crewai.agents.cache import CacheHandler
from crewai.agents.executor import CrewAgentExecutor
from crewai.agents.parser import CrewAgentParser

from crewai.tools.tool_calling import InstructorToolCalling
from crewai.tools.tool_usage import ToolUsage
from crewai.utilities import RPMController
@@ -724,6 +723,73 @@ def test_agent_count_formatting_error():
    mock_count_errors.assert_called_once()


@pytest.mark.vcr(filter_headers=["authorization"])
def test_tool_result_as_answer_is_the_final_answer_for_the_agent():
    from crewai_tools import BaseTool

    class MyCustomTool(BaseTool):
        name: str = "Get Greetings"
        description: str = "Get a random greeting back"

        def _run(self) -> str:
            return "Howdy!"

    agent1 = Agent(
        role="Data Scientist",
        goal="Product amazing resports on AI",
        backstory="You work with data and AI",
        tools=[MyCustomTool(result_as_answer=True)],
    )

    essay = Task(
        description="Write and then review an small paragraph on AI until it's AMAZING. But first use the `Get Greetings` tool to get a greeting.",
        expected_output="The final paragraph with the full review on AI and no greeting.",
        agent=agent1,
    )
    tasks = [essay]
    crew = Crew(agents=[agent1], tasks=tasks)

    result = crew.kickoff()
    assert result == "Howdy!"


@pytest.mark.vcr(filter_headers=["authorization"])
def test_tool_usage_information_is_appended_to_agent():
    from crewai_tools import BaseTool

    class MyCustomTool(BaseTool):
        name: str = "Decide Greetings"
        description: str = "Decide what is the appropriate greeting to use"

        def _run(self) -> str:
            return "Howdy!"

    agent1 = Agent(
        role="Friendly Neighbor",
        goal="Make everyone feel welcome",
        backstory="You are the friendly neighbor",
        tools=[MyCustomTool(result_as_answer=True)],
    )

    greeting = Task(
        description="Say an appropriate greeting.",
        expected_output="The greeting.",
        agent=agent1,
    )
    tasks = [greeting]
    crew = Crew(agents=[agent1], tasks=tasks)

    crew.kickoff()
    assert agent1.tools_results == [
        {
            "result": "Howdy!",
            "tool_name": "Decide Greetings",
            "tool_args": {},
            "result_as_answer": True,
        }
    ]


def test_agent_llm_uses_token_calc_handler_with_llm_has_model_name():
    agent1 = Agent(
        role="test role",
@@ -734,7 +800,7 @@ def test_agent_llm_uses_token_calc_handler_with_llm_has_model_name():

    assert len(agent1.llm.callbacks) == 1
    assert agent1.llm.callbacks[0].__class__.__name__ == "TokenCalcHandler"
    assert agent1.llm.callbacks[0].model == "gpt-4o"
    assert agent1.llm.callbacks[0].model_name == "gpt-4o"
    assert (
        agent1.llm.callbacks[0].token_cost_process.__class__.__name__ == "TokenProcess"
    )
File diff suppressed because it is too large
Load Diff
585
tests/cassettes/test_crew_async_kickoff_for_each_full_ouput.yaml
Normal file
@@ -0,0 +1,585 @@
|
interactions:
|
||||||
|
- request:
|
||||||
|
body: '{"messages": [{"content": "You are dog Researcher. You have a lot of experience
|
||||||
|
with dog.\nYour personal goal is: Express hot takes on dog.To give my best complete
|
||||||
|
final answer to the task use the exact following format:\n\nThought: I now can
|
||||||
|
give a great answer\nFinal Answer: my best complete final answer to the task.\nYour
|
||||||
|
final answer must be the great and the most complete as possible, it must be
|
||||||
|
outcome described.\n\nI MUST use these formats, my job depends on it!\nCurrent
|
||||||
|
Task: Give me an analysis around dog.\n\nThis is the expect criteria for your
|
||||||
|
final answer: 1 bullet point about dog that''s under 15 words. \n you MUST return
|
||||||
|
the actual complete content as the final answer, not a summary.\n\nBegin! This
|
||||||
|
is VERY important to you, use the tools available and give your best Final Answer,
|
||||||
|
your job depends on it!\n\nThought:\n", "role": "user"}], "model": "gpt-4o",
|
||||||
|
"n": 1, "stop": ["\nObservation"], "stream": true, "temperature": 0.7}'
|
||||||
|
headers:
|
||||||
|
accept:
|
||||||
|
- application/json
|
||||||
|
accept-encoding:
|
||||||
|
- gzip, deflate, br
|
||||||
|
connection:
|
||||||
|
- keep-alive
|
||||||
|
content-length:
|
||||||
|
- '951'
|
||||||
|
content-type:
|
||||||
|
- application/json
|
||||||
|
host:
|
||||||
|
- api.openai.com
|
||||||
|
user-agent:
|
||||||
|
- OpenAI/Python 1.34.0
|
||||||
|
x-stainless-arch:
|
||||||
|
- arm64
|
||||||
|
x-stainless-async:
|
||||||
|
- 'false'
|
||||||
|
x-stainless-lang:
|
||||||
|
- python
|
||||||
|
x-stainless-os:
|
||||||
|
- MacOS
|
||||||
|
x-stainless-package-version:
|
||||||
|
- 1.34.0
|
||||||
|
x-stainless-runtime:
|
||||||
|
- CPython
|
||||||
|
x-stainless-runtime-version:
|
||||||
|
- 3.12.3
|
||||||
|
method: POST
|
||||||
|
uri: https://api.openai.com/v1/chat/completions
|
||||||
|
response:
|
||||||
|
body:
|
||||||
|
string: 'data: {"id":"chatcmpl-9gGly5pkMQFPEoB5vefeCguR5lZg5","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGly5pkMQFPEoB5vefeCguR5lZg5","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"Thought"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGly5pkMQFPEoB5vefeCguR5lZg5","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGly5pkMQFPEoB5vefeCguR5lZg5","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||||
|
I"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGly5pkMQFPEoB5vefeCguR5lZg5","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||||
|
now"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGly5pkMQFPEoB5vefeCguR5lZg5","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||||
|
can"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGly5pkMQFPEoB5vefeCguR5lZg5","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||||
|
give"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGly5pkMQFPEoB5vefeCguR5lZg5","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||||
|
a"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGly5pkMQFPEoB5vefeCguR5lZg5","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||||
|
great"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGly5pkMQFPEoB5vefeCguR5lZg5","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||||
|
answer"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGly5pkMQFPEoB5vefeCguR5lZg5","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"\n"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGly5pkMQFPEoB5vefeCguR5lZg5","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGly5pkMQFPEoB5vefeCguR5lZg5","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||||
|
Answer"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGly5pkMQFPEoB5vefeCguR5lZg5","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGly5pkMQFPEoB5vefeCguR5lZg5","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||||
|
Dogs"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGly5pkMQFPEoB5vefeCguR5lZg5","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||||
|
are"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGly5pkMQFPEoB5vefeCguR5lZg5","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||||
|
incredibly"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGly5pkMQFPEoB5vefeCguR5lZg5","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||||
|
loyal"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGly5pkMQFPEoB5vefeCguR5lZg5","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||||
|
creatures"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGly5pkMQFPEoB5vefeCguR5lZg5","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||||
|
and"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGly5pkMQFPEoB5vefeCguR5lZg5","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||||
|
make"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGly5pkMQFPEoB5vefeCguR5lZg5","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||||
|
excellent"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGly5pkMQFPEoB5vefeCguR5lZg5","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||||
|
companions"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGly5pkMQFPEoB5vefeCguR5lZg5","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGly5pkMQFPEoB5vefeCguR5lZg5","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: [DONE]
|
||||||
|
|
||||||
|
|
||||||
|
'
|
||||||
|
headers:
|
||||||
|
CF-Cache-Status:
|
||||||
|
- DYNAMIC
|
||||||
|
CF-RAY:
|
||||||
|
- 89c8b85b1adac009-ATL
|
||||||
|
Connection:
|
||||||
|
- keep-alive
|
||||||
|
Content-Type:
|
||||||
|
- text/event-stream; charset=utf-8
|
||||||
|
Date:
|
||||||
|
- Mon, 01 Jul 2024 19:14:38 GMT
|
||||||
|
Server:
|
||||||
|
- cloudflare
|
||||||
|
Set-Cookie:
|
||||||
|
- __cf_bm=EKr3m8uVLAQymRGlOvcrrYSniXqH.I6nlooc.HtxR58-1719861278-1.0.1.1-ShT9PH0Sv.qvbXjw_BhtziPUPaOIFBxlzXEIk_MXnfJ5PxggSkkaN25IKMZglSd3N2X.U2pWvFwywNQXiXlRnQ;
|
||||||
|
path=/; expires=Mon, 01-Jul-24 19:44:38 GMT; domain=.api.openai.com; HttpOnly;
|
||||||
|
Secure; SameSite=None
|
||||||
|
- _cfuvid=NdoJw5c7TqrbsjEH.ABD06WhM3d1BUh2BfsxOwuclSY-1719861278155-0.0.1.1-604800000;
|
||||||
|
path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
|
||||||
|
Transfer-Encoding:
|
||||||
|
- chunked
|
||||||
|
alt-svc:
|
||||||
|
- h3=":443"; ma=86400
|
||||||
|
openai-organization:
|
||||||
|
- crewai-iuxna1
|
||||||
|
openai-processing-ms:
|
||||||
|
- '110'
|
||||||
|
openai-version:
|
||||||
|
- '2020-10-01'
|
||||||
|
strict-transport-security:
|
||||||
|
- max-age=31536000; includeSubDomains
|
||||||
|
x-ratelimit-limit-requests:
|
||||||
|
- '10000'
|
||||||
|
x-ratelimit-limit-tokens:
|
||||||
|
- '16000000'
|
||||||
|
x-ratelimit-remaining-requests:
|
||||||
|
- '9998'
|
||||||
|
x-ratelimit-remaining-tokens:
|
||||||
|
- '15999741'
|
||||||
|
x-ratelimit-reset-requests:
|
||||||
|
- 11ms
|
||||||
|
x-ratelimit-reset-tokens:
|
||||||
|
- 0s
|
||||||
|
x-request-id:
|
||||||
|
- req_266fff38f6e0a997187154e25d6615e8
|
||||||
|
status:
|
||||||
|
code: 200
|
||||||
|
message: OK
|
||||||
|
- request:
|
||||||
|
body: '{"messages": [{"content": "You are apple Researcher. You have a lot of
|
||||||
|
experience with apple.\nYour personal goal is: Express hot takes on apple.To
|
||||||
|
give my best complete final answer to the task use the exact following format:\n\nThought:
|
||||||
|
I now can give a great answer\nFinal Answer: my best complete final answer to
|
||||||
|
the task.\nYour final answer must be the great and the most complete as possible,
|
||||||
|
it must be outcome described.\n\nI MUST use these formats, my job depends on
|
||||||
|
it!\nCurrent Task: Give me an analysis around apple.\n\nThis is the expect criteria
|
||||||
|
for your final answer: 1 bullet point about apple that''s under 15 words. \n
|
||||||
|
you MUST return the actual complete content as the final answer, not a summary.\n\nBegin!
|
||||||
|
This is VERY important to you, use the tools available and give your best Final
|
||||||
|
Answer, your job depends on it!\n\nThought:\n", "role": "user"}], "model": "gpt-4o",
|
||||||
|
"n": 1, "stop": ["\nObservation"], "stream": true, "temperature": 0.7}'
|
||||||
|
headers:
|
||||||
|
accept:
|
||||||
|
- application/json
|
||||||
|
accept-encoding:
|
||||||
|
- gzip, deflate, br
|
||||||
|
connection:
|
||||||
|
- keep-alive
|
||||||
|
content-length:
|
||||||
|
- '961'
|
||||||
|
content-type:
|
||||||
|
- application/json
|
||||||
|
host:
|
||||||
|
- api.openai.com
|
||||||
|
user-agent:
|
||||||
|
- OpenAI/Python 1.34.0
|
||||||
|
x-stainless-arch:
|
||||||
|
- arm64
|
||||||
|
x-stainless-async:
|
||||||
|
- 'false'
|
||||||
|
x-stainless-lang:
|
||||||
|
- python
|
||||||
|
x-stainless-os:
|
||||||
|
- MacOS
|
||||||
|
x-stainless-package-version:
|
||||||
|
- 1.34.0
|
||||||
|
x-stainless-runtime:
|
||||||
|
- CPython
|
||||||
|
x-stainless-runtime-version:
|
||||||
|
- 3.12.3
|
||||||
|
method: POST
|
||||||
|
uri: https://api.openai.com/v1/chat/completions
|
||||||
|
response:
|
||||||
|
body:
|
||||||
|
string: 'data: {"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"I"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||||
|
now"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||||
|
can"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||||
|
give"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||||
|
a"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||||
|
great"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||||
|
answer"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":".\n"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||||
|
Answer"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||||
|
Apple''s"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||||
|
ecosystem"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||||
|
fosters"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||||
|
seamless"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||||
|
integration"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||||
|
but"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||||
|
can"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||||
|
limit"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||||
|
user"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||||
|
flexibility"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||||
|
and"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||||
|
choice"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGlyTsxWJNVf7aQLLtKEYroHrIXk","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: [DONE]
|
||||||
|
|
||||||
|
|
||||||
|
'
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 89c8b85b19501351-ATL
Connection:
- keep-alive
Content-Type:
- text/event-stream; charset=utf-8
Date:
- Mon, 01 Jul 2024 19:14:38 GMT
Server:
- cloudflare
Set-Cookie:
- __cf_bm=R2LwekshY1B9Z5rF_RyOforiY4rLQreEXx6gOzhnZCI-1719861278-1.0.1.1-y_WShmsjsavKJEOt9Yw8nwjv05e4WdVaZGu8pYcS4z0wF9heVD.0C9W2aQodYxaWIQvkXiPm7y93ma7WCxUdBQ;
path=/; expires=Mon, 01-Jul-24 19:44:38 GMT; domain=.api.openai.com; HttpOnly;
Secure; SameSite=None
- _cfuvid=TLDVkWpa_eP62.b4QTrhowr4J_DsXwMZ2nGDaWD4ebU-1719861278245-0.0.1.1-604800000;
path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
Transfer-Encoding:
- chunked
alt-svc:
- h3=":443"; ma=86400
openai-organization:
- crewai-iuxna1
openai-processing-ms:
- '99'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=31536000; includeSubDomains
x-ratelimit-limit-requests:
- '10000'
x-ratelimit-limit-tokens:
- '16000000'
x-ratelimit-remaining-requests:
- '9999'
x-ratelimit-remaining-tokens:
- '15999780'
x-ratelimit-reset-requests:
- 6ms
x-ratelimit-reset-tokens:
- 0s
x-request-id:
- req_39f52bd8b06ab4ada2c16853345eb6dc
status:
code: 200
message: OK
- request:
body: '{"messages": [{"content": "You are cat Researcher. You have a lot of experience
with cat.\nYour personal goal is: Express hot takes on cat.To give my best complete
final answer to the task use the exact following format:\n\nThought: I now can
give a great answer\nFinal Answer: my best complete final answer to the task.\nYour
final answer must be the great and the most complete as possible, it must be
outcome described.\n\nI MUST use these formats, my job depends on it!\nCurrent
Task: Give me an analysis around cat.\n\nThis is the expect criteria for your
final answer: 1 bullet point about cat that''s under 15 words. \n you MUST return
the actual complete content as the final answer, not a summary.\n\nBegin! This
is VERY important to you, use the tools available and give your best Final Answer,
your job depends on it!\n\nThought:\n", "role": "user"}], "model": "gpt-4o",
"n": 1, "stop": ["\nObservation"], "stream": true, "temperature": 0.7}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate, br
connection:
- keep-alive
content-length:
- '951'
content-type:
- application/json
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.34.0
x-stainless-arch:
- arm64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- MacOS
x-stainless-package-version:
- 1.34.0
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.12.3
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: 'data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"Thought"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||||
|
I"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||||
|
now"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||||
|
can"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||||
|
give"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||||
|
a"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||||
|
great"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||||
|
answer"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"\n"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||||
|
Answer"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||||
|
Cats"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||||
|
are"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||||
|
natural"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||||
|
hunters"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||||
|
with"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||||
|
unique"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||||
|
personalities"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||||
|
and"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||||
|
strong"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||||
|
territorial"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||||
|
instincts"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9gGlyAPhJWtbWCzqHs7gAID5K4T0X","object":"chat.completion.chunk","created":1719861278,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: [DONE]
|
||||||
|
|
||||||
|
|
||||||
|
'
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 89c8b85b1fde4546-ATL
Connection:
- keep-alive
Content-Type:
- text/event-stream; charset=utf-8
Date:
- Mon, 01 Jul 2024 19:14:38 GMT
Server:
- cloudflare
Set-Cookie:
- __cf_bm=KdRY51WYOZSedMQIfG_vPmrPdO67._RkSjV0nq.khUk-1719861278-1.0.1.1-r0uJtNVxaGm2OCkQliG.tsX3vekPCDQb2IET3ywu41Igu1Qfz01rhz_WlvKIllsZlXIyd6rvHT7NxLo.UOtD7Q;
path=/; expires=Mon, 01-Jul-24 19:44:38 GMT; domain=.api.openai.com; HttpOnly;
Secure; SameSite=None
- _cfuvid=Qqu1FBant56uM9Al0JmDl7QMk9cRcojzFUt8volvWPo-1719861278308-0.0.1.1-604800000;
path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
Transfer-Encoding:
- chunked
alt-svc:
- h3=":443"; ma=86400
openai-organization:
- crewai-iuxna1
openai-processing-ms:
- '156'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=31536000; includeSubDomains
x-ratelimit-limit-requests:
- '10000'
x-ratelimit-limit-tokens:
- '16000000'
x-ratelimit-remaining-requests:
- '9999'
x-ratelimit-remaining-tokens:
- '15999783'
x-ratelimit-reset-requests:
- 6ms
x-ratelimit-reset-tokens:
- 0s
x-request-id:
- req_a0c6e5796af340d6720b9cfd55df703e
status:
code: 200
message: OK
version: 1
311
tests/cassettes/test_crew_full_output.yaml
Normal file
@@ -0,0 +1,311 @@
interactions:
- request:
body: '{"messages": [{"content": "You are test role. test backstory\nYour personal
goal is: test goalTo give my best complete final answer to the task use the
exact following format:\n\nThought: I now can give a great answer\nFinal Answer:
my best complete final answer to the task.\nYour final answer must be the great
and the most complete as possible, it must be outcome described.\n\nI MUST use
these formats, my job depends on it!\nCurrent Task: just say hi!\n\nThis is
the expect criteria for your final answer: your greeting \n you MUST return
the actual complete content as the final answer, not a summary.\n\nBegin! This
is VERY important to you, use the tools available and give your best Final Answer,
your job depends on it!\n\nThought:\n", "role": "user"}], "model": "gpt-4o",
"n": 1, "stop": ["\nObservation"], "stream": true, "temperature": 0.7}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate, br
connection:
- keep-alive
content-length:
- '853'
content-type:
- application/json
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.35.10
x-stainless-arch:
- arm64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- MacOS
x-stainless-package-version:
- 1.35.10
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.11.9
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: 'data: {"id":"chatcmpl-9hQvRT6xi6C8Fz3S8GFGiottv1VnH","object":"chat.completion.chunk","created":1720138633,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQvRT6xi6C8Fz3S8GFGiottv1VnH","object":"chat.completion.chunk","created":1720138633,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"Thought"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQvRT6xi6C8Fz3S8GFGiottv1VnH","object":"chat.completion.chunk","created":1720138633,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQvRT6xi6C8Fz3S8GFGiottv1VnH","object":"chat.completion.chunk","created":1720138633,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||||
|
I"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQvRT6xi6C8Fz3S8GFGiottv1VnH","object":"chat.completion.chunk","created":1720138633,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||||
|
now"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQvRT6xi6C8Fz3S8GFGiottv1VnH","object":"chat.completion.chunk","created":1720138633,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||||
|
can"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQvRT6xi6C8Fz3S8GFGiottv1VnH","object":"chat.completion.chunk","created":1720138633,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||||
|
give"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQvRT6xi6C8Fz3S8GFGiottv1VnH","object":"chat.completion.chunk","created":1720138633,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||||
|
a"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQvRT6xi6C8Fz3S8GFGiottv1VnH","object":"chat.completion.chunk","created":1720138633,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||||
|
great"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQvRT6xi6C8Fz3S8GFGiottv1VnH","object":"chat.completion.chunk","created":1720138633,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||||
|
answer"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQvRT6xi6C8Fz3S8GFGiottv1VnH","object":"chat.completion.chunk","created":1720138633,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"\n"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQvRT6xi6C8Fz3S8GFGiottv1VnH","object":"chat.completion.chunk","created":1720138633,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQvRT6xi6C8Fz3S8GFGiottv1VnH","object":"chat.completion.chunk","created":1720138633,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||||
|
Answer"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQvRT6xi6C8Fz3S8GFGiottv1VnH","object":"chat.completion.chunk","created":1720138633,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQvRT6xi6C8Fz3S8GFGiottv1VnH","object":"chat.completion.chunk","created":1720138633,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||||
|
Hi"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQvRT6xi6C8Fz3S8GFGiottv1VnH","object":"chat.completion.chunk","created":1720138633,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"!"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQvRT6xi6C8Fz3S8GFGiottv1VnH","object":"chat.completion.chunk","created":1720138633,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: [DONE]
|
||||||
|
|
||||||
|
|
||||||
|
'
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 89e32bb72dd86d5e-GIG
Connection:
- keep-alive
Content-Type:
- text/event-stream; charset=utf-8
Date:
- Fri, 05 Jul 2024 00:17:13 GMT
Server:
- cloudflare
Set-Cookie:
- __cf_bm=uiZ_rH4TceDeZ.DjXTNE0hPKkvL49mU7mpYzwIIEFFM-1720138633-1.0.1.1-8SVfOrd0RHk4AFEXlnmXRJwgooX2qQwzM_m_nsg32Ln.boGk0NnqnlMfqpRgx0pcWpKoZLDOzVQ9iWuKUbXLgQ;
path=/; expires=Fri, 05-Jul-24 00:47:13 GMT; domain=.api.openai.com; HttpOnly;
Secure; SameSite=None
- _cfuvid=4JpRXthJb2jKE1c2ZJrXA42WVcOEN2OaE7UHDUyWLSk-1720138633250-0.0.1.1-604800000;
path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
Transfer-Encoding:
- chunked
alt-svc:
- h3=":443"; ma=86400
openai-organization:
- crewai-iuxna1
openai-processing-ms:
- '84'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=31536000; includeSubDomains
x-ratelimit-limit-requests:
- '10000'
x-ratelimit-limit-tokens:
- '16000000'
x-ratelimit-remaining-requests:
- '9999'
x-ratelimit-remaining-tokens:
- '15999808'
x-ratelimit-reset-requests:
- 6ms
x-ratelimit-reset-tokens:
- 0s
x-request-id:
- req_533779597dc8ad44deead7d83922cae7
status:
code: 200
message: OK
- request:
body: '{"messages": [{"content": "You are test role. test backstory\nYour personal
goal is: test goalTo give my best complete final answer to the task use the
exact following format:\n\nThought: I now can give a great answer\nFinal Answer:
my best complete final answer to the task.\nYour final answer must be the great
and the most complete as possible, it must be outcome described.\n\nI MUST use
these formats, my job depends on it!\nCurrent Task: just say hello!\n\nThis
is the expect criteria for your final answer: your greeting \n you MUST return
the actual complete content as the final answer, not a summary.\n\nThis is the
context you''re working with:\nHi!\n\nBegin! This is VERY important to you,
use the tools available and give your best Final Answer, your job depends on
it!\n\nThought:\n", "role": "user"}], "model": "gpt-4o", "n": 1, "stop": ["\nObservation"],
"stream": true, "temperature": 0.7}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate, br
connection:
- keep-alive
content-length:
- '905'
content-type:
- application/json
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.35.10
x-stainless-arch:
- arm64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- MacOS
x-stainless-package-version:
- 1.35.10
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.11.9
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: 'data: {"id":"chatcmpl-9hQvRZn4XwZEsAA0VrSHe0HcePTyG","object":"chat.completion.chunk","created":1720138633,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQvRZn4XwZEsAA0VrSHe0HcePTyG","object":"chat.completion.chunk","created":1720138633,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"Thought"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQvRZn4XwZEsAA0VrSHe0HcePTyG","object":"chat.completion.chunk","created":1720138633,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQvRZn4XwZEsAA0VrSHe0HcePTyG","object":"chat.completion.chunk","created":1720138633,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||||
|
I"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQvRZn4XwZEsAA0VrSHe0HcePTyG","object":"chat.completion.chunk","created":1720138633,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||||
|
now"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQvRZn4XwZEsAA0VrSHe0HcePTyG","object":"chat.completion.chunk","created":1720138633,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||||
|
can"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQvRZn4XwZEsAA0VrSHe0HcePTyG","object":"chat.completion.chunk","created":1720138633,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||||
|
give"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQvRZn4XwZEsAA0VrSHe0HcePTyG","object":"chat.completion.chunk","created":1720138633,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||||
|
a"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQvRZn4XwZEsAA0VrSHe0HcePTyG","object":"chat.completion.chunk","created":1720138633,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||||
|
great"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQvRZn4XwZEsAA0VrSHe0HcePTyG","object":"chat.completion.chunk","created":1720138633,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||||
|
answer"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQvRZn4XwZEsAA0VrSHe0HcePTyG","object":"chat.completion.chunk","created":1720138633,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"\n"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQvRZn4XwZEsAA0VrSHe0HcePTyG","object":"chat.completion.chunk","created":1720138633,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQvRZn4XwZEsAA0VrSHe0HcePTyG","object":"chat.completion.chunk","created":1720138633,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||||
|
Answer"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQvRZn4XwZEsAA0VrSHe0HcePTyG","object":"chat.completion.chunk","created":1720138633,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQvRZn4XwZEsAA0VrSHe0HcePTyG","object":"chat.completion.chunk","created":1720138633,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||||
|
Hello"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQvRZn4XwZEsAA0VrSHe0HcePTyG","object":"chat.completion.chunk","created":1720138633,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"!"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQvRZn4XwZEsAA0VrSHe0HcePTyG","object":"chat.completion.chunk","created":1720138633,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: [DONE]
|
||||||
|
|
||||||
|
|
||||||
|
'
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 89e32bbb3b2a6d5e-GIG
Connection:
- keep-alive
Content-Type:
- text/event-stream; charset=utf-8
Date:
- Fri, 05 Jul 2024 00:17:15 GMT
Server:
- cloudflare
Transfer-Encoding:
- chunked
alt-svc:
- h3=":443"; ma=86400
openai-organization:
- crewai-iuxna1
openai-processing-ms:
- '1430'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=31536000; includeSubDomains
x-ratelimit-limit-requests:
- '10000'
x-ratelimit-limit-tokens:
- '16000000'
x-ratelimit-remaining-requests:
- '9999'
x-ratelimit-remaining-tokens:
- '15999794'
x-ratelimit-reset-requests:
- 6ms
x-ratelimit-reset-tokens:
- 0s
x-request-id:
- req_ba305909ef3f2a74b7ce2c33e2990b01
status:
code: 200
message: OK
version: 1
@@ -400126,7 +400126,7 @@ interactions:
Content-Type:
Content-Type:
- application/octet-stream
- application/octet-stream
Date:
Date:
- Wed, 26 Jun 2024 00:47:57 GMT
- Sun, 23 Jun 2024 14:23:37 GMT
ETag:
ETag:
- '0x8DC736E200CD241'
- '0x8DC736E200CD241'
Last-Modified:
Last-Modified:
@@ -400138,128 +400138,24 @@ interactions:
x-ms-lease-status:
x-ms-lease-status:
- unlocked
- unlocked
x-ms-request-id:
x-ms-request-id:
- 8134b189-f01e-002a-4362-c7d69f000000
- fcf86155-f01e-0015-1278-c51e3c000000
x-ms-version:
x-ms-version:
- '2009-09-19'
- '2009-09-19'
status:
status:
code: 200
code: 200
message: OK
message: OK
- request:
- request:
body: !!binary |
|
body: '{"messages": [{"content": "You are {topic} Researcher. You have a lot of
|
||||||
CqIhCiQKIgoMc2VydmljZS5uYW1lEhIKEGNyZXdBSS10ZWxlbWV0cnkS+SAKEgoQY3Jld2FpLnRl
|
experience with {topic}.\nYour personal goal is: Express hot takes on {topic}.To
|
||||||
bGVtZXRyeRKTAQoQeFaYfr5OxyZaJbtlPmhykRIIyBZe8r8vT78qClRvb2wgVXNhZ2UwATmAmXe6
|
give my best complete final answer to the task use the exact following format:\n\nThought:
|
||||||
bGfcF0FgRXi6bGfcF0oaCg5jcmV3YWlfdmVyc2lvbhIICgYwLjMyLjJKHwoJdG9vbF9uYW1lEhIK
|
I now can give a great answer\nFinal Answer: my best complete final answer to
|
||||||
EGdldF9maW5hbF9hbnN3ZXJKDgoIYXR0ZW1wdHMSAhgBegIYAYUBAAEAABKTAQoQDWl8VO5eDhpC
|
the task.\nYour final answer must be the great and the most complete as possible,
|
||||||
8WTRhE7lIxIIjCV/tIXBg+gqClRvb2wgVXNhZ2UwATmQ09a7bGfcF0EYZNe7bGfcF0oaCg5jcmV3
|
it must be outcome described.\n\nI MUST use these formats, my job depends on
|
||||||
YWlfdmVyc2lvbhIICgYwLjMyLjJKHwoJdG9vbF9uYW1lEhIKEGdldF9maW5hbF9hbnN3ZXJKDgoI
|
it!\nCurrent Task: Give me an analysis around dog.\n\nThis is the expect criteria
|
||||||
YXR0ZW1wdHMSAhgBegIYAYUBAAEAABKTAQoQdZyAk3NfWDJ95Rl9fwm9JBIIeOLAjtRHX8YqClRv
|
for your final answer: 1 bullet point about dog that''s under 15 words. \n you
|
||||||
b2wgVXNhZ2UwATn4LiPAbGfcF0Hw1iPAbGfcF0oaCg5jcmV3YWlfdmVyc2lvbhIICgYwLjMyLjJK
|
MUST return the actual complete content as the final answer, not a summary.\n\nBegin!
|
||||||
HwoJdG9vbF9uYW1lEhIKEGdldF9maW5hbF9hbnN3ZXJKDgoIYXR0ZW1wdHMSAhgBegIYAYUBAAEA
|
This is VERY important to you, use the tools available and give your best Final
|
||||||
ABLQAQoQ9B/WTcQbD55N9my9PwwJ/RII61GsN4ZK8lsqEFRvb2wgVXNhZ2UgRXJyb3IwATn4XSTC
|
Answer, your job depends on it!\n\nThought:\n", "role": "user"}], "model": "gpt-4o",
|
||||||
bGfcF0F4GSXCbGfcF0oaCg5jcmV3YWlfdmVyc2lvbhIICgYwLjMyLjJKZgoDbGxtEl8KXXsibmFt
|
|
||||||
ZSI6IG51bGwsICJtb2RlbF9uYW1lIjogImdwdC00LTAxMjUtcHJldmlldyIsICJ0ZW1wZXJhdHVy
|
|
||||||
ZSI6IDAuNywgImNsYXNzIjogIkNoYXRPcGVuQUkifXoCGAGFAQABAAAS0AEKEN0yjmyVck//dsMA
|
|
||||||
AJt7gVsSCE64+GH0u+SCKhBUb29sIFVzYWdlIEVycm9yMAE5iIgTw2xn3BdBODwUw2xn3BdKGgoO
|
|
||||||
Y3Jld2FpX3ZlcnNpb24SCAoGMC4zMi4ySmYKA2xsbRJfCl17Im5hbWUiOiBudWxsLCAibW9kZWxf
|
|
||||||
bmFtZSI6ICJncHQtNC0wMTI1LXByZXZpZXciLCAidGVtcGVyYXR1cmUiOiAwLjcsICJjbGFzcyI6
|
|
||||||
ICJDaGF0T3BlbkFJIn16AhgBhQEAAQAAEtABChDHSt0G/EqmGlI1IdhjDtotEgj83Lu7mYSDsyoQ
|
|
||||||
VG9vbCBVc2FnZSBFcnJvcjABOZCYGMRsZ9wXQXBEGcRsZ9wXShoKDmNyZXdhaV92ZXJzaW9uEggK
|
|
||||||
BjAuMzIuMkpmCgNsbG0SXwpdeyJuYW1lIjogbnVsbCwgIm1vZGVsX25hbWUiOiAiZ3B0LTQtMDEy
|
|
||||||
NS1wcmV2aWV3IiwgInRlbXBlcmF0dXJlIjogMC43LCAiY2xhc3MiOiAiQ2hhdE9wZW5BSSJ9egIY
|
|
||||||
AYUBAAEAABJoChBpV7cx2tTNYorpmU9Iezq6EgjT9Wm5SX16tioQVG9vbCBVc2FnZSBFcnJvcjAB
|
|
||||||
ObDiWsdsZ9wXQfB+W8dsZ9wXShoKDmNyZXdhaV92ZXJzaW9uEggKBjAuMzIuMnoCGAGFAQABAAAS
|
|
||||||
kwEKECe14QwK8YEX5k/HAyTJJOESCLC5kWxi9XjbKgpUb29sIFVzYWdlMAE5AHmbyGxn3BdBcA2c
|
|
||||||
yGxn3BdKGgoOY3Jld2FpX3ZlcnNpb24SCAoGMC4zMi4ySh8KCXRvb2xfbmFtZRISChBnZXRfZmlu
|
|
||||||
YWxfYW5zd2VySg4KCGF0dGVtcHRzEgIYAXoCGAGFAQABAAASkwEKECkLzxPcPFShQTRBKOuA5VwS
|
|
||||||
CEXEpf9vgT0qKgpUb29sIFVzYWdlMAE5uIX9yWxn3BdBiAr+yWxn3BdKGgoOY3Jld2FpX3ZlcnNp
|
|
||||||
b24SCAoGMC4zMi4ySh8KCXRvb2xfbmFtZRISChBnZXRfZmluYWxfYW5zd2VySg4KCGF0dGVtcHRz
|
|
||||||
EgIYAXoCGAGFAQABAAASkwEKELKxl/VCmmnFuHppNDaUdLYSCMxTFAfY7LKZKgpUb29sIFVzYWdl
|
|
||||||
MAE5cEBDy2xn3BdBKMlDy2xn3BdKGgoOY3Jld2FpX3ZlcnNpb24SCAoGMC4zMi4ySh8KCXRvb2xf
|
|
||||||
bmFtZRISChBnZXRfZmluYWxfYW5zd2VySg4KCGF0dGVtcHRzEgIYAXoCGAGFAQABAAASaAoQWsd1
|
|
||||||
ufYSiTjFfOLUvTXiGhIIUqeG6O/KOEsqEFRvb2wgVXNhZ2UgRXJyb3IwATnonMXObGfcF0FANcbO
|
|
||||||
bGfcF0oaCg5jcmV3YWlfdmVyc2lvbhIICgYwLjMyLjJ6AhgBhQEAAQAAEpwBChDePaRaxJbHM9ti
|
|
||||||
mgjFTw4eEgiJJ6aTsD4j1CoKVG9vbCBVc2FnZTABObD+Q9FsZ9wXQSCTRNFsZ9wXShoKDmNyZXdh
|
|
||||||
aV92ZXJzaW9uEggKBjAuMzIuMkooCgl0b29sX25hbWUSGwoZRGVsZWdhdGUgd29yayB0byBjb3dv
|
|
||||||
cmtlckoOCghhdHRlbXB0cxICGAF6AhgBhQEAAQAAEpsBChB8AxpI4Iiqli5YpSzNuAHkEgiuoKKr
|
|
||||||
AonvqioKVG9vbCBVc2FnZTABOQhPr9ZsZ9wXQRjzr9ZsZ9wXShoKDmNyZXdhaV92ZXJzaW9uEggK
|
|
||||||
BjAuMzIuMkonCgl0b29sX25hbWUSGgoYQXNrIHF1ZXN0aW9uIHRvIGNvd29ya2VySg4KCGF0dGVt
|
|
||||||
cHRzEgIYAXoCGAGFAQABAAASkQEKEHWST7FZaG5Ij0kYfqRwQFISCGo4iAiKiKriKgpUb29sIFVz
|
|
||||||
YWdlMAE5qKLl2Wxn3BdBuEbm2Wxn3BdKGgoOY3Jld2FpX3ZlcnNpb24SCAoGMC4zMi4ySh0KCXRv
|
|
||||||
b2xfbmFtZRIQCg5sZWFybl9hYm91dF9BSUoOCghhdHRlbXB0cxICGAF6AhgBhQEAAQAAEpEBChBW
|
|
||||||
s1jZuOvGnPk5qZAn/3bvEghcLUd+91YchSoKVG9vbCBVc2FnZTABOVBUJttsZ9wXQfDgJttsZ9wX
|
|
||||||
ShoKDmNyZXdhaV92ZXJzaW9uEggKBjAuMzIuMkodCgl0b29sX25hbWUSEAoObGVhcm5fYWJvdXRf
|
|
||||||
QUlKDgoIYXR0ZW1wdHMSAhgBegIYAYUBAAEAABJoChB390oxWJKggBM1q5uqyAIWEgilZ4dcV9H3
|
|
||||||
zyoQVG9vbCBVc2FnZSBFcnJvcjABObCnTtxsZ9wXQYAsT9xsZ9wXShoKDmNyZXdhaV92ZXJzaW9u
|
|
||||||
EggKBjAuMzIuMnoCGAGFAQABAAAS+QEKEGWFgsouWvCkBbw9hOp41fkSCHuQEq4LICdiKgpUb29s
|
|
||||||
IFVzYWdlMAE5aLsF4Wxn3BdBcIoG4Wxn3BdKGgoOY3Jld2FpX3ZlcnNpb24SCAoGMC4zMi4ySh0K
|
|
||||||
CXRvb2xfbmFtZRIQCg5sZWFybl9hYm91dF9BSUoOCghhdHRlbXB0cxICGAFKZgoDbGxtEl8KXXsi
|
|
||||||
bmFtZSI6IG51bGwsICJtb2RlbF9uYW1lIjogImdwdC0zLjUtdHVyYm8tMDEyNSIsICJ0ZW1wZXJh
|
|
||||||
dHVyZSI6IDAuNywgImNsYXNzIjogIkNoYXRPcGVuQUkifXoCGAGFAQABAAAS+QEKEH74JqRDD3nP
|
|
||||||
pYFzHXVuRBMSCOVLhi1+G6VtKgpUb29sIFVzYWdlMAE5GDRy4mxn3BdB+N9y4mxn3BdKGgoOY3Jl
|
|
||||||
d2FpX3ZlcnNpb24SCAoGMC4zMi4ySh0KCXRvb2xfbmFtZRIQCg5sZWFybl9hYm91dF9BSUoOCghh
|
|
||||||
dHRlbXB0cxICGAFKZgoDbGxtEl8KXXsibmFtZSI6IG51bGwsICJtb2RlbF9uYW1lIjogImdwdC0z
|
|
||||||
LjUtdHVyYm8tMDEyNSIsICJ0ZW1wZXJhdHVyZSI6IDAuNywgImNsYXNzIjogIkNoYXRPcGVuQUki
|
|
||||||
fXoCGAGFAQABAAAS+QEKELEPPr1EMWxY9eCI/9VxYRISCBifMoJVgs46KgpUb29sIFVzYWdlMAE5
|
|
||||||
WMIV5Gxn3BdBOG4W5Gxn3BdKGgoOY3Jld2FpX3ZlcnNpb24SCAoGMC4zMi4ySh0KCXRvb2xfbmFt
|
|
||||||
ZRIQCg5sZWFybl9hYm91dF9BSUoOCghhdHRlbXB0cxICGAFKZgoDbGxtEl8KXXsibmFtZSI6IG51
|
|
||||||
bGwsICJtb2RlbF9uYW1lIjogImdwdC0zLjUtdHVyYm8tMDEyNSIsICJ0ZW1wZXJhdHVyZSI6IDAu
|
|
||||||
NywgImNsYXNzIjogIkNoYXRPcGVuQUkifXoCGAGFAQABAAASnAEKEIj74gWv+Pwo4caZDgnygaYS
|
|
||||||
CETmyodLQY8/KgpUb29sIFVzYWdlMAE5sBTAFm1n3BdBANjAFm1n3BdKGgoOY3Jld2FpX3ZlcnNp
|
|
||||||
b24SCAoGMC4zMi4ySigKCXRvb2xfbmFtZRIbChlEZWxlZ2F0ZSB3b3JrIHRvIGNvd29ya2VySg4K
|
|
||||||
CGF0dGVtcHRzEgIYAXoCGAGFAQABAAASnAEKEDq45gMODvysUf5wQQf4F+0SCHZHV4/9LnhnKgpU
|
|
||||||
b29sIFVzYWdlMAE5EDHQIG1n3BdBwOTQIG1n3BdKGgoOY3Jld2FpX3ZlcnNpb24SCAoGMC4zMi4y
|
|
||||||
SigKCXRvb2xfbmFtZRIbChlEZWxlZ2F0ZSB3b3JrIHRvIGNvd29ya2VySg4KCGF0dGVtcHRzEgIY
|
|
||||||
AXoCGAGFAQABAAASnAEKEJHNsGUrqNbXyAOQPZnty9kSCAASe0iLg1zKKgpUb29sIFVzYWdlMAE5
|
|
||||||
kCpPLW1n3BdBmPlPLW1n3BdKGgoOY3Jld2FpX3ZlcnNpb24SCAoGMC4zMi4ySigKCXRvb2xfbmFt
|
|
||||||
ZRIbChlEZWxlZ2F0ZSB3b3JrIHRvIGNvd29ya2VySg4KCGF0dGVtcHRzEgIYAXoCGAGFAQABAAAS
|
|
||||||
jQEKEP5ZHkp/aYaAyGuu53Anw2kSCDu8tanZxniRKgpUb29sIFVzYWdlMAE5iFVHWG1n3BdBUAVI
|
|
||||||
WG1n3BdKGgoOY3Jld2FpX3ZlcnNpb24SCAoGMC4zMi4yShkKCXRvb2xfbmFtZRIMCgptdWx0aXBs
|
|
||||||
aWVySg4KCGF0dGVtcHRzEgIYAXoCGAGFAQABAAASjQEKEFPBMl1rYMxgfMdCym2d+yUSCIu9WYA8
|
|
||||||
9juvKgpUb29sIFVzYWdlMAE5YK3oWW1n3BdB6D3pWW1n3BdKGgoOY3Jld2FpX3ZlcnNpb24SCAoG
|
|
||||||
MC4zMi4yShkKCXRvb2xfbmFtZRIMCgptdWx0aXBsaWVySg4KCGF0dGVtcHRzEgIYAXoCGAGFAQAB
|
|
||||||
AAASaAoQCQJWjJpe1L2HUfWpwOVCVRIIb/BALasmjQEqEFRvb2wgVXNhZ2UgRXJyb3IwATmAGdJc
|
|
||||||
bWfcF0F4wdJcbWfcF0oaCg5jcmV3YWlfdmVyc2lvbhIICgYwLjMyLjJ6AhgBhQEAAQAAEmgKED/Z
|
|
||||||
tmI7I3o+4n/FgbLHhtMSCGEtw+VqWPc+KhBUb29sIFVzYWdlIEVycm9yMAE5aDyyXW1n3BdBIMWy
|
|
||||||
XW1n3BdKGgoOY3Jld2FpX3ZlcnNpb24SCAoGMC4zMi4yegIYAYUBAAEAAA==
|
|
||||||
headers:
|
|
||||||
Accept:
|
|
||||||
- '*/*'
|
|
||||||
Accept-Encoding:
|
|
||||||
- gzip, deflate, br
|
|
||||||
Connection:
|
|
||||||
- keep-alive
|
|
||||||
Content-Length:
|
|
||||||
- '4261'
|
|
||||||
Content-Type:
|
|
||||||
- application/x-protobuf
|
|
||||||
User-Agent:
|
|
||||||
- OTel-OTLP-Exporter-Python/1.25.0
|
|
||||||
method: POST
|
|
||||||
uri: https://telemetry.crewai.com:4319/v1/traces
|
|
||||||
response:
|
|
||||||
body:
|
|
||||||
string: "\n\0"
|
|
||||||
headers:
|
|
||||||
Content-Length:
|
|
||||||
- '2'
|
|
||||||
Content-Type:
|
|
||||||
- application/x-protobuf
|
|
||||||
Date:
|
|
||||||
- Wed, 26 Jun 2024 00:47:59 GMT
|
|
||||||
status:
|
|
||||||
code: 200
|
|
||||||
message: OK
|
|
||||||
- request:
|
|
||||||
body: '{"messages": [{"content": "You are test role. test backstory\nYour personal
|
|
||||||
goal is: test goalTo give my best complete final answer to the task use the
|
|
||||||
exact following format:\n\nThought: I now can give a great answer\nFinal Answer:
|
|
||||||
my best complete final answer to the task.\nYour final answer must be the great
|
|
||||||
and the most complete as possible, it must be outcome described.\n\nI MUST use
|
|
||||||
these formats, my job depends on it!\nCurrent Task: just say hi!\n\nThis is
|
|
||||||
the expect criteria for your final answer: your greeting \n you MUST return
|
|
||||||
the actual complete content as the final answer, not a summary.\n\nBegin! This
|
|
||||||
is VERY important to you, use the tools available and give your best Final Answer,
|
|
||||||
your job depends on it!\n\nThought:\n", "role": "user"}], "model": "gpt-4o",
|
|
||||||
"n": 1, "stop": ["\nObservation"], "stream": true, "temperature": 0.7}'
|
"n": 1, "stop": ["\nObservation"], "stream": true, "temperature": 0.7}'
|
||||||
headers:
|
headers:
|
||||||
accept:
|
accept:
|
||||||
@@ -400269,13 +400165,13 @@ interactions:
connection:
connection:
- keep-alive
- keep-alive
content-length:
content-length:
- '853'
- '963'
content-type:
content-type:
- application/json
- application/json
host:
host:
- api.openai.com
- api.openai.com
user-agent:
user-agent:
- OpenAI/Python 1.35.3
- OpenAI/Python 1.34.0
x-stainless-arch:
x-stainless-arch:
- arm64
- arm64
x-stainless-async:
x-stainless-async:
@@ -400285,73 +400181,116 @@ interactions:
x-stainless-os:
x-stainless-os:
- MacOS
- MacOS
x-stainless-package-version:
x-stainless-package-version:
- 1.35.3
- 1.34.0
x-stainless-runtime:
x-stainless-runtime:
- CPython
- CPython
x-stainless-runtime-version:
x-stainless-runtime-version:
- 3.11.9
- 3.12.3
method: POST
method: POST
uri: https://api.openai.com/v1/chat/completions
uri: https://api.openai.com/v1/chat/completions
response:
response:
body:
body:
string: 'data: {"id":"chatcmpl-9eB7HGovGTNNv8jlZBbEHKCMAa2Sn","object":"chat.completion.chunk","created":1719362879,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5bf7397cd3","choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
|
string: 'data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-9eB7HGovGTNNv8jlZBbEHKCMAa2Sn","object":"chat.completion.chunk","created":1719362879,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5bf7397cd3","choices":[{"index":0,"delta":{"content":"Thought"},"logprobs":null,"finish_reason":null}]}
|
data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":"Thought"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-9eB7HGovGTNNv8jlZBbEHKCMAa2Sn","object":"chat.completion.chunk","created":1719362879,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5bf7397cd3","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-9eB7HGovGTNNv8jlZBbEHKCMAa2Sn","object":"chat.completion.chunk","created":1719362879,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5bf7397cd3","choices":[{"index":0,"delta":{"content":"
|
data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":"
|
||||||
I"},"logprobs":null,"finish_reason":null}]}
|
I"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-9eB7HGovGTNNv8jlZBbEHKCMAa2Sn","object":"chat.completion.chunk","created":1719362879,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5bf7397cd3","choices":[{"index":0,"delta":{"content":"
|
data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":"
|
||||||
now"},"logprobs":null,"finish_reason":null}]}
|
now"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-9eB7HGovGTNNv8jlZBbEHKCMAa2Sn","object":"chat.completion.chunk","created":1719362879,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5bf7397cd3","choices":[{"index":0,"delta":{"content":"
|
data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":"
|
||||||
can"},"logprobs":null,"finish_reason":null}]}
|
can"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-9eB7HGovGTNNv8jlZBbEHKCMAa2Sn","object":"chat.completion.chunk","created":1719362879,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5bf7397cd3","choices":[{"index":0,"delta":{"content":"
|
data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":"
|
||||||
give"},"logprobs":null,"finish_reason":null}]}
|
give"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-9eB7HGovGTNNv8jlZBbEHKCMAa2Sn","object":"chat.completion.chunk","created":1719362879,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5bf7397cd3","choices":[{"index":0,"delta":{"content":"
|
data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":"
|
||||||
a"},"logprobs":null,"finish_reason":null}]}
|
a"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-9eB7HGovGTNNv8jlZBbEHKCMAa2Sn","object":"chat.completion.chunk","created":1719362879,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5bf7397cd3","choices":[{"index":0,"delta":{"content":"
|
data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":"
|
||||||
great"},"logprobs":null,"finish_reason":null}]}
|
great"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-9eB7HGovGTNNv8jlZBbEHKCMAa2Sn","object":"chat.completion.chunk","created":1719362879,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5bf7397cd3","choices":[{"index":0,"delta":{"content":"
|
data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":"
|
||||||
answer"},"logprobs":null,"finish_reason":null}]}
|
answer"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-9eB7HGovGTNNv8jlZBbEHKCMAa2Sn","object":"chat.completion.chunk","created":1719362879,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5bf7397cd3","choices":[{"index":0,"delta":{"content":"\n"},"logprobs":null,"finish_reason":null}]}
|
data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":"\n"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-9eB7HGovGTNNv8jlZBbEHKCMAa2Sn","object":"chat.completion.chunk","created":1719362879,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5bf7397cd3","choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]}
|
data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-9eB7HGovGTNNv8jlZBbEHKCMAa2Sn","object":"chat.completion.chunk","created":1719362879,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5bf7397cd3","choices":[{"index":0,"delta":{"content":"
|
data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":"
|
||||||
Answer"},"logprobs":null,"finish_reason":null}]}
|
Answer"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-9eB7HGovGTNNv8jlZBbEHKCMAa2Sn","object":"chat.completion.chunk","created":1719362879,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5bf7397cd3","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-9eB7HGovGTNNv8jlZBbEHKCMAa2Sn","object":"chat.completion.chunk","created":1719362879,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5bf7397cd3","choices":[{"index":0,"delta":{"content":"
|
data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":"
|
||||||
Hi"},"logprobs":null,"finish_reason":null}]}
|
Dogs"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-9eB7HGovGTNNv8jlZBbEHKCMAa2Sn","object":"chat.completion.chunk","created":1719362879,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5bf7397cd3","choices":[{"index":0,"delta":{"content":"!"},"logprobs":null,"finish_reason":null}]}
|
data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":"
|
||||||
|
are"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-9eB7HGovGTNNv8jlZBbEHKCMAa2Sn","object":"chat.completion.chunk","created":1719362879,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5bf7397cd3","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
|
data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":"
|
||||||
|
loyal"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":"
|
||||||
|
companions"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":"
|
||||||
|
and"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":"
|
||||||
|
enhance"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":"
|
||||||
|
human"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":"
|
||||||
|
well"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":"-being"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":"
|
||||||
|
through"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":"
|
||||||
|
emotional"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":"
|
||||||
|
support"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9dIPzOXprkfuIluBhM1o1gL8lm3yu","object":"chat.completion.chunk","created":1719152619,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_5e6c71d4a8","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
|
||||||
|
|
||||||
|
|
||||||
data: [DONE]
|
data: [DONE]
|
||||||
@@ -400362,20 +400301,20 @@ interactions:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
-  - 8999306a3c2b97ff-SJC
+  - 8985231e8e72453b-ATL
Connection:
- keep-alive
Content-Type:
- text/event-stream; charset=utf-8
Date:
-  - Wed, 26 Jun 2024 00:48:00 GMT
+  - Sun, 23 Jun 2024 14:23:39 GMT
Server:
- cloudflare
Set-Cookie:
-  - __cf_bm=cZ1tqEJHf90QH5Wpf8u5w22pMiXEVmHqh.l_FNQzPUE-1719362880-1.0.1.1-ukAMHHbJtb1dXgdiJ9rZPnK9Dr9955CracMEW21Vc4n80TvL8Aj_Ke_B7G85jBPWrNAAQ0LYFLomosxQMd_qSg;
-    path=/; expires=Wed, 26-Jun-24 01:18:00 GMT; domain=.api.openai.com; HttpOnly;
+  - __cf_bm=eo7Wv_7IoFobCr06vVNbn6fiTV000XuWJ5mG6A5XfCI-1719152619-1.0.1.1-VWt6JrnrLWxl1Heg2Mc9q1an5j9.JHISIS2VP5qbC_YCwRn5WkI_QykzIBPo8kil7ndx45QBS0fUvVDo22tWKQ;
+    path=/; expires=Sun, 23-Jun-24 14:53:39 GMT; domain=.api.openai.com; HttpOnly;
Secure; SameSite=None
-  - _cfuvid=jB8ShrBkfWJf2HStRWtCrxPo9TmM70i91UOQVSXB_ik-1719362880048-0.0.1.1-604800000;
+  - _cfuvid=zHfpY3hveQ2EtdkjxmAEpZPcSMSo.R1IXEnggmqkQEI-1719152619731-0.0.1.1-604800000;
path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
Transfer-Encoding:
- chunked
@@ -400384,41 +400323,41 @@ interactions:
openai-organization:
- crewai-iuxna1
openai-processing-ms:
-  - '766'
+  - '115'
openai-version:
- '2020-10-01'
strict-transport-security:
-  - max-age=31536000; includeSubDomains
+  - max-age=15724800; includeSubDomains
x-ratelimit-limit-requests:
- '10000'
x-ratelimit-limit-tokens:
-  - '16000000'
+  - '12000000'
x-ratelimit-remaining-requests:
- '9999'
x-ratelimit-remaining-tokens:
-  - '15999808'
+  - '11999780'
x-ratelimit-reset-requests:
- 6ms
x-ratelimit-reset-tokens:
-  - 0s
+  - 1ms
x-request-id:
-  - e2e310e93ef808475e7ac73c1ef60ec0
+  - req_6e1809edb78fae06d8a20ed070c6e64a
status:
code: 200
message: OK
- request:
-  body: '{"messages": [{"content": "You are test role. test backstory\nYour personal
-    goal is: test goalTo give my best complete final answer to the task use the
-    exact following format:\n\nThought: I now can give a great answer\nFinal Answer:
-    my best complete final answer to the task.\nYour final answer must be the great
-    and the most complete as possible, it must be outcome described.\n\nI MUST use
-    these formats, my job depends on it!\nCurrent Task: just say hello!\n\nThis
-    is the expect criteria for your final answer: your greeting \n you MUST return
-    the actual complete content as the final answer, not a summary.\n\nThis is the
-    context you''re working with:\nHi!\n\nBegin! This is VERY important to you,
-    use the tools available and give your best Final Answer, your job depends on
-    it!\n\nThought:\n", "role": "user"}], "model": "gpt-4o", "n": 1, "stop": ["\nObservation"],
-    "stream": true, "temperature": 0.7}'
+  body: '{"messages": [{"content": "You are {topic} Researcher. You have a lot of
+    experience with {topic}.\nYour personal goal is: Express hot takes on {topic}.To
+    give my best complete final answer to the task use the exact following format:\n\nThought:
+    I now can give a great answer\nFinal Answer: my best complete final answer to
+    the task.\nYour final answer must be the great and the most complete as possible,
+    it must be outcome described.\n\nI MUST use these formats, my job depends on
+    it!\nCurrent Task: Give me an analysis around cat.\n\nThis is the expect criteria
+    for your final answer: 1 bullet point about cat that''s under 15 words. \n you
+    MUST return the actual complete content as the final answer, not a summary.\n\nBegin!
+    This is VERY important to you, use the tools available and give your best Final
+    Answer, your job depends on it!\n\nThought:\n", "role": "user"}], "model": "gpt-4o",
+    "n": 1, "stop": ["\nObservation"], "stream": true, "temperature": 0.7}'
headers:
accept:
- application/json
@@ -400427,16 +400366,16 @@ interactions:
connection:
- keep-alive
content-length:
-  - '905'
+  - '963'
content-type:
- application/json
cookie:
-  - __cf_bm=cZ1tqEJHf90QH5Wpf8u5w22pMiXEVmHqh.l_FNQzPUE-1719362880-1.0.1.1-ukAMHHbJtb1dXgdiJ9rZPnK9Dr9955CracMEW21Vc4n80TvL8Aj_Ke_B7G85jBPWrNAAQ0LYFLomosxQMd_qSg;
-    _cfuvid=jB8ShrBkfWJf2HStRWtCrxPo9TmM70i91UOQVSXB_ik-1719362880048-0.0.1.1-604800000
+  - __cf_bm=eo7Wv_7IoFobCr06vVNbn6fiTV000XuWJ5mG6A5XfCI-1719152619-1.0.1.1-VWt6JrnrLWxl1Heg2Mc9q1an5j9.JHISIS2VP5qbC_YCwRn5WkI_QykzIBPo8kil7ndx45QBS0fUvVDo22tWKQ;
+    _cfuvid=zHfpY3hveQ2EtdkjxmAEpZPcSMSo.R1IXEnggmqkQEI-1719152619731-0.0.1.1-604800000
host:
- api.openai.com
user-agent:
-  - OpenAI/Python 1.35.3
+  - OpenAI/Python 1.34.0
x-stainless-arch:
- arm64
x-stainless-async:
@@ -400446,73 +400385,116 @@ interactions:
x-stainless-os:
- MacOS
x-stainless-package-version:
-  - 1.35.3
+  - 1.34.0
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
-  - 3.11.9
+  - 3.12.3
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
-  string: 'data: ... (stream chatcmpl-9eB7I99F3dYCupsw3MZZeCiuDZbnU, created 1719362880,
-    model gpt-4o-2024-05-13; its chunks assemble "Thought: I now can give a great answer\nFinal
-    Answer: Hello!" and end with a "finish_reason":"stop" chunk)
+  string: 'data: ... (stream chatcmpl-9dIQ0Z7g9gLoEyFe4tuHZY3zDHIxh, created 1719152620,
+    model gpt-4o-2024-05-13; its chunks assemble "Thought: I now can give a great answer\nFinal
+    Answer: Cats are independent, curious creatures with a strong sense of territory." and end
+    with a "finish_reason":"stop" chunk)

data: [DONE]
@@ -400523,13 +400505,13 @@ interactions:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
-  - 89993072097897ff-SJC
+  - 898523253f8c453b-ATL
Connection:
- keep-alive
Content-Type:
- text/event-stream; charset=utf-8
Date:
-  - Wed, 26 Jun 2024 00:48:00 GMT
+  - Sun, 23 Jun 2024 14:23:40 GMT
Server:
- cloudflare
Transfer-Encoding:
@@ -400539,25 +400521,229 @@ interactions:
openai-organization:
- crewai-iuxna1
openai-processing-ms:
-  - '134'
+  - '121'
openai-version:
- '2020-10-01'
strict-transport-security:
-  - max-age=31536000; includeSubDomains
+  - max-age=15724800; includeSubDomains
x-ratelimit-limit-requests:
- '10000'
x-ratelimit-limit-tokens:
-  - '16000000'
+  - '12000000'
x-ratelimit-remaining-requests:
- '9999'
x-ratelimit-remaining-tokens:
-  - '15999794'
+  - '11999779'
x-ratelimit-reset-requests:
- 6ms
x-ratelimit-reset-tokens:
-  - 0s
+  - 1ms
x-request-id:
-  - 95f7e7603df593de5f778bfbd77838c9
+  - req_fcdeaba57c8c0b5c80fdaaf4f3949da3
+  status:
+    code: 200
+    message: OK
+  - request:
+    body: '{"messages": [{"content": "You are {topic} Researcher. You have a lot of
+      experience with {topic}.\nYour personal goal is: Express hot takes on {topic}.To
+      give my best complete final answer to the task use the exact following format:\n\nThought:
+      I now can give a great answer\nFinal Answer: my best complete final answer to
+      the task.\nYour final answer must be the great and the most complete as possible,
+      it must be outcome described.\n\nI MUST use these formats, my job depends on
+      it!\nCurrent Task: Give me an analysis around apple.\n\nThis is the expect criteria
+      for your final answer: 1 bullet point about apple that''s under 15 words. \n
+      you MUST return the actual complete content as the final answer, not a summary.\n\nBegin!
+      This is VERY important to you, use the tools available and give your best Final
+      Answer, your job depends on it!\n\nThought:\n", "role": "user"}], "model": "gpt-4o",
+      "n": 1, "stop": ["\nObservation"], "stream": true, "temperature": 0.7}'
+    headers: (accept application/json; accept-encoding gzip, deflate, br; connection keep-alive;
+      content-length '967'; content-type application/json; the __cf_bm and _cfuvid cookies set above;
+      host api.openai.com; user-agent OpenAI/Python 1.34.0; x-stainless-arch arm64;
+      x-stainless-async 'false'; x-stainless-lang python; x-stainless-os MacOS;
+      x-stainless-package-version 1.34.0; x-stainless-runtime CPython; x-stainless-runtime-version 3.12.3)
+    method: POST
+    uri: https://api.openai.com/v1/chat/completions
+    response:
+      body:
+        string: 'data: ... (stream chatcmpl-9dIQ14W5gaHA4p3dYi6n7FrRBnzhN, created 1719152621,
+          model gpt-4o-2024-05-13; its chunks assemble "Thought: I now can give a great answer.\nFinal
+          Answer: Apple revolutionized technology with the iPhone, transforming communication and
+          media consumption." and end with a "finish_reason":"stop" chunk and data: [DONE])
+
+          '
+      headers: (CF-Cache-Status DYNAMIC; CF-RAY 8985232a4e71453b-ATL; Connection keep-alive;
+        Content-Type text/event-stream; charset=utf-8; Date Sun, 23 Jun 2024 14:23:41 GMT;
+        Server cloudflare; Transfer-Encoding chunked; alt-svc h3=":443"; ma=86400;
+        openai-organization crewai-iuxna1; openai-processing-ms '108'; openai-version '2020-10-01';
+        strict-transport-security max-age=15724800; includeSubDomains;
+        x-ratelimit-limit-requests '10000'; x-ratelimit-limit-tokens '12000000';
+        x-ratelimit-remaining-requests '9999'; x-ratelimit-remaining-tokens '11999779';
+        x-ratelimit-reset-requests 6ms; x-ratelimit-reset-tokens 1ms;
+        x-request-id req_d8c11c52c26f88d19413eb84f91587da)
status:
code: 200
message: OK
258 tests/cassettes/test_custom_converter_cls.yaml Normal file
@@ -0,0 +1,258 @@
interactions:
- request:
    body: '{"messages": [{"content": "You are Scorer. You''re an expert scorer, specialized
      in scoring titles.\nYour personal goal is: Score the titleTo give my best complete
      final answer to the task use the exact following format:\n\nThought: I now can
      give a great answer\nFinal Answer: my best complete final answer to the task.\nYour
      final answer must be the great and the most complete as possible, it must be
      outcome described.\n\nI MUST use these formats, my job depends on it!\nCurrent
      Task: Give me an integer score between 1-5 for the following title: ''The impact
      of AI in the future of work''\n\nThis is the expect criteria for your final
      answer: The score of the title. \n you MUST return the actual complete content
      as the final answer, not a summary.\n\nBegin! This is VERY important to you,
      use the tools available and give your best Final Answer, your job depends on
      it!\n\nThought:\n", "role": "user"}], "model": "gpt-4o", "n": 1, "stop": ["\nObservation"],
      "stream": true, "temperature": 0.7}'
    headers: (accept application/json; accept-encoding gzip, deflate; connection keep-alive;
      content-length '997'; content-type application/json; host api.openai.com;
      user-agent OpenAI/Python 1.35.10; x-stainless-arch arm64; x-stainless-async 'false';
      x-stainless-lang python; x-stainless-os MacOS; x-stainless-package-version 1.35.10;
      x-stainless-runtime CPython; x-stainless-runtime-version 3.11.7)
    method: POST
    uri: https://api.openai.com/v1/chat/completions
  response:
    body:
      string: 'data: ... (stream chatcmpl-9hrsMKHuOkxqftWK9DtuC10VCJ17t, created 1720242230,
        model gpt-4o-2024-05-13; its chunks assemble "Thought: I now can give a great
        answer\nFinal Answer: 4" and end with a "finish_reason":"stop" chunk and data: [DONE])

        '
    headers: (CF-Cache-Status DYNAMIC; CF-RAY 89ed0cf0dc05741a-MIA; Connection keep-alive;
      Content-Type text/event-stream; charset=utf-8; Date Sat, 06 Jul 2024 05:03:50 GMT;
      Server cloudflare; Set-Cookie __cf_bm=JI76H4xxreAnMx1JJoPragplAdYdjbDNA68Hr3Cs_0k-1720242230-1.0.1.1-oHSrtm.ejkvCiAHC11lg0MnvmopYZayTZRq09IcH2yh5BA6FyyufGH7Rm59BAz.gdZHc0izmjElXfLiu2bZ_jQ and
      _cfuvid=X4.n0cNP9j1jseIPV4H1aDJu2xrsAwcUI8rY0tbLc40-1720242230210-0.0.1.1-604800000;
      Transfer-Encoding chunked; alt-svc h3=":443"; ma=86400;
      openai-organization crewai-iuxna1; openai-processing-ms '71'; openai-version '2020-10-01';
      strict-transport-security max-age=31536000; includeSubDomains;
      x-ratelimit-limit-requests '10000'; x-ratelimit-limit-tokens '16000000';
      x-ratelimit-remaining-requests '9999'; x-ratelimit-remaining-tokens '15999772';
      x-ratelimit-reset-requests 6ms; x-ratelimit-reset-tokens 0s;
      x-request-id req_8dc1d49d85fcf8e39601e32ca80abd6b)
    status:
      code: 200
      message: OK
- request:
    body: '{"messages": [{"role": "user", "content": "4"}, {"role": "system", "content":
      "I''m gonna convert this raw text into valid JSON."}], "model": "gpt-4o", "tool_choice":
      {"type": "function", "function": {"name": "ScoreOutput"}}, "tools": [{"type":
      "function", "function": {"name": "ScoreOutput", "description": "Correctly extracted
      `ScoreOutput` with all the required parameters with correct types", "parameters":
      {"properties": {"score": {"title": "Score", "type": "integer"}}, "required":
      ["score"], "type": "object"}}}]}'
    headers: (same client headers as above, with content-length '519' and the __cf_bm /
      _cfuvid cookies returned by the previous response)
    method: POST
    uri: https://api.openai.com/v1/chat/completions
  response:
    body:
      string: !!binary |
        H4sIAAAAAAAAA2xSS2/bMAy++1cIPNeF81pT34YBG9A13aHAgL5gKArtKJVFTaKBtkH++yDFi91g
        PggEP34PkN5nQoDeQClAbSWr1pn8euvD6id9OP3V3C6bzZ2qd7eT1f3DykgFF5FB6x0q/se6VNQ6
        g6zJHmHlUTJG1cnVtJjOp9NZkYCWNmgirXGczymPYF4s8smsJ25JKwxQiqdMCCH26Y0R7QbfoBRJ
        JnVaDEE2COVpSAjwZGIHZAg6sLQMFwOoyDLamNp2xowAJjKVksYMxsdvP6qHPUljqsI2duX13ePN
        w2/7HT/+8Lfd45cfYeR3lH53KVDdWXXazwg/9cszMyHAyjZx7xV5/NWx6/iMLgRI33QtWo7RYf8M
        IQ4/Qzk/wKfRQ/a/+qWvDqe1Gmqcp3U42xLU2uqwrTzKkNJCYHJHiyj3ks7XfboIOE+t44rpFW0U
        XPbXg+F/GcBFjzGxNCPOIuvjQXgPjG1Va9ugd16nU0LtqnlRLHG2vppcQ3bI/gIAAP//AwCtLU45
        0wIAAA==
    headers: (CF-Cache-Status DYNAMIC; CF-RAY 89ed0cf40ebc741a-MIA; Connection keep-alive;
      Content-Encoding gzip; Content-Type application/json; Date Sat, 06 Jul 2024 05:03:50 GMT;
      Server cloudflare; Transfer-Encoding chunked; alt-svc h3=":443"; ma=86400;
      openai-organization crewai-iuxna1; openai-processing-ms '186'; openai-version '2020-10-01';
      strict-transport-security max-age=31536000; includeSubDomains;
      x-ratelimit-limit-requests '10000'; x-ratelimit-limit-tokens '16000000';
      x-ratelimit-remaining-requests '9999'; x-ratelimit-remaining-tokens '15999969';
      x-ratelimit-reset-requests 6ms; x-ratelimit-reset-tokens 0s;
      x-request-id req_5da164d15ccb331864aeb5d3562969aa)
    status:
      code: 200
      message: OK
version: 1
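The cassettes above are VCR-style recordings: each test's HTTP exchange with api.openai.com is captured once into a YAML file and replayed on later runs, so the suite stays fast and deterministic and never hits the live API. A minimal sketch of how such a cassette is typically replayed with the vcrpy library is shown below; the cassette path, the fake API key, and the test body are illustrative assumptions, not taken from this diff.

# Illustrative sketch (not from this diff): replaying a recorded cassette with vcrpy.
import os

import vcr

# Requests made inside this context manager are answered from the YAML recording
# instead of reaching api.openai.com.
with vcr.use_cassette(
    "tests/cassettes/test_custom_converter_cls.yaml",  # assumed path for the example
    filter_headers=["authorization"],  # keep real API keys out of the recording
):
    os.environ.setdefault("OPENAI_API_KEY", "sk-fake-key-for-replay")  # placeholder value
    # ... run the code under test here; its chat.completions calls are served
    # from the recorded request/response pairs in the cassette ...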
File diff suppressed because it is too large (4 files)
@@ -1,82 +1,33 @@
interactions:
- request:
-  body: !!binary |
-    ... (base64-encoded OTLP trace payload sent to crewAI telemetry: a "Crew Created" event for a
-    sequential crew with one Scorer agent and one task, crewai_version 0.16.3, Python 3.11.7,
-    macOS-14.3-arm64)
+  body: '{"messages": [{"content": "You are Scorer. You''re an expert scorer, specialized
+    in scoring titles.\nYour personal goal is: Score the titleTo give my best complete
+    final answer to the task use the exact following format:\n\nThought: I now can
+    give a great answer\nFinal Answer: my best complete final answer to the task.\nYour
+    final answer must be the great and the most complete as possible, it must be
+    outcome described.\n\nI MUST use these formats, my job depends on it!\nCurrent
+    Task: Give me an integer score between 1-5 for the following title: ''The impact
+    of AI in the future of work''\n\nThis is the expect criteria for your final
+    answer: The score of the title. \n you MUST return the actual complete content
+    as the final answer, not a summary.\n\nBegin! This is VERY important to you,
+    use the tools available and give your best Final Answer, your job depends on
+    it!\n\nThought:\n", "role": "user"}], "model": "gpt-4o", "n": 1, "stop": ["\nObservation"],
+    "stream": true, "temperature": 0.7}'
-  headers:
-    Accept:
-    - '*/*'
-    Accept-Encoding:
-    - gzip, deflate, br
-    Connection:
-    - keep-alive
-    Content-Length:
-    - '1057'
-    Content-Type:
-    - application/x-protobuf
-    User-Agent:
-    - OTel-OTLP-Exporter-Python/1.23.0
-  method: POST
-  uri: http://telemetry.crewai.com:4318/v1/traces
-  response:
-    body:
-      string: "\n\0"
-    headers:
-      Content-Length:
-      - '2'
-      Content-Type:
-      - application/x-protobuf
-      Date:
-      - Sat, 02 Mar 2024 16:30:10 GMT
-    status:
-      code: 200
-      message: OK
-  - request:
-    body: '{"messages": [{"role": "user", "content": "You are Scorer. You''re an expert
-      scorer, specialized in scoring titles.\nYour personal goal is: Score the titleTo
-      give my best complete final answer to the task use the exact following format:\n\nThought:
-      I now can give a great answer\nFinal Answer: my best complete final answer to
-      the task.\nYour final answer must be the great and the most complete as possible,
-      it must be outcome described.\n\nI MUST use these formats, my job depends on
-      it!\n\nThought: \n\nCurrent Task: Give me an integer score between 1-5 for the
-      following title: ''The impact of AI in the future of work''\n\nThis is the expect
-      criteria for your final answer: The score of the title. \n you MUST return the
-      actual complete content as the final answer, not a summary.\n\nBegin! This is
-      VERY important to you, use the tools available and give your best Final Answer,
-      your job depends on it!\n\nThought: \n"}], "model": "gpt-4", "n": 1, "stop":
-      ["\nObservation"], "stream": true, "temperature": 0.7}'
headers:
accept:
- application/json
accept-encoding:
-  - gzip, deflate, br
+  - gzip, deflate
connection:
- keep-alive
content-length:
-  - '1012'
+  - '997'
content-type:
- application/json
host:
- api.openai.com
user-agent:
-  - OpenAI/Python 1.12.0
+  - OpenAI/Python 1.35.10
x-stainless-arch:
- arm64
x-stainless-async:
@@ -86,7 +37,7 @@ interactions:
x-stainless-os:
- MacOS
x-stainless-package-version:
-  - 1.12.0
+  - 1.35.10
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
|
|||||||
uri: https://api.openai.com/v1/chat/completions
|
uri: https://api.openai.com/v1/chat/completions
|
||||||
response:
|
response:
|
||||||
body:
|
body:
|
||||||
string: 'data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
|
string: 'data: {"id":"chatcmpl-9hEEAWWzwQR7GSV8yjob2TVqUZZFF","object":"chat.completion.chunk","created":1720089822,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"The"},"logprobs":null,"finish_reason":null}]}
|
data: {"id":"chatcmpl-9hEEAWWzwQR7GSV8yjob2TVqUZZFF","object":"chat.completion.chunk","created":1720089822,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"Thought"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
data: {"id":"chatcmpl-9hEEAWWzwQR7GSV8yjob2TVqUZZFF","object":"chat.completion.chunk","created":1720089822,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||||
title"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
data: {"id":"chatcmpl-9hEEAWWzwQR7GSV8yjob2TVqUZZFF","object":"chat.completion.chunk","created":1720089822,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"
|
||||||
''"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"The"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
impact"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
of"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
AI"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
in"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
the"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
future"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
of"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
work"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"''"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
is"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
highly"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
relevant"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
in"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
today"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"''s"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
world"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
where"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
artificial"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
intelligence"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
is"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
increasingly"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
becoming"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
a"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
part"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
of"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
our"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
daily"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
lives"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
It"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
is"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
a"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
future"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"-focused"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
topic"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":","},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
hint"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"ing"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
at"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
the"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
exploration"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
of"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
how"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
AI"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
could"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
potentially"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
transform"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
the"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
workforce"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
The"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
title"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
is"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
clear"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":","},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
concise"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":","},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
and"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
thought"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"-pro"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"v"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"oking"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
However"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":","},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
it"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
could"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
be"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
more"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
engaging"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
if"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
it"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
posed"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
a"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
question"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
or"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
suggested"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
a"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
particular"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
stance"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
or"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
perspective"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
on"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
the"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
issue"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
\n\n"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
Answer"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
I"},"logprobs":null,"finish_reason":null}]}
|
I"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
data: {"id":"chatcmpl-9hEEAWWzwQR7GSV8yjob2TVqUZZFF","object":"chat.completion.chunk","created":1720089822,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"
|
||||||
would"},"logprobs":null,"finish_reason":null}]}
|
now"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
data: {"id":"chatcmpl-9hEEAWWzwQR7GSV8yjob2TVqUZZFF","object":"chat.completion.chunk","created":1720089822,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"
|
||||||
|
can"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hEEAWWzwQR7GSV8yjob2TVqUZZFF","object":"chat.completion.chunk","created":1720089822,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"
|
||||||
give"},"logprobs":null,"finish_reason":null}]}
|
give"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
data: {"id":"chatcmpl-9hEEAWWzwQR7GSV8yjob2TVqUZZFF","object":"chat.completion.chunk","created":1720089822,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"
|
||||||
the"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
title"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
a"},"logprobs":null,"finish_reason":null}]}
|
a"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
data: {"id":"chatcmpl-9hEEAWWzwQR7GSV8yjob2TVqUZZFF","object":"chat.completion.chunk","created":1720089822,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"
|
||||||
score"},"logprobs":null,"finish_reason":null}]}
|
great"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
data: {"id":"chatcmpl-9hEEAWWzwQR7GSV8yjob2TVqUZZFF","object":"chat.completion.chunk","created":1720089822,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"
|
||||||
of"},"logprobs":null,"finish_reason":null}]}
|
answer"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
data: {"id":"chatcmpl-9hEEAWWzwQR7GSV8yjob2TVqUZZFF","object":"chat.completion.chunk","created":1720089822,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"\n"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hEEAWWzwQR7GSV8yjob2TVqUZZFF","object":"chat.completion.chunk","created":1720089822,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hEEAWWzwQR7GSV8yjob2TVqUZZFF","object":"chat.completion.chunk","created":1720089822,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"
|
||||||
|
Answer"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hEEAWWzwQR7GSV8yjob2TVqUZZFF","object":"chat.completion.chunk","created":1720089822,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hEEAWWzwQR7GSV8yjob2TVqUZZFF","object":"chat.completion.chunk","created":1720089822,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"
|
||||||
"},"logprobs":null,"finish_reason":null}]}
|
"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"4"},"logprobs":null,"finish_reason":null}]}
|
data: {"id":"chatcmpl-9hEEAWWzwQR7GSV8yjob2TVqUZZFF","object":"chat.completion.chunk","created":1720089822,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"4"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
data: {"id":"chatcmpl-9hEEAWWzwQR7GSV8yjob2TVqUZZFF","object":"chat.completion.chunk","created":1720089822,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
|
||||||
out"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
of"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"5"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: [DONE]
|
data: [DONE]
|
||||||
@@ -523,83 +114,76 @@ interactions:
      CF-Cache-Status:
      - DYNAMIC
      CF-RAY:
-     - 85e2c5046bf300ef-GRU
+     - 89de840c8999da1f-MIA
-     Cache-Control:
-     - no-cache, must-revalidate
      Connection:
      - keep-alive
      Content-Type:
-     - text/event-stream
+     - text/event-stream; charset=utf-8
      Date:
-     - Sat, 02 Mar 2024 16:30:08 GMT
+     - Thu, 04 Jul 2024 10:43:42 GMT
      Server:
      - cloudflare
      Set-Cookie:
-     - __cf_bm=.grddhTZJdLEJv6GuMnT78ns0mXkV0eSjYeNYhv3dUI-1709397008-1.0.1.1-2sKuVrpd4U3_FRXASU35ZcGL9Q2OZaWMHt3BdGQulV3dZKzn_JyPxnVdEkAM9jjGll2htywS.q3gux93Qh_RjA;
-       path=/; expires=Sat, 02-Mar-24 17:00:08 GMT; domain=.api.openai.com; HttpOnly;
+     - __cf_bm=2Hu0ME75.DNvnesykox1YodaKBclvB_2BKv5lSWfLl0-1720089822-1.0.1.1-4HrDYxtEuvqeonoEr_FZXY8l5Fn2Q1Z08vA.lJKhLWn1bRSsZ.FcJUbAeXPIPGvh6vlSidcfl2yXwKmVe5SyRQ;
+       path=/; expires=Thu, 04-Jul-24 11:13:42 GMT; domain=.api.openai.com; HttpOnly;
        Secure; SameSite=None
-     - _cfuvid=rikyw.atvG.At.eOQtg3heSMQKPGLGM66nq3EHbnUZ0-1709397008590-0.0.1.1-604800000;
+     - _cfuvid=7l4V6F6gFvbHLEpNNJMf6OwfmVG.lHcwS8czqjpjDTY-1720089822487-0.0.1.1-604800000;
        path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
      Transfer-Encoding:
      - chunked
-     access-control-allow-origin:
-     - '*'
      alt-svc:
      - h3=":443"; ma=86400
-     openai-model:
-     - gpt-4-0613
      openai-organization:
      - crewai-iuxna1
      openai-processing-ms:
-     - '250'
+     - '120'
      openai-version:
      - '2020-10-01'
      strict-transport-security:
-     - max-age=15724800; includeSubDomains
+     - max-age=31536000; includeSubDomains
      x-ratelimit-limit-requests:
      - '10000'
      x-ratelimit-limit-tokens:
-     - '300000'
+     - '16000000'
      x-ratelimit-remaining-requests:
      - '9999'
      x-ratelimit-remaining-tokens:
-     - '299768'
+     - '15999772'
      x-ratelimit-reset-requests:
      - 6ms
      x-ratelimit-reset-tokens:
-     - 46ms
+     - 0s
      x-request-id:
-     - req_c05576522c6a556b562ddcf905cd08ee
+     - req_16b0326c59b800232bd3b81982efca66
    status:
      code: 200
      message: OK
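The cassette above records a single streamed chat completion: the request sets `"stream": true`, a `"\nObservation"` stop sequence and `temperature: 0.7`, and the recorded response body is the raw SSE stream, one `data:` chunk per token. The re-recording only swaps `gpt-4-0613` for `gpt-4o-2024-05-13` and the newer OpenAI client. As a rough illustration of the kind of call that produces such a recording, here is a minimal sketch with the OpenAI Python client; the prompt is abbreviated and the variable names are illustrative, so this is not crewAI's internal code:

```python
# Minimal sketch of a streamed chat completion like the one recorded above.
# Model, stop sequence and temperature are taken from the recorded request;
# the prompt is abbreviated and the helper names are illustrative.
from openai import OpenAI

client = OpenAI()  # reads OPENAI_API_KEY from the environment

stream = client.chat.completions.create(
    model="gpt-4o",
    messages=[{"role": "user", "content": "Give me an integer score between 1-5 ..."}],
    stop=["\nObservation"],
    temperature=0.7,
    stream=True,
)

answer = ""
for chunk in stream:
    # Each `data:` line in the cassette corresponds to one chunk here.
    delta = chunk.choices[0].delta.content
    if delta:
        answer += delta

print(answer)  # e.g. "Thought: I now can give a great answer\nFinal Answer: 4"
```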
- request:
-   body: '{"messages": [{"role": "user", "content": "I would give the title a score
-     of 4 out of 5."}, {"role": "system", "content": "I''m gonna convert this raw
-     text into valid JSON."}], "model": "gpt-4", "tool_choice": {"type": "function",
-     "function": {"name": "ScoreOutput"}}, "tools": [{"type": "function", "function":
-     {"name": "ScoreOutput", "description": "Correctly extracted `ScoreOutput` with
-     all the required parameters with correct types", "parameters": {"properties":
-     {"score": {"title": "Score", "type": "integer"}}, "required": ["score"], "type":
-     "object"}}}]}'
+   body: '{"messages": [{"role": "user", "content": "4"}, {"role": "system", "content":
+     "I''m gonna convert this raw text into valid JSON."}], "model": "gpt-4o", "tool_choice":
+     {"type": "function", "function": {"name": "ScoreOutput"}}, "tools": [{"type":
+     "function", "function": {"name": "ScoreOutput", "description": "Correctly extracted
+     `ScoreOutput` with all the required parameters with correct types", "parameters":
+     {"properties": {"score": {"title": "Score", "type": "integer"}}, "required":
+     ["score"], "type": "object"}}}]}'
    headers:
      accept:
      - application/json
      accept-encoding:
-     - gzip, deflate, br
+     - gzip, deflate
      connection:
      - keep-alive
      content-length:
-     - '562'
+     - '519'
      content-type:
      - application/json
      cookie:
-     - __cf_bm=.grddhTZJdLEJv6GuMnT78ns0mXkV0eSjYeNYhv3dUI-1709397008-1.0.1.1-2sKuVrpd4U3_FRXASU35ZcGL9Q2OZaWMHt3BdGQulV3dZKzn_JyPxnVdEkAM9jjGll2htywS.q3gux93Qh_RjA;
-       _cfuvid=rikyw.atvG.At.eOQtg3heSMQKPGLGM66nq3EHbnUZ0-1709397008590-0.0.1.1-604800000
+     - __cf_bm=2Hu0ME75.DNvnesykox1YodaKBclvB_2BKv5lSWfLl0-1720089822-1.0.1.1-4HrDYxtEuvqeonoEr_FZXY8l5Fn2Q1Z08vA.lJKhLWn1bRSsZ.FcJUbAeXPIPGvh6vlSidcfl2yXwKmVe5SyRQ;
+       _cfuvid=7l4V6F6gFvbHLEpNNJMf6OwfmVG.lHcwS8czqjpjDTY-1720089822487-0.0.1.1-604800000
      host:
      - api.openai.com
      user-agent:
-     - OpenAI/Python 1.12.0
+     - OpenAI/Python 1.35.10
      x-stainless-arch:
      - arm64
      x-stainless-async:
@@ -609,7 +193,7 @@ interactions:
      x-stainless-os:
      - MacOS
      x-stainless-package-version:
-     - 1.12.0
+     - 1.35.10
      x-stainless-runtime:
      - CPython
      x-stainless-runtime-version:
@@ -619,60 +203,55 @@ interactions:
  response:
    body:
      string: !!binary |
-       [base64: the brotli-compressed JSON response body from the original gpt-4 recording]
+       [base64: the gzip-compressed JSON response body from the re-recorded gpt-4o response]
    headers:
      CF-Cache-Status:
      - DYNAMIC
      CF-RAY:
-     - 85e2c51b9f7900ef-GRU
+     - 89de84103ab8da1f-MIA
-     Cache-Control:
-     - no-cache, must-revalidate
      Connection:
      - keep-alive
      Content-Encoding:
-     - br
+     - gzip
      Content-Type:
      - application/json
      Date:
-     - Sat, 02 Mar 2024 16:30:12 GMT
+     - Thu, 04 Jul 2024 10:43:43 GMT
      Server:
      - cloudflare
      Transfer-Encoding:
      - chunked
-     access-control-allow-origin:
-     - '*'
      alt-svc:
      - h3=":443"; ma=86400
-     openai-model:
-     - gpt-4-0613
      openai-organization:
      - crewai-iuxna1
      openai-processing-ms:
-     - '518'
+     - '235'
      openai-version:
      - '2020-10-01'
      strict-transport-security:
-     - max-age=15724800; includeSubDomains
+     - max-age=31536000; includeSubDomains
      x-ratelimit-limit-requests:
      - '10000'
      x-ratelimit-limit-tokens:
-     - '300000'
+     - '16000000'
      x-ratelimit-remaining-requests:
      - '9999'
      x-ratelimit-remaining-tokens:
-     - '299958'
+     - '15999969'
      x-ratelimit-reset-requests:
      - 6ms
      x-ratelimit-reset-tokens:
-     - 8ms
+     - 0s
      x-request-id:
-     - req_6cf57525fad025cb1fe9bddb5d669312
+     - req_5d283f799cb8d11c8280f1c07e4132a1
    status:
      code: 200
      message: OK
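The second interaction in this cassette is the structured-output step: the raw answer ("4") is sent back together with a `ScoreOutput` tool definition, and `tool_choice` forces that function, so the model has to return `{"score": <int>}` as tool-call arguments. A hedged sketch of that pattern with the OpenAI Python client follows; it mirrors the recorded request body rather than the library's actual wiring (the wording of the tool description suggests the `instructor` library is involved, but that is an inference):

```python
# Sketch of the forced-function-call extraction recorded above. The tool
# schema and messages are copied from the recorded request body; the rest
# is illustrative, not crewAI's internal code.
import json
from openai import OpenAI

client = OpenAI()

tools = [{
    "type": "function",
    "function": {
        "name": "ScoreOutput",
        "description": "Correctly extracted `ScoreOutput` with all the required "
                       "parameters with correct types",
        "parameters": {
            "properties": {"score": {"title": "Score", "type": "integer"}},
            "required": ["score"],
            "type": "object",
        },
    },
}]

response = client.chat.completions.create(
    model="gpt-4o",
    messages=[
        {"role": "user", "content": "4"},
        {"role": "system", "content": "I'm gonna convert this raw text into valid JSON."},
    ],
    tools=tools,
    tool_choice={"type": "function", "function": {"name": "ScoreOutput"}},
)

# The forced tool call carries the extracted structure as JSON arguments.
args = response.choices[0].message.tool_calls[0].function.arguments
print(json.loads(args))  # expected shape: {"score": 4}
```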
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
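The three suppressed files above are likely more of the same: VCR-style cassettes re-recorded against `gpt-4o`. The hunk below also drops the old telemetry POST to `telemetry.crewai.com` from the recording. For context, this is roughly how such a cassette can be replayed in a test instead of calling the live API; the sketch assumes plain `vcrpy`, and the cassette name and helper function are hypothetical, not the repository's actual test code:

```python
# Minimal sketch, assuming vcrpy, of replaying a recorded cassette in a test.
import vcr

my_vcr = vcr.VCR(
    cassette_library_dir="tests/cassettes",   # where cassettes like these live
    record_mode="none",                       # replay only; never re-record in CI
    filter_headers=["authorization"],         # keep API keys out of the YAML
)

def test_scoring_uses_recorded_responses():
    with my_vcr.use_cassette("test_agent_scoring.yaml"):
        # Any OpenAI HTTP call made in here is served from the cassette,
        # so the test is deterministic and needs no real API key.
        result = run_scoring_crew()  # hypothetical helper under test
        assert "4" in str(result)
```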
@@ -1,82 +1,33 @@
interactions:
- request:
-   body: !!binary |
-     [base64: an OTel protobuf payload for a "Crew Created" telemetry event
-      (service.name crewAI-telemetry, crewai_version 0.16.3, python_version 3.11.7,
-      a sequential crew with one Scorer agent and one task, macOS platform details)]
+   body: '{"messages": [{"content": "You are Scorer. You''re an expert scorer, specialized
+     in scoring titles.\nYour personal goal is: Score the titleTo give my best complete
+     final answer to the task use the exact following format:\n\nThought: I now can
+     give a great answer\nFinal Answer: my best complete final answer to the task.\nYour
+     final answer must be the great and the most complete as possible, it must be
+     outcome described.\n\nI MUST use these formats, my job depends on it!\nCurrent
+     Task: Give me an integer score between 1-5 for the following title: ''The impact
+     of AI in the future of work''\n\nThis is the expect criteria for your final
+     answer: The score of the title. \n you MUST return the actual complete content
+     as the final answer, not a summary.\n\nBegin! This is VERY important to you,
+     use the tools available and give your best Final Answer, your job depends on
+     it!\n\nThought:\n", "role": "user"}], "model": "gpt-4o", "n": 1, "stop": ["\nObservation"],
+     "stream": true, "temperature": 0.7}'
-   headers:
-     Accept:
-     - '*/*'
-     Accept-Encoding:
-     - gzip, deflate, br
-     Connection:
-     - keep-alive
-     Content-Length:
-     - '1057'
-     Content-Type:
-     - application/x-protobuf
-     User-Agent:
-     - OTel-OTLP-Exporter-Python/1.23.0
-   method: POST
-   uri: http://telemetry.crewai.com:4318/v1/traces
-   response:
-     body:
-       string: "\n\0"
-     headers:
-       Content-Length:
-       - '2'
-       Content-Type:
-       - application/x-protobuf
-       Date:
-       - Sat, 02 Mar 2024 16:30:45 GMT
-     status:
-       code: 200
-       message: OK
- - request:
-   body: '{"messages": [{"role": "user", "content": "You are Scorer. You''re an expert
-     scorer, specialized in scoring titles.\nYour personal goal is: Score the titleTo
-     give my best complete final answer to the task use the exact following format:\n\nThought:
-     I now can give a great answer\nFinal Answer: my best complete final answer to
-     the task.\nYour final answer must be the great and the most complete as possible,
-     it must be outcome described.\n\nI MUST use these formats, my job depends on
-     it!\n\nThought: \n\nCurrent Task: Give me an integer score between 1-5 for the
-     following title: ''The impact of AI in the future of work''\n\nThis is the expect
-     criteria for your final answer: The score of the title. \n you MUST return the
-     actual complete content as the final answer, not a summary.\n\nBegin! This is
-     VERY important to you, use the tools available and give your best Final Answer,
-     your job depends on it!\n\nThought: \n"}], "model": "gpt-4", "n": 1, "stop":
-     ["\nObservation"], "stream": true, "temperature": 0.7}'
    headers:
      accept:
      - application/json
      accept-encoding:
-     - gzip, deflate, br
+     - gzip, deflate
      connection:
      - keep-alive
      content-length:
-     - '1012'
+     - '997'
      content-type:
      - application/json
      host:
      - api.openai.com
      user-agent:
-     - OpenAI/Python 1.12.0
+     - OpenAI/Python 1.35.10
      x-stainless-arch:
      - arm64
      x-stainless-async:
@@ -86,7 +37,7 @@ interactions:
      x-stainless-os:
      - MacOS
      x-stainless-package-version:
-     - 1.12.0
+     - 1.35.10
      x-stainless-runtime:
      - CPython
      x-stainless-runtime-version:
@@ -95,265 +46,64 @@ interactions:
    uri: https://api.openai.com/v1/chat/completions
  response:
    body:
-     string: 'data: {"id":"chatcmpl-8yMY0ocCeiaktjmCcObeikdUZPfLi","object":"chat.completion.chunk","created":1709397044,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
+     string: 'data: {"id":"chatcmpl-9hED3nsqYzSiiorjVCmuS96jevbba","object":"chat.completion.chunk","created":1720089753,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}

      [token-by-token streamed chunks: the removed gpt-4-0613 recording streams
      "Considering the relevance of the topic, the straightforwardness of the title and
      its ability to captivate attention due to the current global interest in AI and
      its future implications, I would ..."; the added gpt-4o-2024-05-13 recording begins
      "Thought: I now ..."]
data: {"id":"chatcmpl-8yMY0ocCeiaktjmCcObeikdUZPfLi","object":"chat.completion.chunk","created":1709397044,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
data: {"id":"chatcmpl-9hED3nsqYzSiiorjVCmuS96jevbba","object":"chat.completion.chunk","created":1720089753,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"
|
||||||
|
can"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hED3nsqYzSiiorjVCmuS96jevbba","object":"chat.completion.chunk","created":1720089753,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"
|
||||||
give"},"logprobs":null,"finish_reason":null}]}
|
give"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMY0ocCeiaktjmCcObeikdUZPfLi","object":"chat.completion.chunk","created":1709397044,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
data: {"id":"chatcmpl-9hED3nsqYzSiiorjVCmuS96jevbba","object":"chat.completion.chunk","created":1720089753,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"
|
||||||
it"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMY0ocCeiaktjmCcObeikdUZPfLi","object":"chat.completion.chunk","created":1709397044,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
a"},"logprobs":null,"finish_reason":null}]}
|
a"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMY0ocCeiaktjmCcObeikdUZPfLi","object":"chat.completion.chunk","created":1709397044,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
data: {"id":"chatcmpl-9hED3nsqYzSiiorjVCmuS96jevbba","object":"chat.completion.chunk","created":1720089753,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"
|
||||||
high"},"logprobs":null,"finish_reason":null}]}
|
great"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMY0ocCeiaktjmCcObeikdUZPfLi","object":"chat.completion.chunk","created":1709397044,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
data: {"id":"chatcmpl-9hED3nsqYzSiiorjVCmuS96jevbba","object":"chat.completion.chunk","created":1720089753,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"
|
||||||
score"},"logprobs":null,"finish_reason":null}]}
|
answer"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMY0ocCeiaktjmCcObeikdUZPfLi","object":"chat.completion.chunk","created":1709397044,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]}
|
data: {"id":"chatcmpl-9hED3nsqYzSiiorjVCmuS96jevbba","object":"chat.completion.chunk","created":1720089753,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"\n"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMY0ocCeiaktjmCcObeikdUZPfLi","object":"chat.completion.chunk","created":1709397044,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
data: {"id":"chatcmpl-9hED3nsqYzSiiorjVCmuS96jevbba","object":"chat.completion.chunk","created":1720089753,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]}
|
||||||
\n\n"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMY0ocCeiaktjmCcObeikdUZPfLi","object":"chat.completion.chunk","created":1709397044,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]}
|
data: {"id":"chatcmpl-9hED3nsqYzSiiorjVCmuS96jevbba","object":"chat.completion.chunk","created":1720089753,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMY0ocCeiaktjmCcObeikdUZPfLi","object":"chat.completion.chunk","created":1709397044,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
Answer"},"logprobs":null,"finish_reason":null}]}
|
Answer"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMY0ocCeiaktjmCcObeikdUZPfLi","object":"chat.completion.chunk","created":1709397044,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
data: {"id":"chatcmpl-9hED3nsqYzSiiorjVCmuS96jevbba","object":"chat.completion.chunk","created":1720089753,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMY0ocCeiaktjmCcObeikdUZPfLi","object":"chat.completion.chunk","created":1709397044,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
data: {"id":"chatcmpl-9hED3nsqYzSiiorjVCmuS96jevbba","object":"chat.completion.chunk","created":1720089753,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"
|
||||||
The"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMY0ocCeiaktjmCcObeikdUZPfLi","object":"chat.completion.chunk","created":1709397044,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
title"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMY0ocCeiaktjmCcObeikdUZPfLi","object":"chat.completion.chunk","created":1709397044,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
''"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMY0ocCeiaktjmCcObeikdUZPfLi","object":"chat.completion.chunk","created":1709397044,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"The"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMY0ocCeiaktjmCcObeikdUZPfLi","object":"chat.completion.chunk","created":1709397044,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
impact"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMY0ocCeiaktjmCcObeikdUZPfLi","object":"chat.completion.chunk","created":1709397044,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
of"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMY0ocCeiaktjmCcObeikdUZPfLi","object":"chat.completion.chunk","created":1709397044,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
AI"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMY0ocCeiaktjmCcObeikdUZPfLi","object":"chat.completion.chunk","created":1709397044,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
in"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMY0ocCeiaktjmCcObeikdUZPfLi","object":"chat.completion.chunk","created":1709397044,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
the"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMY0ocCeiaktjmCcObeikdUZPfLi","object":"chat.completion.chunk","created":1709397044,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
future"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMY0ocCeiaktjmCcObeikdUZPfLi","object":"chat.completion.chunk","created":1709397044,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
of"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMY0ocCeiaktjmCcObeikdUZPfLi","object":"chat.completion.chunk","created":1709397044,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
work"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMY0ocCeiaktjmCcObeikdUZPfLi","object":"chat.completion.chunk","created":1709397044,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"''"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMY0ocCeiaktjmCcObeikdUZPfLi","object":"chat.completion.chunk","created":1709397044,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
scores"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMY0ocCeiaktjmCcObeikdUZPfLi","object":"chat.completion.chunk","created":1709397044,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
a"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMY0ocCeiaktjmCcObeikdUZPfLi","object":"chat.completion.chunk","created":1709397044,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
"},"logprobs":null,"finish_reason":null}]}
|
"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMY0ocCeiaktjmCcObeikdUZPfLi","object":"chat.completion.chunk","created":1709397044,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"4"},"logprobs":null,"finish_reason":null}]}
|
data: {"id":"chatcmpl-9hED3nsqYzSiiorjVCmuS96jevbba","object":"chat.completion.chunk","created":1720089753,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"4"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMY0ocCeiaktjmCcObeikdUZPfLi","object":"chat.completion.chunk","created":1709397044,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
data: {"id":"chatcmpl-9hED3nsqYzSiiorjVCmuS96jevbba","object":"chat.completion.chunk","created":1720089753,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
|
||||||
out"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMY0ocCeiaktjmCcObeikdUZPfLi","object":"chat.completion.chunk","created":1709397044,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
of"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMY0ocCeiaktjmCcObeikdUZPfLi","object":"chat.completion.chunk","created":1709397044,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMY0ocCeiaktjmCcObeikdUZPfLi","object":"chat.completion.chunk","created":1709397044,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"5"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMY0ocCeiaktjmCcObeikdUZPfLi","object":"chat.completion.chunk","created":1709397044,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-8yMY0ocCeiaktjmCcObeikdUZPfLi","object":"chat.completion.chunk","created":1709397044,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: [DONE]
|
data: [DONE]
|
||||||
@@ -364,60 +114,53 @@ interactions:
       CF-Cache-Status:
       - DYNAMIC
       CF-RAY:
-      - 85e2c5e93ad900d7-GRU
+      - 89de825e5af34c0d-MIA
-      Cache-Control:
-      - no-cache, must-revalidate
       Connection:
       - keep-alive
       Content-Type:
-      - text/event-stream
+      - text/event-stream; charset=utf-8
       Date:
-      - Sat, 02 Mar 2024 16:30:45 GMT
+      - Thu, 04 Jul 2024 10:42:33 GMT
       Server:
       - cloudflare
       Set-Cookie:
-      - __cf_bm=29uZO.hVYKS5gYK5X6QpUY7F78cYAZS5Re6SlF1k0IE-1709397045-1.0.1.1-0R8xzeW3JkesWRs7meP9a8me.3lSCf1DoFGhgIP2AQEe8B6hF98sBDkh4JwHVXhf7vAWVGSdnfsBCbZpuXm8wQ;
-        path=/; expires=Sat, 02-Mar-24 17:00:45 GMT; domain=.api.openai.com; HttpOnly;
-        Secure; SameSite=None
-      - _cfuvid=NyDbNO5ZfRQYnj5YFoyLtS.qoTk641EufpijridgyHo-1709397045493-0.0.1.1-604800000;
+      - __cf_bm=XNWuPzzvDCR6vj8X4pwaZq_1zuK8TwGTpIqHQc0EbWw-1720089753-1.0.1.1-f61Hw2P4yRgm8mOUN2RhRrvndJQwdxwAS5T8bsfbqXLXSlbSKQONzTKvwOzVDnhHR3gy56nDVq.uAOE1cvvDDQ;
+        path=/; expires=Thu, 04-Jul-24 11:12:33 GMT; domain=.api.openai.com; HttpOnly;
+        Secure; SameSite=None
+      - _cfuvid=ZIy1L3HZwWapuY1KTKhqiOCKReYrjZwlhU2BUCsEpUs-1720089753602-0.0.1.1-604800000;
         path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
       Transfer-Encoding:
       - chunked
-      access-control-allow-origin:
-      - '*'
       alt-svc:
       - h3=":443"; ma=86400
-      openai-model:
-      - gpt-4-0613
       openai-organization:
       - crewai-iuxna1
       openai-processing-ms:
-      - '234'
+      - '135'
       openai-version:
       - '2020-10-01'
       strict-transport-security:
-      - max-age=15724800; includeSubDomains
+      - max-age=31536000; includeSubDomains
       x-ratelimit-limit-requests:
       - '10000'
       x-ratelimit-limit-tokens:
-      - '300000'
+      - '16000000'
       x-ratelimit-remaining-requests:
       - '9999'
       x-ratelimit-remaining-tokens:
-      - '299768'
+      - '15999772'
       x-ratelimit-reset-requests:
       - 6ms
       x-ratelimit-reset-tokens:
-      - 46ms
+      - 0s
       x-request-id:
-      - req_42d5dbd362d2ba837a2d9a0303d93854
+      - req_ae342ee6e026c54b420e69ccb8235272
     status:
       code: 200
       message: OK
 - request:
-    body: '{"messages": [{"role": "user", "content": "The title ''The impact of AI
-      in the future of work'' scores a 4 out of 5."}, {"role": "system", "content":
-      "I''m gonna convert this raw text into valid JSON."}], "model": "gpt-4", "tool_choice":
+    body: '{"messages": [{"role": "user", "content": "4"}, {"role": "system", "content":
+      "I''m gonna convert this raw text into valid JSON."}], "model": "gpt-4o", "tool_choice":
       {"type": "function", "function": {"name": "ScoreOutput"}}, "tools": [{"type":
       "function", "function": {"name": "ScoreOutput", "description": "Correctly extracted
      `ScoreOutput` with all the required parameters with correct types", "parameters":
@@ -427,20 +170,20 @@ interactions:
       accept:
       - application/json
       accept-encoding:
-      - gzip, deflate, br
+      - gzip, deflate
       connection:
       - keep-alive
       content-length:
-      - '588'
+      - '519'
       content-type:
       - application/json
       cookie:
-      - __cf_bm=29uZO.hVYKS5gYK5X6QpUY7F78cYAZS5Re6SlF1k0IE-1709397045-1.0.1.1-0R8xzeW3JkesWRs7meP9a8me.3lSCf1DoFGhgIP2AQEe8B6hF98sBDkh4JwHVXhf7vAWVGSdnfsBCbZpuXm8wQ;
-        _cfuvid=NyDbNO5ZfRQYnj5YFoyLtS.qoTk641EufpijridgyHo-1709397045493-0.0.1.1-604800000
+      - __cf_bm=XNWuPzzvDCR6vj8X4pwaZq_1zuK8TwGTpIqHQc0EbWw-1720089753-1.0.1.1-f61Hw2P4yRgm8mOUN2RhRrvndJQwdxwAS5T8bsfbqXLXSlbSKQONzTKvwOzVDnhHR3gy56nDVq.uAOE1cvvDDQ;
+        _cfuvid=ZIy1L3HZwWapuY1KTKhqiOCKReYrjZwlhU2BUCsEpUs-1720089753602-0.0.1.1-604800000
       host:
       - api.openai.com
       user-agent:
-      - OpenAI/Python 1.12.0
+      - OpenAI/Python 1.35.10
       x-stainless-arch:
       - arm64
       x-stainless-async:
@@ -450,7 +193,7 @@ interactions:
       x-stainless-os:
       - MacOS
       x-stainless-package-version:
-      - 1.12.0
+      - 1.35.10
       x-stainless-runtime:
       - CPython
       x-stainless-runtime-version:
|
   response:
     body:
       string: !!binary |
-        [base64-encoded binary response body (Content-Encoding: br)]
+        [base64-encoded binary response body (Content-Encoding: gzip)]
     headers:
       CF-Cache-Status:
       - DYNAMIC
       CF-RAY:
-      - 85e2c5fe7ccc00d7-GRU
+      - 89de8261dc6c4c0d-MIA
-      Cache-Control:
-      - no-cache, must-revalidate
       Connection:
       - keep-alive
       Content-Encoding:
-      - br
+      - gzip
       Content-Type:
       - application/json
       Date:
-      - Sat, 02 Mar 2024 16:30:48 GMT
+      - Thu, 04 Jul 2024 10:42:34 GMT
       Server:
       - cloudflare
       Transfer-Encoding:
       - chunked
-      access-control-allow-origin:
-      - '*'
       alt-svc:
       - h3=":443"; ma=86400
-      openai-model:
-      - gpt-4-0613
       openai-organization:
       - crewai-iuxna1
       openai-processing-ms:
-      - '516'
+      - '193'
       openai-version:
       - '2020-10-01'
       strict-transport-security:
-      - max-age=15724800; includeSubDomains
+      - max-age=31536000; includeSubDomains
       x-ratelimit-limit-requests:
       - '10000'
       x-ratelimit-limit-tokens:
-      - '300000'
+      - '16000000'
       x-ratelimit-remaining-requests:
       - '9999'
       x-ratelimit-remaining-tokens:
-      - '299951'
+      - '15999969'
       x-ratelimit-reset-requests:
       - 6ms
       x-ratelimit-reset-tokens:
-      - 9ms
+      - 0s
       x-request-id:
-      - req_f5cc008a441e72f5c41ca547cac30f03
+      - req_7cf743796bf4a52626b923135ff8f936
     status:
       code: 200
       message: OK
@@ -1,168 +1,33 @@
 interactions:
 - request:
-    body: !!binary |
-      [base64-encoded application/x-protobuf payload: OpenTelemetry (OTLP) trace export for crewAI telemetry]
-    headers:
-      Accept:
-      - '*/*'
-      Accept-Encoding:
-      - gzip, deflate, br
-      Connection:
-      - keep-alive
-      Content-Length:
-      - '5930'
-      Content-Type:
-      - application/x-protobuf
-      User-Agent:
-      - OTel-OTLP-Exporter-Python/1.24.0
-    method: POST
-    uri: https://telemetry.crewai.com:4319/v1/traces
-  response:
-    body:
-      string: "\n\0"
-    headers:
-      Content-Length:
-      - '2'
-      Content-Type:
-      - application/x-protobuf
-      Date:
-      - Tue, 14 May 2024 01:26:13 GMT
-    status:
-      code: 200
-      message: OK
-- request:
-    body: '{"messages": [{"role": "user", "content": "You are Scorer. You''re an expert
-      scorer, specialized in scoring titles.\nYour personal goal is: Score the titleTo
-      give my best complete final answer to the task use the exact following format:\n\nThought:
-      I now can give a great answer\nFinal Answer: my best complete final answer to
-      the task.\nYour final answer must be the great and the most complete as possible,
-      it must be outcome described.\n\nI MUST use these formats, my job depends on
-      it!\nCurrent Task: Give me an integer score between 1-5 for the following title:
-      ''The impact of AI in the future of work''\n\nThis is the expect criteria for
-      your final answer: The score of the title. \n you MUST return the actual complete
-      content as the final answer, not a summary.\n\nBegin! This is VERY important
-      to you, use the tools available and give your best Final Answer, your job depends
-      on it!\n\nThought:\n"}], "model": "gpt-4", "n": 1, "stop": ["\nObservation"],
+    body: '{"messages": [{"content": "You are Scorer. You''re an expert scorer, specialized
+      in scoring titles.\nYour personal goal is: Score the titleTo give my best complete
+      final answer to the task use the exact following format:\n\nThought: I now can
+      give a great answer\nFinal Answer: my best complete final answer to the task.\nYour
+      final answer must be the great and the most complete as possible, it must be
+      outcome described.\n\nI MUST use these formats, my job depends on it!\nCurrent
+      Task: Give me an integer score between 1-5 for the following title: ''The impact
+      of AI in the future of work''\n\nThis is the expect criteria for your final
+      answer: The score of the title. \n you MUST return the actual complete content
+      as the final answer, not a summary.\n\nBegin! This is VERY important to you,
+      use the tools available and give your best Final Answer, your job depends on
+      it!\n\nThought:\n", "role": "user"}], "model": "gpt-4o", "n": 1, "stop": ["\nObservation"],
       "stream": true, "temperature": 0.7}'
     headers:
       accept:
       - application/json
       accept-encoding:
-      - gzip, deflate, br
+      - gzip, deflate
       connection:
       - keep-alive
       content-length:
-      - '996'
+      - '997'
       content-type:
       - application/json
       host:
       - api.openai.com
       user-agent:
-      - OpenAI/Python 1.29.0
+      - OpenAI/Python 1.35.10
       x-stainless-arch:
       - arm64
       x-stainless-async:
@@ -172,7 +37,7 @@ interactions:
       x-stainless-os:
       - MacOS
       x-stainless-package-version:
-      - 1.29.0
+      - 1.35.10
       x-stainless-runtime:
       - CPython
       x-stainless-runtime-version:
@@ -181,268 +46,64 @@ interactions:
     uri: https://api.openai.com/v1/chat/completions
   response:
     body:
-      string: [old recording: SSE stream from model gpt-4-0613, id chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0,
-        created 1715649973. After an initial chunk carrying role "assistant", the per-token
-        "data:" chunks spell out "After considering the relevance, depth, and potential interest
-        in the topic, as well as the clarity and simplicity of the title 'The impact of AI in
-        the future of work'.\n\nFinal Answer: Based on my evaluation, I … give the title 'The
-        impact of AI in the future of work' a score of …"]
+      string: [new recording: SSE stream from model gpt-4o-2024-05-13, id chatcmpl-9hry2om1JBkreHpDHFbfD2YDtg2oA,
+        created 1720242582, system_fingerprint fp_d576307f90. After an initial chunk carrying
+        role "assistant", the per-token "data:" chunks spell out "Thought: I now can give a
+        great answer\nFinal Answer: …"]
|
||||||
"},"logprobs":null,"finish_reason":null}]}
|
"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"4"},"logprobs":null,"finish_reason":null}]}
|
data: {"id":"chatcmpl-9hry2om1JBkreHpDHFbfD2YDtg2oA","object":"chat.completion.chunk","created":1720242582,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"4"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
data: {"id":"chatcmpl-9hry2om1JBkreHpDHFbfD2YDtg2oA","object":"chat.completion.chunk","created":1720242582,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
|
||||||
out"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
of"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
|
||||||
"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"5"},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
|
|
||||||
|
|
||||||
|
|
||||||
data: [DONE]
|
data: [DONE]
|
||||||
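The cassettes in this hunk record OpenAI chat completions streamed as server-sent events: each "data:" line carries one JSON chunk whose choices[0].delta.content holds the next text fragment, and the stream ends with "data: [DONE]". As an illustrative sketch only (not CrewAI's code; the model and messages here are placeholders), this is roughly how such a stream is consumed with the 1.x OpenAI Python client named in the recorded user-agent headers:

from openai import OpenAI

client = OpenAI()  # reads OPENAI_API_KEY from the environment
stream = client.chat.completions.create(
    model="gpt-4o",
    messages=[{"role": "user", "content": "Score the title out of 5."}],
    stream=True,
)
for chunk in stream:  # each chunk corresponds to one "data:" line in the cassette
    delta = chunk.choices[0].delta.content
    if delta is not None:
        print(delta, end="")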
@@ -453,20 +114,20 @@ interactions:
|
|||||||
CF-Cache-Status:
|
CF-Cache-Status:
|
||||||
- DYNAMIC
|
- DYNAMIC
|
||||||
CF-RAY:
|
CF-RAY:
|
||||||
- 8837194f3860a686-MIA
|
- 89ed158b8bf0a566-MIA
|
||||||
Connection:
|
Connection:
|
||||||
- keep-alive
|
- keep-alive
|
||||||
Content-Type:
|
Content-Type:
|
||||||
- text/event-stream; charset=utf-8
|
- text/event-stream; charset=utf-8
|
||||||
Date:
|
Date:
|
||||||
- Tue, 14 May 2024 01:26:14 GMT
|
- Sat, 06 Jul 2024 05:09:42 GMT
|
||||||
Server:
|
Server:
|
||||||
- cloudflare
|
- cloudflare
|
||||||
Set-Cookie:
|
Set-Cookie:
|
||||||
- __cf_bm=o_N1eUBbqWWHvbE622tXs5BC9yN1X1bxp3npQXI46JU-1715649974-1.0.1.1-ALOCo.oJW08V4.F5WqVArJDbxakTERtLPfwUlDjTYrrg_WiJox.Pw_n.PnDfEdxa52BbaPI8M80h2S3wdJy2sw;
|
- __cf_bm=5C3MG9ni0I5bZoHGzfXZq16obGaD1INR3_.wX4CRPAk-1720242582-1.0.1.1-fZiD6L1FdBiC0gqcmBK9_IaHhbHPQi4z04fxYQtoDc9KbYqPvxm_sxP_RkuZX_AyPkHgu85IRq9E6MUAZJGzwQ;
|
||||||
path=/; expires=Tue, 14-May-24 01:56:14 GMT; domain=.api.openai.com; HttpOnly;
|
path=/; expires=Sat, 06-Jul-24 05:39:42 GMT; domain=.api.openai.com; HttpOnly;
|
||||||
Secure; SameSite=None
|
Secure; SameSite=None
|
||||||
- _cfuvid=vYif.qp5YND_OTGCG3IgxPXT3sqDTSpeVEIUapP.n.k-1715649974367-0.0.1.1-604800000;
|
- _cfuvid=YP7Z3XnHPKQDU2nOhrLzkxr8InOv42HLWchJd1ogneQ-1720242582534-0.0.1.1-604800000;
|
||||||
path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
|
path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
|
||||||
Transfer-Encoding:
|
Transfer-Encoding:
|
||||||
- chunked
|
- chunked
|
||||||
@@ -475,55 +136,54 @@ interactions:
|
|||||||
openai-organization:
|
openai-organization:
|
||||||
- crewai-iuxna1
|
- crewai-iuxna1
|
||||||
openai-processing-ms:
|
openai-processing-ms:
|
||||||
- '510'
|
- '90'
|
||||||
openai-version:
|
openai-version:
|
||||||
- '2020-10-01'
|
- '2020-10-01'
|
||||||
strict-transport-security:
|
strict-transport-security:
|
||||||
- max-age=15724800; includeSubDomains
|
- max-age=31536000; includeSubDomains
|
||||||
x-ratelimit-limit-requests:
|
x-ratelimit-limit-requests:
|
||||||
- '10000'
|
- '10000'
|
||||||
x-ratelimit-limit-tokens:
|
x-ratelimit-limit-tokens:
|
||||||
- '300000'
|
- '16000000'
|
||||||
x-ratelimit-remaining-requests:
|
x-ratelimit-remaining-requests:
|
||||||
- '9997'
|
- '9999'
|
||||||
x-ratelimit-remaining-tokens:
|
x-ratelimit-remaining-tokens:
|
||||||
- '299098'
|
- '15999772'
|
||||||
x-ratelimit-reset-requests:
|
x-ratelimit-reset-requests:
|
||||||
- 13ms
|
- 6ms
|
||||||
x-ratelimit-reset-tokens:
|
x-ratelimit-reset-tokens:
|
||||||
- 180ms
|
- 0s
|
||||||
x-request-id:
|
x-request-id:
|
||||||
- req_26f0f1e02a54a0aaf7e8b96ba7d56837
|
- req_36d283adbca77945609f0da658047ba0
|
||||||
status:
|
status:
|
||||||
code: 200
|
code: 200
|
||||||
message: OK
|
message: OK
|
||||||
- request:
|
- request:
|
||||||
body: '{"messages": [{"role": "user", "content": "Based on my evaluation, I give
|
body: '{"messages": [{"role": "user", "content": "4"}, {"role": "system", "content":
|
||||||
the title ''The impact of AI in the future of work'' a score of 4 out of 5."},
|
"I''m gonna convert this raw text into valid JSON."}], "model": "gpt-4o", "tool_choice":
|
||||||
{"role": "system", "content": "I''m gonna convert this raw text into valid JSON."}],
|
{"type": "function", "function": {"name": "ScoreOutput"}}, "tools": [{"type":
|
||||||
"model": "gpt-4", "tool_choice": {"type": "function", "function": {"name": "ScoreOutput"}},
|
"function", "function": {"name": "ScoreOutput", "description": "Correctly extracted
|
||||||
"tools": [{"type": "function", "function": {"name": "ScoreOutput", "description":
|
`ScoreOutput` with all the required parameters with correct types", "parameters":
|
||||||
"Correctly extracted `ScoreOutput` with all the required parameters with correct
|
{"properties": {"score": {"title": "Score", "type": "integer"}}, "required":
|
||||||
types", "parameters": {"properties": {"score": {"title": "Score", "type": "integer"}},
|
["score"], "type": "object"}}}]}'
|
||||||
"required": ["score"], "type": "object"}}}]}'
|
|
||||||
headers:
|
headers:
|
||||||
accept:
|
accept:
|
||||||
- application/json
|
- application/json
|
||||||
accept-encoding:
|
accept-encoding:
|
||||||
- gzip, deflate, br
|
- gzip, deflate
|
||||||
connection:
|
connection:
|
||||||
- keep-alive
|
- keep-alive
|
||||||
content-length:
|
content-length:
|
||||||
- '621'
|
- '519'
|
||||||
content-type:
|
content-type:
|
||||||
- application/json
|
- application/json
|
||||||
cookie:
|
cookie:
|
||||||
- __cf_bm=o_N1eUBbqWWHvbE622tXs5BC9yN1X1bxp3npQXI46JU-1715649974-1.0.1.1-ALOCo.oJW08V4.F5WqVArJDbxakTERtLPfwUlDjTYrrg_WiJox.Pw_n.PnDfEdxa52BbaPI8M80h2S3wdJy2sw;
|
- __cf_bm=5C3MG9ni0I5bZoHGzfXZq16obGaD1INR3_.wX4CRPAk-1720242582-1.0.1.1-fZiD6L1FdBiC0gqcmBK9_IaHhbHPQi4z04fxYQtoDc9KbYqPvxm_sxP_RkuZX_AyPkHgu85IRq9E6MUAZJGzwQ;
|
||||||
_cfuvid=vYif.qp5YND_OTGCG3IgxPXT3sqDTSpeVEIUapP.n.k-1715649974367-0.0.1.1-604800000
|
_cfuvid=YP7Z3XnHPKQDU2nOhrLzkxr8InOv42HLWchJd1ogneQ-1720242582534-0.0.1.1-604800000
|
||||||
host:
|
host:
|
||||||
- api.openai.com
|
- api.openai.com
|
||||||
user-agent:
|
user-agent:
|
||||||
- OpenAI/Python 1.29.0
|
- OpenAI/Python 1.35.10
|
||||||
x-stainless-arch:
|
x-stainless-arch:
|
||||||
- arm64
|
- arm64
|
||||||
x-stainless-async:
|
x-stainless-async:
|
||||||
@@ -533,7 +193,7 @@ interactions:
|
|||||||
x-stainless-os:
|
x-stainless-os:
|
||||||
- MacOS
|
- MacOS
|
||||||
x-stainless-package-version:
|
x-stainless-package-version:
|
||||||
- 1.29.0
|
- 1.35.10
|
||||||
x-stainless-runtime:
|
x-stainless-runtime:
|
||||||
- CPython
|
- CPython
|
||||||
x-stainless-runtime-version:
|
x-stainless-runtime-version:
|
||||||
@@ -543,26 +203,27 @@ interactions:
|
|||||||
response:
|
response:
|
||||||
body:
|
body:
|
||||||
string: !!binary |
|
string: !!binary |
|
||||||
g2QBAMTINfMxamd1CeMeEI6KoYFref5vBi4N2oOiMbVNv3QLhr1AppZoAgF1drbOzezQ65E14dmt
|
H4sIAAAAAAAAA2xS30/bMBB+z19h3XMzhbShJW8wiW1MGogixDRQ5DpOanB8ln1hK1X/d+Q0NKFa
|
||||||
ADjNKcFsksRMO1Ud3qXHqqnW3c7ZUq/M23vj7mI9eXr198urN1YEQJvOiiz2xGsuBurUGghycvlZ
|
HqzTfff90F22EWOgSsgZiDUn0Vgdn63dJl3d3rSv5xuf/b7Di/PLq/JUVYub5wwmgYGrZynog/VF
|
||||||
kVOi2W92e53hsN8X3tE2LxQlOHax2qk2es12r9OJnWZFoMSXAIBtaQTw3fsuJRqVVsQLQ6AEfgTQ
|
YGO1JIVmDwsnOcmgejJPk3SWZou0AxospQ602lI8wziAcZLFJ9OeuEYlpIec/YkYY2zbvSGiKeU/
|
||||||
W1VQgkkI0xATE1kB5MLWTUqYUinhXLRW/WWJUuRQDm7ruUX6a6LU3/3z09v8MH9W3WF6MLi0g3h7
|
yFky+eg00nteS8gPQ4yBQx06wL1XnrghmAygQEPShNSm1XoEEKIuBNd6MN5/21E97IlrXdxePlzN
|
||||||
/LJO+pyQembtBoV6YJRiUFmdJIwANIme0FtPmfXFXRldGVUYAE38e0pw+22Ab4bM+uKbEp1vs6fq
|
/s6/rpZv3x+WF9P7nz++vdz/8iO/vfTGdoGq1ojDfkb4oZ8fmTEGhjcddynQyeuWbEtHdMaAu7pt
|
||||||
xr2oYz/0+J5iGZUdO2/TsGA4o9YBfzeEjVGCIVpHY+0F8GPJammcS3Teahf/op0XJlCi2ejpBLhp
|
pKEQHbaP4MPwI+SzHXwa3UX/q5/6andYq8baOlz5oy1BpYzy68JJ7ru04Ant3iLIPXXnaz9dBKzD
|
||||||
cghsmHJfPJATmc1mVwySYByBlb/R1IwL7/zUrrLYCwMD
|
xlJB+CJNEFz014PhfxnArMcIiesRJ4v6eOA3nmRTVMrU0lmnulNCZYsym59Ok3l1lkC0i94BAAD/
|
||||||
|
/wMAylx2sdMCAAA=
|
||||||
headers:
|
headers:
|
||||||
CF-Cache-Status:
|
CF-Cache-Status:
|
||||||
- DYNAMIC
|
- DYNAMIC
|
||||||
CF-RAY:
|
CF-RAY:
|
||||||
- 88371965caa0a686-MIA
|
- 89ed158dee46a566-MIA
|
||||||
Connection:
|
Connection:
|
||||||
- keep-alive
|
- keep-alive
|
||||||
Content-Encoding:
|
Content-Encoding:
|
||||||
- br
|
- gzip
|
||||||
Content-Type:
|
Content-Type:
|
||||||
- application/json
|
- application/json
|
||||||
Date:
|
Date:
|
||||||
- Tue, 14 May 2024 01:26:18 GMT
|
- Sat, 06 Jul 2024 05:09:42 GMT
|
||||||
Server:
|
Server:
|
||||||
- cloudflare
|
- cloudflare
|
||||||
Transfer-Encoding:
|
Transfer-Encoding:
|
||||||
@@ -572,25 +233,25 @@ interactions:
|
|||||||
openai-organization:
|
openai-organization:
|
||||||
- crewai-iuxna1
|
- crewai-iuxna1
|
||||||
openai-processing-ms:
|
openai-processing-ms:
|
||||||
- '994'
|
- '144'
|
||||||
openai-version:
|
openai-version:
|
||||||
- '2020-10-01'
|
- '2020-10-01'
|
||||||
strict-transport-security:
|
strict-transport-security:
|
||||||
- max-age=15724800; includeSubDomains
|
- max-age=31536000; includeSubDomains
|
||||||
x-ratelimit-limit-requests:
|
x-ratelimit-limit-requests:
|
||||||
- '10000'
|
- '10000'
|
||||||
x-ratelimit-limit-tokens:
|
x-ratelimit-limit-tokens:
|
||||||
- '300000'
|
- '16000000'
|
||||||
x-ratelimit-remaining-requests:
|
x-ratelimit-remaining-requests:
|
||||||
- '9999'
|
- '9999'
|
||||||
x-ratelimit-remaining-tokens:
|
x-ratelimit-remaining-tokens:
|
||||||
- '299944'
|
- '15999969'
|
||||||
x-ratelimit-reset-requests:
|
x-ratelimit-reset-requests:
|
||||||
- 6ms
|
- 6ms
|
||||||
x-ratelimit-reset-tokens:
|
x-ratelimit-reset-tokens:
|
||||||
- 11ms
|
- 0s
|
||||||
x-request-id:
|
x-request-id:
|
||||||
- req_a9b61926ad5fdaf004dd4b99738bcadb
|
- req_990566332b9b1851c581486c0a4da0e6
|
||||||
status:
|
status:
|
||||||
code: 200
|
code: 200
|
||||||
message: OK
|
message: OK
|
||||||
|
|||||||
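The request bodies in the hunk above force a tool call named ScoreOutput so the model's raw reply ("4") can be parsed into a typed integer score. As a hedged illustration of that pattern only, not CrewAI's converter code, the equivalent direct call with the OpenAI client looks roughly like this:

from openai import OpenAI

client = OpenAI()
response = client.chat.completions.create(
    model="gpt-4o",
    messages=[
        {"role": "user", "content": "4"},
        {"role": "system", "content": "I'm gonna convert this raw text into valid JSON."},
    ],
    tools=[{
        "type": "function",
        "function": {
            "name": "ScoreOutput",
            "parameters": {
                "type": "object",
                "properties": {"score": {"title": "Score", "type": "integer"}},
                "required": ["score"],
            },
        },
    }],
    tool_choice={"type": "function", "function": {"name": "ScoreOutput"}},
)
# The forced tool call returns its arguments as a JSON string, e.g. '{"score": 4}'.
arguments = response.choices[0].message.tool_calls[0].function.arguments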
@@ -0,0 +1,146 @@
|
|||||||
|
interactions:
|
||||||
|
- request:
|
||||||
|
body: !!binary |
|
||||||
|
CtoyCiQKIgoMc2VydmljZS5uYW1lEhIKEGNyZXdBSS10ZWxlbWV0cnkSsTIKEgoQY3Jld2FpLnRl
|
||||||
|
bGVtZXRyeRJpChCqSW3lVdefwRBLNXi6Xhm8EgijxkgGOCJOMSoQVG9vbCBVc2FnZSBFcnJvcjAB
|
||||||
|
OQh+giEMv94XQYgthSEMv94XShsKDmNyZXdhaV92ZXJzaW9uEgkKBzAuMzAuMTF6AhgBhQEAAQAA
|
||||||
|
EmkKEITX9xWzBQ0KqeidbMtD1zESCLEDq6L4lZGPKhBUb29sIFVzYWdlIEVycm9yMAE5OF5weAy/
|
||||||
|
3hdBqOZyeAy/3hdKGwoOY3Jld2FpX3ZlcnNpb24SCQoHMC4zMC4xMXoCGAGFAQABAAASgAIKEPfc
|
||||||
|
duYrDUwkgj4RUzAtD2cSCFdOYgVbkCuLKg5UYXNrIEV4ZWN1dGlvbjABOciuqqkLv94XQcA1NNcM
|
||||||
|
v94XSjEKB3Rhc2tfaWQSJgokMzg2ZTBkMWQtMWVjNy00M2QzLTg3MWItNWU3ZjBiZjBkOWVmSi0K
|
||||||
|
FWZvcm1hdHRlZF9kZXNjcmlwdGlvbhIUChJIb3cgbXVjaCBpcyAyICsgMj9KQwoZZm9ybWF0dGVk
|
||||||
|
X2V4cGVjdGVkX291dHB1dBImCiRUaGUgcmVzdWx0IG9mIHRoZSBzdW0gYXMgYW4gaW50ZWdlci5K
|
||||||
|
DQoGb3V0cHV0EgMKATR6AhgBhQEAAQAAErYLChC8sKh5qQC3H99eVsmrP4vCEgizWQpCjdW1WSoM
|
||||||
|
Q3JldyBDcmVhdGVkMAE5eIbY2Qy/3hdBSILa2Qy/3hdKGwoOY3Jld2FpX3ZlcnNpb24SCQoHMC4z
|
||||||
|
MC4xMUoaCg5weXRob25fdmVyc2lvbhIICgYzLjExLjVKMQoHY3Jld19pZBImCiQ1MjA2MDE0Zi0w
|
||||||
|
NDUyLTQzNjQtOWZiMS1lNWM4MzZmNmZiYmJKHAoMY3Jld19wcm9jZXNzEgwKCnNlcXVlbnRpYWxK
|
||||||
|
EQoLY3Jld19tZW1vcnkSAhAAShoKFGNyZXdfbnVtYmVyX29mX3Rhc2tzEgIYAUobChVjcmV3X251
|
||||||
|
bWJlcl9vZl9hZ2VudHMSAhgBStIECgtjcmV3X2FnZW50cxLCBAq/BFt7ImlkIjogIjU2OTBjOGJm
|
||||||
|
LWVkMzEtNGU1OC04NzBhLTE2OWM3OTQ1ODdjOSIsICJyb2xlIjogIlJlc2VhcmNoZXIiLCAiZ29h
|
||||||
|
bCI6ICJNYWtlIHRoZSBiZXN0IHJlc2VhcmNoIGFuZCBhbmFseXNpcyBvbiBjb250ZW50IGFib3V0
|
||||||
|
IEFJIGFuZCBBSSBhZ2VudHMiLCAiYmFja3N0b3J5IjogIllvdSdyZSBhbiBleHBlcnQgcmVzZWFy
|
||||||
|
Y2hlciwgc3BlY2lhbGl6ZWQgaW4gdGVjaG5vbG9neSwgc29mdHdhcmUgZW5naW5lZXJpbmcsIEFJ
|
||||||
|
IGFuZCBzdGFydHVwcy4gWW91IHdvcmsgYXMgYSBmcmVlbGFuY2VyIGFuZCBpcyBub3cgd29ya2lu
|
||||||
|
ZyBvbiBkb2luZyByZXNlYXJjaCBhbmQgYW5hbHlzaXMgZm9yIGEgbmV3IGN1c3RvbWVyLiIsICJ2
|
||||||
|
ZXJib3NlPyI6IGZhbHNlLCAibWF4X2l0ZXIiOiAyNSwgIm1heF9ycG0iOiBudWxsLCAiaTE4biI6
|
||||||
|
IG51bGwsICJsbG0iOiAie1wibmFtZVwiOiBudWxsLCBcIm1vZGVsX25hbWVcIjogXCJncHQtNG9c
|
||||||
|
IiwgXCJ0ZW1wZXJhdHVyZVwiOiAwLjcsIFwiY2xhc3NcIjogXCJDaGF0T3BlbkFJXCJ9IiwgImRl
|
||||||
|
bGVnYXRpb25fZW5hYmxlZD8iOiB0cnVlLCAidG9vbHNfbmFtZXMiOiBbXX1dStACCgpjcmV3X3Rh
|
||||||
|
c2tzEsECCr4CW3siaWQiOiAiZTE5ODM4Y2EtOGNhMi00MzhiLThiNmMtNDFmM2VlYjJmMDA1Iiwg
|
||||||
|
ImRlc2NyaXB0aW9uIjogIkxvb2sgYXQgdGhlIGF2YWlsYWJsZSBkYXRhIG5kIGdpdmUgbWUgYSBz
|
||||||
|
ZW5zZSBvbiB0aGUgdG90YWwgbnVtYmVyIG9mIHNhbGVzLiIsICJleHBlY3RlZF9vdXRwdXQiOiAi
|
||||||
|
VGhlIHRvdGFsIG51bWJlciBvZiBzYWxlcyBhcyBhbiBpbnRlZ2VyIiwgImFzeW5jX2V4ZWN1dGlv
|
||||||
|
bj8iOiBmYWxzZSwgImh1bWFuX2lucHV0PyI6IGZhbHNlLCAiYWdlbnRfcm9sZSI6ICJSZXNlYXJj
|
||||||
|
aGVyIiwgImNvbnRleHQiOiBudWxsLCAidG9vbHNfbmFtZXMiOiBbXX1dSioKCHBsYXRmb3JtEh4K
|
||||||
|
HG1hY09TLTE0LjEuMS1hcm02NC1hcm0tNjRiaXRKHAoQcGxhdGZvcm1fcmVsZWFzZRIICgYyMy4x
|
||||||
|
LjBKGwoPcGxhdGZvcm1fc3lzdGVtEggKBkRhcndpbkp7ChBwbGF0Zm9ybV92ZXJzaW9uEmcKZURh
|
||||||
|
cndpbiBLZXJuZWwgVmVyc2lvbiAyMy4xLjA6IE1vbiBPY3QgIDkgMjE6Mjc6MjQgUERUIDIwMjM7
|
||||||
|
IHJvb3Q6eG51LTEwMDAyLjQxLjl+Ni9SRUxFQVNFX0FSTTY0X1Q2MDAwSgoKBGNwdXMSAhgKegIY
|
||||||
|
AYUBAAEAABL/CAoQ96LQBkWdya2FFyVOx27tLRIIXv2S0EgHgE4qDENyZXcgQ3JlYXRlZDABOdDJ
|
||||||
|
bNwMv94XQRiybtwMv94XShsKDmNyZXdhaV92ZXJzaW9uEgkKBzAuMzAuMTFKGgoOcHl0aG9uX3Zl
|
||||||
|
cnNpb24SCAoGMy4xMS41SjEKB2NyZXdfaWQSJgokOGM5OTQzNDMtYzcyYi00MzIwLTlkNTItNzI2
|
||||||
|
MTlkNmNmZTc3ShwKDGNyZXdfcHJvY2VzcxIMCgpzZXF1ZW50aWFsShEKC2NyZXdfbWVtb3J5EgIQ
|
||||||
|
AEoaChRjcmV3X251bWJlcl9vZl90YXNrcxICGAFKGwoVY3Jld19udW1iZXJfb2ZfYWdlbnRzEgIY
|
||||||
|
AUr+AgoLY3Jld19hZ2VudHMS7gIK6wJbeyJpZCI6ICI1YThjOGRjZS00YmQyLTQyMWEtYjUzZC1k
|
||||||
|
ZjE5ODEzMDFiYjEiLCAicm9sZSI6ICJSZXNlYXJjaGVyIiwgImdvYWwiOiAiQmUgc3VwZXIgZW1w
|
||||||
|
YXRoZXRpYy4iLCAiYmFja3N0b3J5IjogIllvdSdyZSBsb3ZlIHRvIHNleSBob3dkeS4iLCAidmVy
|
||||||
|
Ym9zZT8iOiBmYWxzZSwgIm1heF9pdGVyIjogMjUsICJtYXhfcnBtIjogbnVsbCwgImkxOG4iOiBu
|
||||||
|
dWxsLCAibGxtIjogIntcIm5hbWVcIjogbnVsbCwgXCJtb2RlbF9uYW1lXCI6IFwiZ3B0LTRvXCIs
|
||||||
|
IFwidGVtcGVyYXR1cmVcIjogMC43LCBcImNsYXNzXCI6IFwiQ2hhdE9wZW5BSVwifSIsICJkZWxl
|
||||||
|
Z2F0aW9uX2VuYWJsZWQ/IjogZmFsc2UsICJ0b29sc19uYW1lcyI6IFtdfV1K7QEKCmNyZXdfdGFz
|
||||||
|
a3MS3gEK2wFbeyJpZCI6ICJmNWEwMjU3Ni1iNTYxLTRiNzQtOTNhMC0yNmYxMDc2YWI5M2MiLCAi
|
||||||
|
ZGVzY3JpcHRpb24iOiAic2F5IGhvd2R5IiwgImV4cGVjdGVkX291dHB1dCI6ICJIb3dkeSEiLCAi
|
||||||
|
YXN5bmNfZXhlY3V0aW9uPyI6IGZhbHNlLCAiaHVtYW5faW5wdXQ/IjogZmFsc2UsICJhZ2VudF9y
|
||||||
|
b2xlIjogIlJlc2VhcmNoZXIiLCAiY29udGV4dCI6IG51bGwsICJ0b29sc19uYW1lcyI6IFtdfV1K
|
||||||
|
KgoIcGxhdGZvcm0SHgocbWFjT1MtMTQuMS4xLWFybTY0LWFybS02NGJpdEocChBwbGF0Zm9ybV9y
|
||||||
|
ZWxlYXNlEggKBjIzLjEuMEobCg9wbGF0Zm9ybV9zeXN0ZW0SCAoGRGFyd2luSnsKEHBsYXRmb3Jt
|
||||||
|
X3ZlcnNpb24SZwplRGFyd2luIEtlcm5lbCBWZXJzaW9uIDIzLjEuMDogTW9uIE9jdCAgOSAyMToy
|
||||||
|
NzoyNCBQRFQgMjAyMzsgcm9vdDp4bnUtMTAwMDIuNDEuOX42L1JFTEVBU0VfQVJNNjRfVDYwMDBK
|
||||||
|
CgoEY3B1cxICGAp6AhgBhQEAAQAAErgCChCZQqdNoQoZm/bgnzRGX2OfEgguN/7szlsLYioOVGFz
|
||||||
|
ayBFeGVjdXRpb24wATmwOXfcDL/eF0FoPgTeDL/eF0oxCgd0YXNrX2lkEiYKJGY1YTAyNTc2LWI1
|
||||||
|
NjEtNGI3NC05M2EwLTI2ZjEwNzZhYjkzY0okChVmb3JtYXR0ZWRfZGVzY3JpcHRpb24SCwoJc2F5
|
||||||
|
IGhvd2R5SiUKGWZvcm1hdHRlZF9leHBlY3RlZF9vdXRwdXQSCAoGSG93ZHkhSmwKBm91dHB1dBJi
|
||||||
|
CmBIb3dkeSEgSSBob3BlIHRoaXMgbWVzc2FnZSBmaW5kcyB5b3Ugd2VsbCBhbmQgYnJpbmdzIGEg
|
||||||
|
c21pbGUgdG8geW91ciBmYWNlLiBIYXZlIGEgZmFudGFzdGljIGRheSF6AhgBhQEAAQAAEv8IChBG
|
||||||
|
RLGvZYMTRLcpQuhEq3MREggMeXM0BUGtiSoMQ3JldyBDcmVhdGVkMAE5KKNZ4Qy/3hdBWI9b4Qy/
|
||||||
|
3hdKGwoOY3Jld2FpX3ZlcnNpb24SCQoHMC4zMC4xMUoaCg5weXRob25fdmVyc2lvbhIICgYzLjEx
|
||||||
|
LjVKMQoHY3Jld19pZBImCiQxN2Q3YmMzYi04MjE2LTQ4MWMtOGU2YS0zN2U4MDA3M2E1YmJKHAoM
|
||||||
|
Y3Jld19wcm9jZXNzEgwKCnNlcXVlbnRpYWxKEQoLY3Jld19tZW1vcnkSAhAAShoKFGNyZXdfbnVt
|
||||||
|
YmVyX29mX3Rhc2tzEgIYAUobChVjcmV3X251bWJlcl9vZl9hZ2VudHMSAhgBSv4CCgtjcmV3X2Fn
|
||||||
|
ZW50cxLuAgrrAlt7ImlkIjogIjRiYTAzMmMzLWE1NDktNDQyMS05MjY0LTY1ZmVmODZhMTI3MiIs
|
||||||
|
ICJyb2xlIjogIlJlc2VhcmNoZXIiLCAiZ29hbCI6ICJCZSBzdXBlciBlbXBhdGhldGljLiIsICJi
|
||||||
|
YWNrc3RvcnkiOiAiWW91J3JlIGxvdmUgdG8gc2V5IGhvd2R5LiIsICJ2ZXJib3NlPyI6IGZhbHNl
|
||||||
|
LCAibWF4X2l0ZXIiOiAyNSwgIm1heF9ycG0iOiBudWxsLCAiaTE4biI6IG51bGwsICJsbG0iOiAi
|
||||||
|
e1wibmFtZVwiOiBudWxsLCBcIm1vZGVsX25hbWVcIjogXCJncHQtNG9cIiwgXCJ0ZW1wZXJhdHVy
|
||||||
|
ZVwiOiAwLjcsIFwiY2xhc3NcIjogXCJDaGF0T3BlbkFJXCJ9IiwgImRlbGVnYXRpb25fZW5hYmxl
|
||||||
|
ZD8iOiBmYWxzZSwgInRvb2xzX25hbWVzIjogW119XUrtAQoKY3Jld190YXNrcxLeAQrbAVt7Imlk
|
||||||
|
IjogImJlOGZjMWNmLTVjMTYtNDQ4NS04NDZkLTlmNzkxNjcxZmE0NCIsICJkZXNjcmlwdGlvbiI6
|
||||||
|
ICJzYXkgaG93ZHkiLCAiZXhwZWN0ZWRfb3V0cHV0IjogIkhvd2R5ISIsICJhc3luY19leGVjdXRp
|
||||||
|
b24/IjogZmFsc2UsICJodW1hbl9pbnB1dD8iOiBmYWxzZSwgImFnZW50X3JvbGUiOiAiUmVzZWFy
|
||||||
|
Y2hlciIsICJjb250ZXh0IjogbnVsbCwgInRvb2xzX25hbWVzIjogW119XUoqCghwbGF0Zm9ybRIe
|
||||||
|
ChxtYWNPUy0xNC4xLjEtYXJtNjQtYXJtLTY0Yml0ShwKEHBsYXRmb3JtX3JlbGVhc2USCAoGMjMu
|
||||||
|
MS4wShsKD3BsYXRmb3JtX3N5c3RlbRIICgZEYXJ3aW5KewoQcGxhdGZvcm1fdmVyc2lvbhJnCmVE
|
||||||
|
YXJ3aW4gS2VybmVsIFZlcnNpb24gMjMuMS4wOiBNb24gT2N0ICA5IDIxOjI3OjI0IFBEVCAyMDIz
|
||||||
|
OyByb290OnhudS0xMDAwMi40MS45fjYvUkVMRUFTRV9BUk02NF9UNjAwMEoKCgRjcHVzEgIYCnoC
|
||||||
|
GAGFAQABAAAS3gEKEPaxZgbRdGE/aC7TcpTEU3USCO2gTkTtndVwKg5UYXNrIEV4ZWN1dGlvbjAB
|
||||||
|
OSjeZOEMv94XQXgCO+IMv94XSjEKB3Rhc2tfaWQSJgokYmU4ZmMxY2YtNWMxNi00NDg1LTg0NmQt
|
||||||
|
OWY3OTE2NzFmYTQ0SiQKFWZvcm1hdHRlZF9kZXNjcmlwdGlvbhILCglzYXkgaG93ZHlKJQoZZm9y
|
||||||
|
bWF0dGVkX2V4cGVjdGVkX291dHB1dBIICgZIb3dkeSFKEgoGb3V0cHV0EggKBkhvd2R5IXoCGAGF
|
||||||
|
AQABAAAS6AwKEDPpREZrHZXFHl0sOJRtgesSCBN6824xY2RxKgxDcmV3IENyZWF0ZWQwATl40Vfi
|
||||||
|
DL/eF0HQY1niDL/eF0obCg5jcmV3YWlfdmVyc2lvbhIJCgcwLjMwLjExShoKDnB5dGhvbl92ZXJz
|
||||||
|
aW9uEggKBjMuMTEuNUoxCgdjcmV3X2lkEiYKJGU3YjFlNDdjLTM4OTQtNDUyYi1hNzRjLWZiZTU3
|
||||||
|
NDUxOWQxOEocCgxjcmV3X3Byb2Nlc3MSDAoKc2VxdWVudGlhbEoRCgtjcmV3X21lbW9yeRICEABK
|
||||||
|
GgoUY3Jld19udW1iZXJfb2ZfdGFza3MSAhgBShsKFWNyZXdfbnVtYmVyX29mX2FnZW50cxICGAFK
|
||||||
|
0wQKC2NyZXdfYWdlbnRzEsMECsAEW3siaWQiOiAiYzRmZTBkOTYtNzRkMy00MTk4LWI5MDQtYWFi
|
||||||
|
NzBlZGMxYjQ1IiwgInJvbGUiOiAiUmVzZWFyY2hlciIsICJnb2FsIjogIk1ha2UgdGhlIGJlc3Qg
|
||||||
|
cmVzZWFyY2ggYW5kIGFuYWx5c2lzIG9uIGNvbnRlbnQgYWJvdXQgQUkgYW5kIEFJIGFnZW50cyIs
|
||||||
|
ICJiYWNrc3RvcnkiOiAiWW91J3JlIGFuIGV4cGVydCByZXNlYXJjaGVyLCBzcGVjaWFsaXplZCBp
|
||||||
|
biB0ZWNobm9sb2d5LCBzb2Z0d2FyZSBlbmdpbmVlcmluZywgQUkgYW5kIHN0YXJ0dXBzLiBZb3Ug
|
||||||
|
d29yayBhcyBhIGZyZWVsYW5jZXIgYW5kIGlzIG5vdyB3b3JraW5nIG9uIGRvaW5nIHJlc2VhcmNo
|
||||||
|
IGFuZCBhbmFseXNpcyBmb3IgYSBuZXcgY3VzdG9tZXIuIiwgInZlcmJvc2U/IjogZmFsc2UsICJt
|
||||||
|
YXhfaXRlciI6IDI1LCAibWF4X3JwbSI6IG51bGwsICJpMThuIjogbnVsbCwgImxsbSI6ICJ7XCJu
|
||||||
|
YW1lXCI6IG51bGwsIFwibW9kZWxfbmFtZVwiOiBcImdwdC00b1wiLCBcInRlbXBlcmF0dXJlXCI6
|
||||||
|
IDAuNywgXCJjbGFzc1wiOiBcIkNoYXRPcGVuQUlcIn0iLCAiZGVsZWdhdGlvbl9lbmFibGVkPyI6
|
||||||
|
IGZhbHNlLCAidG9vbHNfbmFtZXMiOiBbXX1dSoEECgpjcmV3X3Rhc2tzEvIDCu8DW3siaWQiOiAi
|
||||||
|
YzE2NTkyZjktZDM2Yy00MDFlLWJiMTEtYzdlMGY5ODkxZjA2IiwgImRlc2NyaXB0aW9uIjogIkNv
|
||||||
|
bWUgdXAgd2l0aCBhIGxpc3Qgb2YgNSBpbnRlcmVzdGluZyBpZGVhcyB0byBleHBsb3JlIGZvciBh
|
||||||
|
biBhcnRpY2xlLCB0aGVuIHdyaXRlIG9uZSBhbWF6aW5nIHBhcmFncmFwaCBoaWdobGlnaHQgZm9y
|
||||||
|
IGVhY2ggaWRlYSB0aGF0IHNob3djYXNlcyBob3cgZ29vZCBhbiBhcnRpY2xlIGFib3V0IHRoaXMg
|
||||||
|
dG9waWMgY291bGQgYmUuIFJldHVybiB0aGUgbGlzdCBvZiBpZGVhcyB3aXRoIHRoZWlyIHBhcmFn
|
||||||
|
cmFwaCBhbmQgeW91ciBub3Rlcy4iLCAiZXhwZWN0ZWRfb3V0cHV0IjogIjUgYnVsbGV0IHBvaW50
|
||||||
|
cyB3aXRoIGEgcGFyYWdyYXBoIGZvciBlYWNoIGlkZWEuIiwgImFzeW5jX2V4ZWN1dGlvbj8iOiBm
|
||||||
|
YWxzZSwgImh1bWFuX2lucHV0PyI6IGZhbHNlLCAiYWdlbnRfcm9sZSI6ICJOb25lIiwgImNvbnRl
|
||||||
|
eHQiOiBudWxsLCAidG9vbHNfbmFtZXMiOiBbXX1dSioKCHBsYXRmb3JtEh4KHG1hY09TLTE0LjEu
|
||||||
|
MS1hcm02NC1hcm0tNjRiaXRKHAoQcGxhdGZvcm1fcmVsZWFzZRIICgYyMy4xLjBKGwoPcGxhdGZv
|
||||||
|
cm1fc3lzdGVtEggKBkRhcndpbkp7ChBwbGF0Zm9ybV92ZXJzaW9uEmcKZURhcndpbiBLZXJuZWwg
|
||||||
|
VmVyc2lvbiAyMy4xLjA6IE1vbiBPY3QgIDkgMjE6Mjc6MjQgUERUIDIwMjM7IHJvb3Q6eG51LTEw
|
||||||
|
MDAyLjQxLjl+Ni9SRUxFQVNFX0FSTTY0X1Q2MDAwSgoKBGNwdXMSAhgKegIYAYUBAAEAAA==
|
||||||
|
headers:
|
||||||
|
Accept:
|
||||||
|
- '*/*'
|
||||||
|
Accept-Encoding:
|
||||||
|
- gzip, deflate, br
|
||||||
|
Connection:
|
||||||
|
- keep-alive
|
||||||
|
Content-Length:
|
||||||
|
- '6493'
|
||||||
|
Content-Type:
|
||||||
|
- application/x-protobuf
|
||||||
|
User-Agent:
|
||||||
|
- OTel-OTLP-Exporter-Python/1.25.0
|
||||||
|
method: POST
|
||||||
|
uri: https://telemetry.crewai.com:4319/v1/traces
|
||||||
|
response:
|
||||||
|
body:
|
||||||
|
string: "\n\0"
|
||||||
|
headers:
|
||||||
|
Content-Length:
|
||||||
|
- '2'
|
||||||
|
Content-Type:
|
||||||
|
- application/x-protobuf
|
||||||
|
Date:
|
||||||
|
- Wed, 03 Jul 2024 15:56:09 GMT
|
||||||
|
status:
|
||||||
|
code: 200
|
||||||
|
message: OK
|
||||||
|
version: 1
|
||||||
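The cassette above is not an OpenAI call but an OTLP trace export: a protobuf payload (the base64 block decodes to spans such as "Crew Created" and "Task Execution" carrying a crewai_version attribute) POSTed to https://telemetry.crewai.com:4319/v1/traces by OTel-OTLP-Exporter-Python/1.25.0. As an illustrative sketch of how such an export is produced with the OpenTelemetry Python SDK, and not CrewAI's actual telemetry module:

from opentelemetry import trace
from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter

# Service name and endpoint are taken from the recorded payload and URI above.
provider = TracerProvider(resource=Resource.create({"service.name": "crewAI-telemetry"}))
provider.add_span_processor(
    BatchSpanProcessor(OTLPSpanExporter(endpoint="https://telemetry.crewai.com:4319/v1/traces"))
)
trace.set_tracer_provider(provider)

tracer = trace.get_tracer("crewai.telemetry")
with tracer.start_as_current_span("Crew Created") as span:  # span name as seen in the payload
    span.set_attribute("crewai_version", "0.30.11")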
File diff suppressed because it is too large
@@ -0,0 +1,335 @@
|
|||||||
|
interactions:
|
||||||
|
- request:
|
||||||
|
body: '{"messages": [{"content": "You are Friendly Neighbor. You are the friendly
|
||||||
|
neighbor\nYour personal goal is: Make everyone feel welcome\nYou ONLY have access
|
||||||
|
to the following tools, and should NEVER make up tools that are not listed here:\n\nDecide
|
||||||
|
Greetings() -> str - Decide Greetings() - Decide what is the appropriate greeting
|
||||||
|
to use\n\nUse the following format:\n\nThought: you should always think about
|
||||||
|
what to do\nAction: the action to take, only one name of [Decide Greetings],
|
||||||
|
just the name, exactly as it''s written.\nAction Input: the input to the action,
|
||||||
|
just a simple python dictionary, enclosed in curly braces, using \" to wrap
|
||||||
|
keys and values.\nObservation: the result of the action\n\nOnce all necessary
|
||||||
|
information is gathered:\n\nThought: I now know the final answer\nFinal Answer:
|
||||||
|
the final answer to the original input question\n\nCurrent Task: Say an appropriate
|
||||||
|
greeting.\n\nThis is the expect criteria for your final answer: The greeting.
|
||||||
|
\n you MUST return the actual complete content as the final answer, not a summary.\n\nBegin!
|
||||||
|
This is VERY important to you, use the tools available and give your best Final
|
||||||
|
Answer, your job depends on it!\n\nThought:\n", "role": "user"}], "model": "gpt-4o",
|
||||||
|
"n": 1, "stop": ["\nObservation"], "stream": true, "temperature": 0.7}'
|
||||||
|
headers:
|
||||||
|
accept:
|
||||||
|
- application/json
|
||||||
|
accept-encoding:
|
||||||
|
- gzip, deflate, br
|
||||||
|
connection:
|
||||||
|
- keep-alive
|
||||||
|
content-length:
|
||||||
|
- '1289'
|
||||||
|
content-type:
|
||||||
|
- application/json
|
||||||
|
host:
|
||||||
|
- api.openai.com
|
||||||
|
user-agent:
|
||||||
|
- OpenAI/Python 1.35.10
|
||||||
|
x-stainless-arch:
|
||||||
|
- arm64
|
||||||
|
x-stainless-async:
|
||||||
|
- 'false'
|
||||||
|
x-stainless-lang:
|
||||||
|
- python
|
||||||
|
x-stainless-os:
|
||||||
|
- MacOS
|
||||||
|
x-stainless-package-version:
|
||||||
|
- 1.35.10
|
||||||
|
x-stainless-runtime:
|
||||||
|
- CPython
|
||||||
|
x-stainless-runtime-version:
|
||||||
|
- 3.11.9
|
||||||
|
method: POST
|
||||||
|
uri: https://api.openai.com/v1/chat/completions
|
||||||
|
response:
|
||||||
|
body:
|
||||||
|
string: 'data: {"id":"chatcmpl-9hQWRAEA0akLHaVsdYQP1dYZ73QJC","object":"chat.completion.chunk","created":1720137083,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQWRAEA0akLHaVsdYQP1dYZ73QJC","object":"chat.completion.chunk","created":1720137083,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"I"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQWRAEA0akLHaVsdYQP1dYZ73QJC","object":"chat.completion.chunk","created":1720137083,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"
|
||||||
|
need"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQWRAEA0akLHaVsdYQP1dYZ73QJC","object":"chat.completion.chunk","created":1720137083,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"
|
||||||
|
to"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQWRAEA0akLHaVsdYQP1dYZ73QJC","object":"chat.completion.chunk","created":1720137083,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"
|
||||||
|
decide"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQWRAEA0akLHaVsdYQP1dYZ73QJC","object":"chat.completion.chunk","created":1720137083,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"
|
||||||
|
on"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQWRAEA0akLHaVsdYQP1dYZ73QJC","object":"chat.completion.chunk","created":1720137083,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"
|
||||||
|
an"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQWRAEA0akLHaVsdYQP1dYZ73QJC","object":"chat.completion.chunk","created":1720137083,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"
|
||||||
|
appropriate"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQWRAEA0akLHaVsdYQP1dYZ73QJC","object":"chat.completion.chunk","created":1720137083,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"
|
||||||
|
greeting"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQWRAEA0akLHaVsdYQP1dYZ73QJC","object":"chat.completion.chunk","created":1720137083,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":".\n\n"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQWRAEA0akLHaVsdYQP1dYZ73QJC","object":"chat.completion.chunk","created":1720137083,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"Action"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQWRAEA0akLHaVsdYQP1dYZ73QJC","object":"chat.completion.chunk","created":1720137083,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQWRAEA0akLHaVsdYQP1dYZ73QJC","object":"chat.completion.chunk","created":1720137083,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"
|
||||||
|
Decide"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQWRAEA0akLHaVsdYQP1dYZ73QJC","object":"chat.completion.chunk","created":1720137083,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"
|
||||||
|
Greetings"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQWRAEA0akLHaVsdYQP1dYZ73QJC","object":"chat.completion.chunk","created":1720137083,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"\n"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQWRAEA0akLHaVsdYQP1dYZ73QJC","object":"chat.completion.chunk","created":1720137083,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"Action"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQWRAEA0akLHaVsdYQP1dYZ73QJC","object":"chat.completion.chunk","created":1720137083,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"
|
||||||
|
Input"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQWRAEA0akLHaVsdYQP1dYZ73QJC","object":"chat.completion.chunk","created":1720137083,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQWRAEA0akLHaVsdYQP1dYZ73QJC","object":"chat.completion.chunk","created":1720137083,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"
|
||||||
|
{}\n"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQWRAEA0akLHaVsdYQP1dYZ73QJC","object":"chat.completion.chunk","created":1720137083,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: [DONE]
|
||||||
|
|
||||||
|
|
||||||
|
'
|
||||||
|
headers:
|
||||||
|
CF-Cache-Status:
|
||||||
|
- DYNAMIC
|
||||||
|
CF-RAY:
|
||||||
|
- 89e305e3c8e382f5-GIG
|
||||||
|
Connection:
|
||||||
|
- keep-alive
|
||||||
|
Content-Type:
|
||||||
|
- text/event-stream; charset=utf-8
|
||||||
|
Date:
|
||||||
|
- Thu, 04 Jul 2024 23:51:24 GMT
|
||||||
|
Server:
|
||||||
|
- cloudflare
|
||||||
|
Set-Cookie:
|
||||||
|
- __cf_bm=y7BtDW9RWNaYoBExulKsMw50ppqr1itieWbcStDWqVc-1720137084-1.0.1.1-EYCEQ9jOimP45.FgXjdzWftUrV1HHm49W4wbcxFhbrj2DVC1LnMbz9.l.c._AqBRgFAE3xVolosvjmoFDAMPYQ;
|
||||||
|
path=/; expires=Fri, 05-Jul-24 00:21:24 GMT; domain=.api.openai.com; HttpOnly;
|
||||||
|
Secure; SameSite=None
|
||||||
|
- _cfuvid=pZBoWQ1_gTeUh2oe6ta.S2mxWtdaHvAtn6m2HszLdwk-1720137084219-0.0.1.1-604800000;
|
||||||
|
path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
|
||||||
|
Transfer-Encoding:
|
||||||
|
- chunked
|
||||||
|
alt-svc:
|
||||||
|
- h3=":443"; ma=86400
|
||||||
|
openai-organization:
|
||||||
|
- crewai-iuxna1
|
||||||
|
openai-processing-ms:
|
||||||
|
- '335'
|
||||||
|
openai-version:
|
||||||
|
- '2020-10-01'
|
||||||
|
strict-transport-security:
|
||||||
|
- max-age=31536000; includeSubDomains
|
||||||
|
x-ratelimit-limit-requests:
|
||||||
|
- '10000'
|
||||||
|
x-ratelimit-limit-tokens:
|
||||||
|
- '16000000'
|
||||||
|
x-ratelimit-remaining-requests:
|
||||||
|
- '9999'
|
||||||
|
x-ratelimit-remaining-tokens:
|
||||||
|
- '15999700'
|
||||||
|
x-ratelimit-reset-requests:
|
||||||
|
- 6ms
|
||||||
|
x-ratelimit-reset-tokens:
|
||||||
|
- 1ms
|
||||||
|
x-request-id:
|
||||||
|
- req_b3f7e3c47df2641d6bef704ef3ae8a0f
|
||||||
|
status:
|
||||||
|
code: 200
|
||||||
|
message: OK
|
||||||
|
- request:
|
||||||
|
body: '{"messages": [{"content": "You are Friendly Neighbor. You are the friendly
|
||||||
|
neighbor\nYour personal goal is: Make everyone feel welcome\nYou ONLY have access
|
||||||
|
to the following tools, and should NEVER make up tools that are not listed here:\n\nDecide
|
||||||
|
Greetings() -> str - Decide Greetings() - Decide what is the appropriate greeting
|
||||||
|
to use\n\nUse the following format:\n\nThought: you should always think about
|
||||||
|
what to do\nAction: the action to take, only one name of [Decide Greetings],
|
||||||
|
just the name, exactly as it''s written.\nAction Input: the input to the action,
|
||||||
|
just a simple python dictionary, enclosed in curly braces, using \" to wrap
|
||||||
|
keys and values.\nObservation: the result of the action\n\nOnce all necessary
|
||||||
|
information is gathered:\n\nThought: I now know the final answer\nFinal Answer:
|
||||||
|
the final answer to the original input question\n\nCurrent Task: Say an appropriate
|
||||||
|
greeting.\n\nThis is the expect criteria for your final answer: The greeting.
|
||||||
|
\n you MUST return the actual complete content as the final answer, not a summary.\n\nBegin!
|
||||||
|
This is VERY important to you, use the tools available and give your best Final
|
||||||
|
Answer, your job depends on it!\n\nThought:\nI need to decide on an appropriate
|
||||||
|
greeting.\n\nAction: Decide Greetings\nAction Input: {}\n\nObservation: Howdy!\n",
|
||||||
|
"role": "user"}], "model": "gpt-4o", "n": 1, "stop": ["\nObservation"], "stream":
|
||||||
|
true, "temperature": 0.7}'
|
||||||
|
headers:
|
||||||
|
accept:
|
||||||
|
- application/json
|
||||||
|
accept-encoding:
|
||||||
|
- gzip, deflate, br
|
||||||
|
connection:
|
||||||
|
- keep-alive
|
||||||
|
content-length:
|
||||||
|
- '1404'
|
||||||
|
content-type:
|
||||||
|
- application/json
|
||||||
|
cookie:
|
||||||
|
- __cf_bm=y7BtDW9RWNaYoBExulKsMw50ppqr1itieWbcStDWqVc-1720137084-1.0.1.1-EYCEQ9jOimP45.FgXjdzWftUrV1HHm49W4wbcxFhbrj2DVC1LnMbz9.l.c._AqBRgFAE3xVolosvjmoFDAMPYQ;
|
||||||
|
_cfuvid=pZBoWQ1_gTeUh2oe6ta.S2mxWtdaHvAtn6m2HszLdwk-1720137084219-0.0.1.1-604800000
|
||||||
|
host:
|
||||||
|
- api.openai.com
|
||||||
|
user-agent:
|
||||||
|
- OpenAI/Python 1.35.10
|
||||||
|
x-stainless-arch:
|
||||||
|
- arm64
|
||||||
|
x-stainless-async:
|
||||||
|
- 'false'
|
||||||
|
x-stainless-lang:
|
||||||
|
- python
|
||||||
|
x-stainless-os:
|
||||||
|
- MacOS
|
||||||
|
x-stainless-package-version:
|
||||||
|
- 1.35.10
|
||||||
|
x-stainless-runtime:
|
||||||
|
- CPython
|
||||||
|
x-stainless-runtime-version:
|
||||||
|
- 3.11.9
|
||||||
|
method: POST
|
||||||
|
uri: https://api.openai.com/v1/chat/completions
|
||||||
|
response:
|
||||||
|
body:
|
||||||
|
string: 'data: {"id":"chatcmpl-9hQWSD5B35ANI9JLmbxUdPECfNd43","object":"chat.completion.chunk","created":1720137084,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQWSD5B35ANI9JLmbxUdPECfNd43","object":"chat.completion.chunk","created":1720137084,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"Thought"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQWSD5B35ANI9JLmbxUdPECfNd43","object":"chat.completion.chunk","created":1720137084,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQWSD5B35ANI9JLmbxUdPECfNd43","object":"chat.completion.chunk","created":1720137084,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||||
|
I"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQWSD5B35ANI9JLmbxUdPECfNd43","object":"chat.completion.chunk","created":1720137084,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||||
|
now"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQWSD5B35ANI9JLmbxUdPECfNd43","object":"chat.completion.chunk","created":1720137084,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||||
|
know"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQWSD5B35ANI9JLmbxUdPECfNd43","object":"chat.completion.chunk","created":1720137084,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||||
|
the"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQWSD5B35ANI9JLmbxUdPECfNd43","object":"chat.completion.chunk","created":1720137084,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||||
|
final"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQWSD5B35ANI9JLmbxUdPECfNd43","object":"chat.completion.chunk","created":1720137084,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||||
|
answer"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQWSD5B35ANI9JLmbxUdPECfNd43","object":"chat.completion.chunk","created":1720137084,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":".\n\n"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQWSD5B35ANI9JLmbxUdPECfNd43","object":"chat.completion.chunk","created":1720137084,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQWSD5B35ANI9JLmbxUdPECfNd43","object":"chat.completion.chunk","created":1720137084,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||||
|
Answer"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQWSD5B35ANI9JLmbxUdPECfNd43","object":"chat.completion.chunk","created":1720137084,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQWSD5B35ANI9JLmbxUdPECfNd43","object":"chat.completion.chunk","created":1720137084,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||||
|
How"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQWSD5B35ANI9JLmbxUdPECfNd43","object":"chat.completion.chunk","created":1720137084,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"dy"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQWSD5B35ANI9JLmbxUdPECfNd43","object":"chat.completion.chunk","created":1720137084,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"!"},"logprobs":null,"finish_reason":null}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-9hQWSD5B35ANI9JLmbxUdPECfNd43","object":"chat.completion.chunk","created":1720137084,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
|
||||||
|
|
||||||
|
|
||||||
|
data: [DONE]
|
||||||
|
|
||||||
|
|
||||||
|
'
|
||||||
|
headers:
|
||||||
|
CF-Cache-Status:
|
||||||
|
- DYNAMIC
|
||||||
|
CF-RAY:
|
||||||
|
- 89e305ea4abc82f5-GIG
|
||||||
|
Connection:
|
||||||
|
- keep-alive
|
||||||
|
Content-Type:
|
||||||
|
- text/event-stream; charset=utf-8
|
||||||
|
Date:
|
||||||
|
- Thu, 04 Jul 2024 23:51:24 GMT
|
||||||
|
Server:
|
||||||
|
- cloudflare
|
||||||
|
Transfer-Encoding:
|
||||||
|
- chunked
|
||||||
|
alt-svc:
|
||||||
|
- h3=":443"; ma=86400
|
||||||
|
openai-organization:
|
||||||
|
- crewai-iuxna1
|
||||||
|
openai-processing-ms:
|
||||||
|
- '91'
|
||||||
|
openai-version:
|
||||||
|
- '2020-10-01'
|
||||||
|
strict-transport-security:
|
||||||
|
- max-age=31536000; includeSubDomains
|
||||||
|
x-ratelimit-limit-requests:
|
||||||
|
- '10000'
|
||||||
|
x-ratelimit-limit-tokens:
|
||||||
|
- '16000000'
|
||||||
|
x-ratelimit-remaining-requests:
|
||||||
|
- '9999'
|
||||||
|
x-ratelimit-remaining-tokens:
|
||||||
|
- '15999673'
|
||||||
|
x-ratelimit-reset-requests:
|
||||||
|
- 6ms
|
||||||
|
x-ratelimit-reset-tokens:
|
||||||
|
- 1ms
|
||||||
|
x-request-id:
|
||||||
|
- req_10032db16fa190e8435947a6aaa700ff
|
||||||
|
status:
|
||||||
|
code: 200
|
||||||
|
message: OK
|
||||||
|
version: 1
|
||||||
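The YAML documents above are VCR cassettes. The test changes below consume them through the vcr pytest marker (provided by a vcrpy-based plugin such as pytest-vcr or pytest-recording), which replays the recorded HTTP interactions instead of hitting the real APIs; filter_headers keeps the Authorization header out of the recordings. A minimal usage sketch, mirroring the decorator used throughout this test file (the test name and body are placeholders):

import pytest

@pytest.mark.vcr(filter_headers=["authorization"])
def test_replays_recorded_interaction():
    # On the recording run the real request is captured into a cassette YAML like
    # the ones above; later runs are served entirely from that cassette.
    ...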
@@ -359,41 +359,125 @@ def test_api_calls_throttling(capsys):
|
|||||||
moveon.assert_called()
|
moveon.assert_called()
|
||||||
|
|
||||||
|
|
||||||
|
# This test is not consistent; on the CI the prompt tokens come back as 0, as the outputs below show
|
||||||
|
# {'usage_metrics': {'completion_tokens': 34, 'prompt_tokens': 0, 'successful_requests': 2, 'total_tokens': 34}} CI OUTPUT
|
||||||
|
# {'usage_metrics': {'completion_tokens': 34, 'prompt_tokens': 314, 'successful_requests': 2, 'total_tokens': 348}}
|
||||||
|
# The issue might be related to the calculate_usage_metrics function
|
||||||
|
# @pytest.mark.vcr(filter_headers=["authorization"])
|
||||||
|
# def test_crew_full_output():
|
||||||
|
# agent = Agent(
|
||||||
|
# role="test role",
|
||||||
|
# goal="test goal",
|
||||||
|
# backstory="test backstory",
|
||||||
|
# allow_delegation=False,
|
||||||
|
# verbose=True,
|
||||||
|
# )
|
||||||
|
|
||||||
|
# task1 = Task(
|
||||||
|
# description="just say hi!",
|
||||||
|
# expected_output="your greeting",
|
||||||
|
# agent=agent,
|
||||||
|
# )
|
||||||
|
# task2 = Task(
|
||||||
|
# description="just say hello!",
|
||||||
|
# expected_output="your greeting",
|
||||||
|
# agent=agent,
|
||||||
|
# )
|
||||||
|
|
||||||
|
# crew = Crew(agents=[agent], tasks=[task1, task2], full_output=True)
|
||||||
|
|
||||||
|
# result = crew.kickoff()
|
||||||
|
|
||||||
|
# assert result == {
|
||||||
|
# "final_output": "Hello!",
|
||||||
|
# "tasks_outputs": [task1.output, task2.output],
|
||||||
|
# "usage_metrics": {
|
||||||
|
# "total_tokens": 348,
|
||||||
|
# "prompt_tokens": 314,
|
||||||
|
# "completion_tokens": 34,
|
||||||
|
# "successful_requests": 2,
|
||||||
|
# },
|
||||||
|
# }
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.vcr(filter_headers=["authorization"])
|
@pytest.mark.vcr(filter_headers=["authorization"])
|
||||||
def test_crew_full_ouput():
|
def test_crew_kickoff_for_each_full_ouput():
|
||||||
|
inputs = [
|
||||||
|
{"topic": "dog"},
|
||||||
|
{"topic": "cat"},
|
||||||
|
{"topic": "apple"},
|
||||||
|
]
|
||||||
|
|
||||||
agent = Agent(
|
agent = Agent(
|
||||||
role="test role",
|
role="{topic} Researcher",
|
||||||
goal="test goal",
|
goal="Express hot takes on {topic}.",
|
||||||
backstory="test backstory",
|
backstory="You have a lot of experience with {topic}.",
|
||||||
allow_delegation=False,
|
|
||||||
verbose=True,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
task1 = Task(
|
task = Task(
|
||||||
description="just say hi!",
|
description="Give me an analysis around {topic}.",
|
||||||
expected_output="your greeting",
|
expected_output="1 bullet point about {topic} that's under 15 words.",
|
||||||
agent=agent,
|
|
||||||
)
|
|
||||||
task2 = Task(
|
|
||||||
description="just say hello!",
|
|
||||||
expected_output="your greeting",
|
|
||||||
agent=agent,
|
agent=agent,
|
||||||
)
|
)
|
||||||
|
|
||||||
crew = Crew(agents=[agent], tasks=[task1, task2], full_output=True)
|
crew = Crew(agents=[agent], tasks=[task], full_output=True)
|
||||||
|
results = crew.kickoff_for_each(inputs=inputs)
|
||||||
|
|
||||||
result = crew.kickoff()
|
assert len(results) == len(inputs)
|
||||||
|
for result in results:
|
||||||
|
assert "usage_metrics" in result
|
||||||
|
assert isinstance(result["usage_metrics"], dict)
|
||||||
|
|
||||||
assert result == {
|
# Assert that all required keys are in usage_metrics and their values are not None
|
||||||
"final_output": "Hello!",
|
for key in [
|
||||||
"tasks_outputs": [task1.output, task2.output],
|
"total_tokens",
|
||||||
"usage_metrics": {
|
"prompt_tokens",
|
||||||
"total_tokens": 517,
|
"completion_tokens",
|
||||||
"prompt_tokens": 466,
|
"successful_requests",
|
||||||
"completion_tokens": 51,
|
]:
|
||||||
"successful_requests": 3,
|
assert key in result["usage_metrics"]
|
||||||
},
|
assert result["usage_metrics"][key] > 0
|
||||||
}
|
|
||||||
|
|
||||||
|
@pytest.mark.vcr(filter_headers=["authorization"])
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_crew_async_kickoff_for_each_full_ouput():
|
||||||
|
inputs = [
|
||||||
|
{"topic": "dog"},
|
||||||
|
{"topic": "cat"},
|
||||||
|
{"topic": "apple"},
|
||||||
|
]
|
||||||
|
|
||||||
|
agent = Agent(
|
||||||
|
role="{topic} Researcher",
|
||||||
|
goal="Express hot takes on {topic}.",
|
||||||
|
backstory="You have a lot of experience with {topic}.",
|
||||||
|
)
|
||||||
|
|
||||||
|
task = Task(
|
||||||
|
description="Give me an analysis around {topic}.",
|
||||||
|
expected_output="1 bullet point about {topic} that's under 15 words.",
|
||||||
|
agent=agent,
|
||||||
|
)
|
||||||
|
|
||||||
|
crew = Crew(agents=[agent], tasks=[task], full_output=True)
|
||||||
|
results = await crew.kickoff_for_each_async(inputs=inputs)
|
||||||
|
|
||||||
|
assert len(results) == len(inputs)
|
||||||
|
for result in results:
|
||||||
|
assert "usage_metrics" in result
|
||||||
|
assert isinstance(result["usage_metrics"], dict)
|
||||||
|
|
||||||
|
# Assert that all required keys are in usage_metrics and their values are not None
|
||||||
|
for key in [
|
||||||
|
"total_tokens",
|
||||||
|
"prompt_tokens",
|
||||||
|
"completion_tokens",
|
||||||
|
"successful_requests",
|
||||||
|
]:
|
||||||
|
assert key in result["usage_metrics"]
|
||||||
|
# TODO: FIX THIS WHEN USAGE METRICS ARE RE-DONE
|
||||||
|
# assert result["usage_metrics"][key] > 0
|
||||||
|
|
||||||
|
|
||||||
def test_agents_rpm_is_never_set_if_crew_max_RPM_is_not_set():
|
def test_agents_rpm_is_never_set_if_crew_max_RPM_is_not_set():
|
||||||
@@ -417,13 +501,13 @@ def test_agents_rpm_is_never_set_if_crew_max_RPM_is_not_set():
|
|||||||
|
|
||||||
|
|
||||||
def test_async_task_execution():
|
def test_async_task_execution():
|
||||||
import threading
|
from concurrent.futures import ThreadPoolExecutor
|
||||||
from unittest.mock import patch
|
from unittest.mock import MagicMock, patch
|
||||||
|
|
||||||
from crewai.tasks.task_output import TaskOutput
|
from crewai.tasks.task_output import TaskOutput
|
||||||
|
|
||||||
list_ideas = Task(
|
list_ideas = Task(
|
||||||
description="Give me a list of 5 interesting ideas to explore for na article, what makes them unique and interesting.",
|
description="Give me a list of 5 interesting ideas to explore for an article, what makes them unique and interesting.",
|
||||||
expected_output="Bullet point list of 5 important events.",
|
expected_output="Bullet point list of 5 important events.",
|
||||||
agent=researcher,
|
agent=researcher,
|
||||||
async_execution=True,
|
async_execution=True,
|
||||||
@@ -449,10 +533,11 @@ def test_async_task_execution():
|
|||||||
|
|
||||||
with patch.object(Agent, "execute_task") as execute:
|
with patch.object(Agent, "execute_task") as execute:
|
||||||
execute.return_value = "ok"
|
execute.return_value = "ok"
|
||||||
with patch.object(threading.Thread, "start") as start:
|
with patch.object(ThreadPoolExecutor, "submit") as submit:
|
||||||
thread = threading.Thread(target=lambda: None, args=()).start()
|
future = MagicMock()
|
||||||
start.return_value = thread
|
future.result.return_value = "ok"
|
||||||
with patch.object(threading.Thread, "join", wraps=thread.join()) as join:
|
submit.return_value = future
|
||||||
|
|
||||||
list_ideas.output = TaskOutput(
|
list_ideas.output = TaskOutput(
|
||||||
description="A 4 paragraph article about AI.",
|
description="A 4 paragraph article about AI.",
|
||||||
raw_output="ok",
|
raw_output="ok",
|
||||||
@@ -464,8 +549,264 @@ def test_async_task_execution():
|
|||||||
agent="writer",
|
agent="writer",
|
||||||
)
|
)
|
||||||
crew.kickoff()
|
crew.kickoff()
|
||||||
start.assert_called()
|
submit.assert_called()
|
||||||
join.assert_called()
|
future.result.assert_called()
|
||||||
|
|
||||||
|
|
||||||
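The hunk above replaces the threading.Thread mocks with ThreadPoolExecutor ones, which implies asynchronous task execution is now dispatched through executor.submit and joined via Future.result. A rough sketch of that pattern, assuming plain callables stand in for the tasks (run_concurrently and task_fns are illustrative names, not CrewAI's API):

from concurrent.futures import ThreadPoolExecutor

def run_concurrently(task_fns):
    with ThreadPoolExecutor() as executor:
        futures = [executor.submit(fn) for fn in task_fns]  # what the test patches as "submit"
        return [future.result() for future in futures]      # what the test asserts through future.result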
|
@pytest.mark.vcr(filter_headers=["authorization"])
|
||||||
|
+def test_kickoff_for_each_single_input():
+    """Tests if kickoff_for_each works with a single input."""
+    from unittest.mock import patch
+
+    inputs = [{"topic": "dog"}]
+    expected_outputs = ["Dogs are loyal companions and popular pets."]
+
+    agent = Agent(
+        role="{topic} Researcher",
+        goal="Express hot takes on {topic}.",
+        backstory="You have a lot of experience with {topic}.",
+    )
+
+    task = Task(
+        description="Give me an analysis around {topic}.",
+        expected_output="1 bullet point about {topic} that's under 15 words.",
+        agent=agent,
+    )
+
+    with patch.object(Agent, "execute_task") as mock_execute_task:
+        mock_execute_task.side_effect = expected_outputs
+        crew = Crew(agents=[agent], tasks=[task])
+        results = crew.kickoff_for_each(inputs=inputs)
+
+    assert len(results) == 1
+    assert results == expected_outputs
+
+
+@pytest.mark.vcr(filter_headers=["authorization"])
+def test_kickoff_for_each_multiple_inputs():
+    """Tests if kickoff_for_each works with multiple inputs."""
+    from unittest.mock import patch
+
+    inputs = [
+        {"topic": "dog"},
+        {"topic": "cat"},
+        {"topic": "apple"},
+    ]
+    expected_outputs = [
+        "Dogs are loyal companions and popular pets.",
+        "Cats are independent and low-maintenance pets.",
+        "Apples are a rich source of dietary fiber and vitamin C.",
+    ]
+
+    agent = Agent(
+        role="{topic} Researcher",
+        goal="Express hot takes on {topic}.",
+        backstory="You have a lot of experience with {topic}.",
+    )
+
+    task = Task(
+        description="Give me an analysis around {topic}.",
+        expected_output="1 bullet point about {topic} that's under 15 words.",
+        agent=agent,
+    )
+
+    with patch.object(Agent, "execute_task") as mock_execute_task:
+        mock_execute_task.side_effect = expected_outputs
+        crew = Crew(agents=[agent], tasks=[task])
+        results = crew.kickoff_for_each(inputs=inputs)
+
+    assert len(results) == len(inputs)
+    for i, res in enumerate(results):
+        assert res == expected_outputs[i]
+
+
+@pytest.mark.vcr(filter_headers=["authorization"])
+def test_kickoff_for_each_empty_input():
+    """Tests if kickoff_for_each handles an empty input list."""
+    agent = Agent(
+        role="{topic} Researcher",
+        goal="Express hot takes on {topic}.",
+        backstory="You have a lot of experience with {topic}.",
+    )
+
+    task = Task(
+        description="Give me an analysis around {topic}.",
+        expected_output="1 bullet point about {topic} that's under 15 words.",
+        agent=agent,
+    )
+
+    crew = Crew(agents=[agent], tasks=[task])
+    results = crew.kickoff_for_each(inputs=[])
+    assert results == []
+
+
+@pytest.mark.vcr(filter_headers=["authorization"])
+def test_kickoff_for_each_invalid_input():
+    """Tests if kickoff_for_each raises TypeError for invalid input types."""
+
+    agent = Agent(
+        role="{topic} Researcher",
+        goal="Express hot takes on {topic}.",
+        backstory="You have a lot of experience with {topic}.",
+    )
+
+    task = Task(
+        description="Give me an analysis around {topic}.",
+        expected_output="1 bullet point about {topic} that's under 15 words.",
+        agent=agent,
+    )
+
+    crew = Crew(agents=[agent], tasks=[task])
+
+    with pytest.raises(TypeError):
+        # Pass a string instead of a list
+        crew.kickoff_for_each("invalid input")
+
+
+@pytest.mark.vcr(filter_headers=["authorization"])
+def test_kickoff_for_each_error_handling():
+    """Tests error handling in kickoff_for_each when kickoff raises an error."""
+    from unittest.mock import patch
+
+    inputs = [
+        {"topic": "dog"},
+        {"topic": "cat"},
+        {"topic": "apple"},
+    ]
+    expected_outputs = [
+        "Dogs are loyal companions and popular pets.",
+        "Cats are independent and low-maintenance pets.",
+        "Apples are a rich source of dietary fiber and vitamin C.",
+    ]
+    agent = Agent(
+        role="{topic} Researcher",
+        goal="Express hot takes on {topic}.",
+        backstory="You have a lot of experience with {topic}.",
+    )
+
+    task = Task(
+        description="Give me an analysis around {topic}.",
+        expected_output="1 bullet point about {topic} that's under 15 words.",
+        agent=agent,
+    )
+
+    crew = Crew(agents=[agent], tasks=[task])
+
+    with patch.object(Crew, "kickoff") as mock_kickoff:
+        mock_kickoff.side_effect = expected_outputs[:2] + [
+            Exception("Simulated kickoff error")
+        ]
+        with pytest.raises(Exception, match="Simulated kickoff error"):
+            crew.kickoff_for_each(inputs=inputs)
+
+
+@pytest.mark.vcr(filter_headers=["authorization"])
+@pytest.mark.asyncio
+async def test_kickoff_async_basic_functionality_and_output():
+    """Tests the basic functionality and output of kickoff_async."""
+    from unittest.mock import patch
+
+    inputs = {"topic": "dog"}
+
+    agent = Agent(
+        role="{topic} Researcher",
+        goal="Express hot takes on {topic}.",
+        backstory="You have a lot of experience with {topic}.",
+    )
+
+    task = Task(
+        description="Give me an analysis around {topic}.",
+        expected_output="1 bullet point about {topic} that's under 15 words.",
+        agent=agent,
+    )
+
+    # Create the crew
+    crew = Crew(
+        agents=[agent],
+        tasks=[task],
+    )
+
+    expected_output = "This is a sample output from kickoff."
+    with patch.object(Crew, "kickoff", return_value=expected_output) as mock_kickoff:
+        result = await crew.kickoff_async(inputs)
+
+        assert isinstance(result, str), "Result should be a string"
+        assert result == expected_output, "Result should match expected output"
+        mock_kickoff.assert_called_once_with(inputs)
+
+
+@pytest.mark.vcr(filter_headers=["authorization"])
+@pytest.mark.asyncio
+async def test_async_kickoff_for_each_async_basic_functionality_and_output():
+    """Tests the basic functionality and output of akickoff_for_each_async."""
+    from unittest.mock import patch
+
+    inputs = [
+        {"topic": "dog"},
+        {"topic": "cat"},
+        {"topic": "apple"},
+    ]
+
+    # Define expected outputs for each input
+    expected_outputs = [
+        "Dogs are loyal companions and popular pets.",
+        "Cats are independent and low-maintenance pets.",
+        "Apples are a rich source of dietary fiber and vitamin C.",
+    ]
+
+    agent = Agent(
+        role="{topic} Researcher",
+        goal="Express hot takes on {topic}.",
+        backstory="You have a lot of experience with {topic}.",
+    )
+
+    task = Task(
+        description="Give me an analysis around {topic}.",
+        expected_output="1 bullet point about {topic} that's under 15 words.",
+        agent=agent,
+    )
+
+    with patch.object(
+        Crew, "kickoff_async", side_effect=expected_outputs
+    ) as mock_kickoff_async:
+        crew = Crew(agents=[agent], tasks=[task])
+
+        results = await crew.kickoff_for_each_async(inputs)
+
+        assert len(results) == len(inputs)
+        assert results == expected_outputs
+        for input_data in inputs:
+            mock_kickoff_async.assert_any_call(inputs=input_data)
+
+
+@pytest.mark.vcr(filter_headers=["authorization"])
+@pytest.mark.asyncio
+async def test_async_kickoff_for_each_async_empty_input():
+    """Tests if akickoff_for_each_async handles an empty input list."""
+
+    agent = Agent(
+        role="{topic} Researcher",
+        goal="Express hot takes on {topic}.",
+        backstory="You have a lot of experience with {topic}.",
+    )
+
+    task = Task(
+        description="Give me an analysis around {topic}.",
+        expected_output="1 bullet point about {topic} that's under 15 words.",
+        agent=agent,
+    )
+
+    # Create the crew
+    crew = Crew(
+        agents=[agent],
+        tasks=[task],
+    )
+
+    # Call the function we are testing
+    results = await crew.kickoff_for_each_async([])
+
+    # Assertion
+    assert results == [], "Result should be an empty list when input is empty"
+
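For reference, a minimal usage sketch of the batch and async kickoff APIs exercised by the new tests above; the agent and task definitions here are illustrative, and asyncio is only needed for the async variants:

    import asyncio

    from crewai import Agent, Crew, Task

    agent = Agent(
        role="{topic} Researcher",
        goal="Express hot takes on {topic}.",
        backstory="You have a lot of experience with {topic}.",
    )
    task = Task(
        description="Give me an analysis around {topic}.",
        expected_output="1 bullet point about {topic} that's under 15 words.",
        agent=agent,
    )
    crew = Crew(agents=[agent], tasks=[task])

    # Synchronous batch run: one kickoff per input dict, results in input order.
    results = crew.kickoff_for_each(inputs=[{"topic": "dog"}, {"topic": "cat"}])

    async def main():
        # Single asynchronous kickoff, then a concurrent batch over several inputs.
        single = await crew.kickoff_async({"topic": "apple"})
        batch = await crew.kickoff_for_each_async([{"topic": "dog"}, {"topic": "cat"}])
        return single, batch

    asyncio.run(main())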
 def test_set_agents_step_callback():
@@ -618,6 +959,9 @@ def test_code_execution_flag_adds_code_tool_upon_kickoff():
     )

     crew = Crew(agents=[programmer], tasks=[task])

+    with patch.object(Agent, "execute_task") as executor:
+        executor.return_value = "ok"
     crew.kickoff()
     assert len(programmer.tools) == 1
     assert programmer.tools[0].__class__ == CodeInterpreterTool
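The hunk above covers the new code-execution flag; a rough sketch of the intended usage follows. The allow_code_execution attribute name and the CodeInterpreterTool class are inferred from this test, so treat them as assumptions rather than confirmed API:

    from crewai import Agent, Crew, Task

    programmer = Agent(
        role="Programmer",
        goal="Write working Python code.",
        backstory="You are an experienced Python developer.",
        allow_code_execution=True,  # assumed flag name; expected to attach a code-interpreter tool at kickoff
    )
    task = Task(
        description="Write a script that prints 'hello world'.",
        expected_output="The script output.",
        agent=programmer,
    )
    crew = Crew(agents=[programmer], tasks=[task])
    crew.kickoff()
    # Per the assertions above, the agent should now carry a CodeInterpreterTool instance.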
@@ -691,6 +1035,29 @@ def test_agent_usage_metrics_are_captured_for_sequential_process():
     }


+@pytest.mark.vcr(filter_headers=["authorization"])
+def test_sequential_crew_creation_tasks_without_agents():
+    task = Task(
+        description="Come up with a list of 5 interesting ideas to explore for an article, then write one amazing paragraph highlight for each idea that showcases how good an article about this topic could be. Return the list of ideas with their paragraph and your notes.",
+        expected_output="5 bullet points with a paragraph for each idea.",
+        # agent=researcher, # not having an agent on the task should throw an error
+    )
+
+    # Expected Output: The sequential crew should fail to create because the task is missing an agent
+    with pytest.raises(pydantic_core._pydantic_core.ValidationError) as exec_info:
+        Crew(
+            tasks=[task],
+            agents=[researcher],
+            process=Process.sequential,
+        )
+
+    assert exec_info.value.errors()[0]["type"] == "missing_agent_in_task"
+    assert (
+        "Agent is missing in the task with the following description"
+        in exec_info.value.errors()[0]["msg"]
+    )
+
+
 @pytest.mark.vcr(filter_headers=["authorization"])
 def test_agent_usage_metrics_are_captured_for_hierarchical_process():
     from langchain_openai import ChatOpenAI
@@ -715,13 +1082,68 @@ def test_agent_usage_metrics_are_captured_for_hierarchical_process():
     assert result == '"Howdy!"'

     assert crew.usage_metrics == {
-        "total_tokens": 1616,
-        "prompt_tokens": 1333,
-        "completion_tokens": 283,
-        "successful_requests": 3,
+        "total_tokens": 1927,
+        "prompt_tokens": 1557,
+        "completion_tokens": 370,
+        "successful_requests": 4,
     }


+@pytest.mark.vcr(filter_headers=["authorization"])
+def test_hierarchical_crew_creation_tasks_with_agents():
+    """
+    Agents are not required for tasks in a hierarchical process but sometimes they are still added
+    This test makes sure that the manager still delegates the task to the agent even if the agent is passed in the task
+    """
+    from langchain_openai import ChatOpenAI
+
+    task = Task(
+        description="Write one amazing paragraph about AI.",
+        expected_output="A single paragraph with 4 sentences.",
+        agent=writer,
+    )
+
+    crew = Crew(
+        tasks=[task],
+        agents=[writer, researcher],
+        process=Process.hierarchical,
+        manager_llm=ChatOpenAI(model="gpt-4o"),
+    )
+    crew.kickoff()
+    assert crew.manager_agent is not None
+    assert crew.manager_agent.tools is not None
+    assert crew.manager_agent.tools[0].description.startswith(
+        "Delegate a specific task to one of the following coworkers: [Senior Writer]"
+    )
+
+
+@pytest.mark.vcr(filter_headers=["authorization"])
+def test_hierarchical_crew_creation_tasks_without_async_execution():
+    from langchain_openai import ChatOpenAI
+
+    task = Task(
+        description="Come up with a list of 5 interesting ideas to explore for an article, then write one amazing paragraph highlight for each idea that showcases how good an article about this topic could be. Return the list of ideas with their paragraph and your notes.",
+        expected_output="5 bullet points with a paragraph for each idea.",
+        async_execution=True,  # should throw an error
+    )
+
+    with pytest.raises(pydantic_core._pydantic_core.ValidationError) as exec_info:
+        Crew(
+            tasks=[task],
+            agents=[researcher],
+            process=Process.hierarchical,
+            manager_llm=ChatOpenAI(model="gpt-4o"),
+        )
+
+    assert (
+        exec_info.value.errors()[0]["type"] == "async_execution_in_hierarchical_process"
+    )
+    assert (
+        "Hierarchical process error: Tasks cannot be flagged with async_execution."
+        in exec_info.value.errors()[0]["msg"]
+    )
+
+
 def test_crew_inputs_interpolate_both_agents_and_tasks():
     agent = Agent(
         role="{topic} Researcher",
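The two hierarchical-process tests added above boil down to the following setup when used directly; a minimal sketch, with the manager model choice taken from the test and the agent definition illustrative:

    from crewai import Agent, Crew, Process, Task
    from langchain_openai import ChatOpenAI

    writer = Agent(
        role="Senior Writer",
        goal="Write compelling copy.",
        backstory="You are an award-winning writer.",
    )
    task = Task(
        description="Write one amazing paragraph about AI.",
        expected_output="A single paragraph with 4 sentences.",
        agent=writer,  # optional in hierarchical crews; the manager still delegates
    )
    crew = Crew(
        tasks=[task],
        agents=[writer],
        process=Process.hierarchical,
        manager_llm=ChatOpenAI(model="gpt-4o"),  # hierarchical crews need a manager LLM
    )
    crew.kickoff()
    # The auto-created manager agent gets a delegation tool listing the coworkers,
    # and tasks must not set async_execution=True in this process.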
@@ -732,9 +1154,10 @@ def test_crew_inputs_interpolate_both_agents_and_tasks():
     task = Task(
         description="Give me an analysis around {topic}.",
         expected_output="{points} bullet points about {topic}.",
+        agent=agent,
     )

-    crew = Crew(agents=[agent], tasks=[task], inputs={"topic": "AI", "points": 5})
+    crew = Crew(agents=[agent], tasks=[task])
     inputs = {"topic": "AI", "points": 5}
     crew._interpolate_inputs(inputs=inputs)  # Manual call for now

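The hunk above moves input interpolation out of the Crew constructor; in normal use the same {topic}/{points} placeholders are filled by passing inputs at kickoff time, roughly as below (a sketch based on the kickoff(inputs=...) calls exercised elsewhere in this diff):

    from crewai import Agent, Crew, Task

    agent = Agent(
        role="{topic} Researcher",
        goal="Express hot takes on {topic}.",
        backstory="You have a lot of experience with {topic}.",
    )
    task = Task(
        description="Give me an analysis around {topic}.",
        expected_output="{points} bullet points about {topic}.",
        agent=agent,
    )
    crew = Crew(agents=[agent], tasks=[task])
    # Placeholders in agent and task fields are interpolated from the inputs dict.
    crew.kickoff(inputs={"topic": "AI", "points": 5})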
@@ -1039,6 +1462,7 @@ def test_crew_train_success(task_evaluator, crew_training_handler, kickoff):
     task = Task(
         description="Come up with a list of 5 interesting ideas to explore for an article, then write one amazing paragraph highlight for each idea that showcases how good an article about this topic could be. Return the list of ideas with their paragraph and your notes.",
         expected_output="5 bullet points with a paragraph for each idea.",
+        agent=researcher,
     )

     crew = Crew(
@@ -1093,6 +1517,7 @@ def test_crew_train_error():
     task = Task(
         description="Come up with a list of 5 interesting ideas to explore for an article",
         expected_output="5 bullet points with a paragraph for each idea.",
+        agent=researcher,
     )

     crew = Crew(
@@ -1114,6 +1539,7 @@ def test__setup_for_training():
     task = Task(
         description="Come up with a list of 5 interesting ideas to explore for an article",
         expected_output="5 bullet points with a paragraph for each idea.",
+        agent=researcher,
     )

     crew = Crew(

@@ -8,7 +8,7 @@ interactions:
       accept:
       - application/json
       accept-encoding:
-      - gzip, deflate, br
+      - gzip, deflate
       connection:
       - keep-alive
       content-length:
@@ -18,7 +18,7 @@ interactions:
       host:
      - api.openai.com
       user-agent:
-      - OpenAI/Python 1.25.1
+      - OpenAI/Python 1.35.10
       x-stainless-arch:
       - arm64
       x-stainless-async:
@@ -28,7 +28,7 @@ interactions:
       x-stainless-os:
       - MacOS
       x-stainless-package-version:
-      - 1.25.1
+      - 1.35.10
       x-stainless-runtime:
       - CPython
       x-stainless-runtime-version:
@@ -38,137 +38,135 @@ interactions:
   response:
     body:
       string: !!binary |
-        [base64-encoded response body from the old recording (omitted)]
+        [base64-encoded response body from the new recording (omitted)]
     headers:
       CF-Cache-Status:
       - DYNAMIC
       CF-RAY:
-      - 88142860ce7a82e2-GIG
+      - 89de8402cc8ddab1-MIA
       Connection:
       - keep-alive
       Content-Encoding:
-      - br
+      - gzip
       Content-Type:
       - application/json
       Date:
-      - Thu, 09 May 2024 19:39:49 GMT
+      - Thu, 04 Jul 2024 10:43:40 GMT
       Server:
       - cloudflare
       Set-Cookie:
-      - __cf_bm=JUH9MsoOBdCquA9K9qOgVbrLUkBpuF4d2k7EvjnBOLg-1715283589-1.0.1.1-96.yHqUmYzRiWC7xZ4.TPKu5RqvB1EFecasHfb_ix62oXcsw_Sp.gz0gDk4Inl119IxRMINM613CI5HheoJM2A;
-        path=/; expires=Thu, 09-May-24 20:09:49 GMT; domain=.api.openai.com; HttpOnly;
+      - __cf_bm=4yTFGytKZgiKE08enzMel3PhLQLu9mwQY7gk_lH43Bc-1720089820-1.0.1.1-4V06JbYY1zuYkbV7SSudBUPyqHqm6yFze0ufE2rHtJTtOmU.XLh_k1M9h.a.oVJGC44GhO25bv5s5224Ic.p6w;
+        path=/; expires=Thu, 04-Jul-24 11:13:40 GMT; domain=.api.openai.com; HttpOnly;
         Secure; SameSite=None
-      - _cfuvid=DPNTnqcoTitPoe12n1vvVltuYsIzavX_dftsSEhQQcc-1715283589489-0.0.1.1-604800000;
+      - _cfuvid=04ERWHryLOXBKqyH.MpOILpw8OtW9Tchzbutsc9OJnE-1720089820726-0.0.1.1-604800000;
         path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
       Transfer-Encoding:
       - chunked
@@ -181,210 +179,207 @@ interactions:
       openai-organization:
       - crewai-iuxna1
       openai-processing-ms:
-      - '15'
-      openai-version:
-      - '2020-10-01'
-      strict-transport-security:
-      - max-age=15724800; includeSubDomains
-      x-ratelimit-limit-requests:
-      - '10000'
-      x-ratelimit-limit-tokens:
-      - '10000000'
-      x-ratelimit-remaining-requests:
-      - '9999'
-      x-ratelimit-remaining-tokens:
-      - '9999946'
-      x-ratelimit-reset-requests:
-      - 6ms
-      x-ratelimit-reset-tokens:
-      - 0s
-      x-request-id:
-      - req_ace5babe6674a08d07fce90792eef7eb
-    status:
-      code: 200
-      message: OK
-- request:
-    body: '{"input": ["test value"], "model": "text-embedding-ada-002", "encoding_format":
-      "base64"}'
-    headers:
-      accept:
-      - application/json
-      accept-encoding:
-      - gzip, deflate, br
-      connection:
-      - keep-alive
-      content-length:
-      - '89'
-      content-type:
-      - application/json
-      cookie:
-      - __cf_bm=JUH9MsoOBdCquA9K9qOgVbrLUkBpuF4d2k7EvjnBOLg-1715283589-1.0.1.1-96.yHqUmYzRiWC7xZ4.TPKu5RqvB1EFecasHfb_ix62oXcsw_Sp.gz0gDk4Inl119IxRMINM613CI5HheoJM2A;
-        _cfuvid=DPNTnqcoTitPoe12n1vvVltuYsIzavX_dftsSEhQQcc-1715283589489-0.0.1.1-604800000
-      host:
-      - api.openai.com
-      user-agent:
-      - OpenAI/Python 1.25.1
-      x-stainless-arch:
-      - arm64
-      x-stainless-async:
-      - 'false'
-      x-stainless-lang:
-      - python
-      x-stainless-os:
-      - MacOS
-      x-stainless-package-version:
-      - 1.25.1
-      x-stainless-runtime:
-      - CPython
-      x-stainless-runtime-version:
-      - 3.11.7
-    method: POST
-    uri: https://api.openai.com/v1/embeddings
-  response:
-    body:
-      string: !!binary |
-        [base64-encoded embedding response body from the old recording (omitted)]
-    headers:
-      CF-Cache-Status:
-      - DYNAMIC
-      CF-RAY:
-      - 88142862e9f182e2-GIG
-      Connection:
-      - keep-alive
-      Content-Encoding:
-      - br
-      Content-Type:
-      - application/json
-      Date:
-      - Thu, 09 May 2024 19:39:49 GMT
-      Server:
-      - cloudflare
-      Transfer-Encoding:
-      - chunked
-      access-control-allow-origin:
-      - '*'
-      alt-svc:
-      - h3=":443"; ma=86400
-      openai-model:
-      - text-embedding-ada-002
-      openai-organization:
-      - crewai-iuxna1
-      openai-processing-ms:
       - '22'
       openai-version:
       - '2020-10-01'
       strict-transport-security:
-      - max-age=15724800; includeSubDomains
+      - max-age=31536000; includeSubDomains
x-ratelimit-limit-requests:
|
||||||
|
- '10000'
|
||||||
|
x-ratelimit-limit-tokens:
|
||||||
|
- '10000000'
|
||||||
|
x-ratelimit-remaining-requests:
|
||||||
|
- '9999'
|
||||||
|
x-ratelimit-remaining-tokens:
|
||||||
|
- '9999946'
|
||||||
|
x-ratelimit-reset-requests:
|
||||||
|
- 6ms
|
||||||
|
x-ratelimit-reset-tokens:
|
||||||
|
- 0s
|
||||||
|
x-request-id:
|
||||||
|
- req_49a762eade78fd2313d74a32fa22ead7
|
||||||
|
status:
|
||||||
|
code: 200
|
||||||
|
message: OK
|
||||||
|
- request:
|
||||||
|
body: '{"input": ["test value"], "model": "text-embedding-ada-002", "encoding_format":
|
||||||
|
"base64"}'
|
||||||
|
headers:
|
||||||
|
accept:
|
||||||
|
- application/json
|
||||||
|
accept-encoding:
|
||||||
|
- gzip, deflate
|
||||||
|
connection:
|
||||||
|
- keep-alive
|
||||||
|
content-length:
|
||||||
|
- '89'
|
||||||
|
content-type:
|
||||||
|
- application/json
|
||||||
|
cookie:
|
||||||
|
- __cf_bm=4yTFGytKZgiKE08enzMel3PhLQLu9mwQY7gk_lH43Bc-1720089820-1.0.1.1-4V06JbYY1zuYkbV7SSudBUPyqHqm6yFze0ufE2rHtJTtOmU.XLh_k1M9h.a.oVJGC44GhO25bv5s5224Ic.p6w;
|
||||||
|
_cfuvid=04ERWHryLOXBKqyH.MpOILpw8OtW9Tchzbutsc9OJnE-1720089820726-0.0.1.1-604800000
|
||||||
|
host:
|
||||||
|
- api.openai.com
|
||||||
|
user-agent:
|
||||||
|
- OpenAI/Python 1.35.10
|
||||||
|
x-stainless-arch:
|
||||||
|
- arm64
|
||||||
|
x-stainless-async:
|
||||||
|
- 'false'
|
||||||
|
x-stainless-lang:
|
||||||
|
- python
|
||||||
|
x-stainless-os:
|
||||||
|
- MacOS
|
||||||
|
x-stainless-package-version:
|
||||||
|
- 1.35.10
|
||||||
|
x-stainless-runtime:
|
||||||
|
- CPython
|
||||||
|
x-stainless-runtime-version:
|
||||||
|
- 3.11.7
|
||||||
|
method: POST
|
||||||
|
uri: https://api.openai.com/v1/embeddings
|
||||||
|
response:
|
||||||
|
body:
|
||||||
|
string: !!binary |
|
||||||
|
H4sIAAAAAAAAA1R6WbO6Orvn/fsp/rVv7bdkTth3zDKZIKgLu7q6QBFBERkSIKfOdz+Fa/fp7ptV
|
||||||
|
JeAiJs/zm5L/+NefP3+1eV1cx7/+/vPXqxrGv/7Heu2Wjdlff//5n//68+fPn//4/v3/niyavLjd
|
||||||
|
qnf5ffx7s3rfivmvv/9w/33l/z7095+/GpyZFHdumi+T7J9BWuUz2eglAVNAwACtq4mol9gfMC8P
|
||||||
|
L4b7bksoTj4NIKfmJMFNq2gEdsM7n+/7pAKXnMuwr+zfbAmzSIGHUogxqi+nfj7co6MUmFtAiuLO
|
||||||
|
9Yv9Ua7gGt41GjyUCCz5Q5LgDZdXelGWhY33exkC5zUF+JhPJmBjV8ZqPhshdVmD626vcRY47/YH
|
||||||
|
Gj5OLzA9iBGpQ15fCZxKrx7zJL7CVORFbDRcDVizzARytmVQbV6u5tQgUsH80HAUx2ZSL71zucJR
|
||||||
|
SUOknoUJsOLYGtDX4w2aBgmYwzV7pvDAA5ksD6ViLCj3sWKm24Xun5bbT9pLRyp5ygrWdzcuGM/l
|
||||||
|
o4LcWbHQxEsmmw3RPML7zpGwP1xsxl5xOME4P22JqI11PWVFxKn80ocEhjuSz8F1RMprxBX5iOBV
|
||||||
|
z5VHLTiflC3VhNMzX54npsHzPS2xd58u5sLZqQ8oeGd477dGQIrhScBZaD84SMKxHtswDqE+hh32
|
||||||
|
5nzOKQ6IBbhxHtHSH6yEu7htAbaOL1Lsx3ZCGl8oIJlGhH0KX8H0FvoGWoapoq1Nb8HcwPsGyG0Q
|
||||||
|
oe98zD6ZFVXaiz22/UwHs/JOHZiF9IQE81QlH5ubOPjDzhLh5UeZz/vEeML0uWfYFvWeDdbP4qqz
|
||||||
|
X/jUMPK2ns/lo4Ty4ldUf1Y6EJ2Cc+DldlDJ5NuJKdZ8GEL/SheMwE6v523mGdCg8hubNX3mDA+5
|
||||||
|
BrWCXOjOLQ5g8YCQgm7wI6ydL17NHpomwNt0SRELdamejMd1gtjjCoxechcM/K3ovuMnTA06sDQx
|
||||||
|
ekL22N2xtzcE87PsnldgQnjCWJFxPi7tIYPNORmJ8jOn/WiP7QC2YnZAQj7fwfzuxxBUs+vi/IOK
|
||||||
|
erpyXggaQBmB4s+5n9ukO8NPRd943wG+n1/oOUCuK10a8BVvzjYncVDVzC3a5EEDukcXh0qK2ED9
|
||||||
|
SXASfp1PVXIMgPewGQG9124MxgqpiAcgZIvOv5+ASI8N1Z9bIxHV3TTA6u0K2OZzqf/04rLZKj0a
|
||||||
|
qH6vJEbYs/ZhKqRH/MPbgDH3dLiqNy/ABBB32y+pJaXQaC2RbLj3mMyboF1gjGWfepkgmYOYcAbc
|
||||||
|
g/Mehw1uwPydDzur79ioRcRG+2hK8HMvdLqbAoWNtE4WuFMBIlIDtJw1iifA+5MgsjGdyhww8Ipv
|
||||||
|
vaCLCOx+tkIwwfKz3+OLcFpyUt1PHYQf94Tivr0FMx9WlmqSi0T1nZb087f+0uoyY68ZUcDeeqYp
|
||||||
|
1846Y7uCb5PiITeg2ECDBLX67IfrIDtwnQ/ETfQD3uCQH8HxcJWo/yNqidCWRIAS6w7YdbZ2LQwD
|
||||||
|
bEDweB3ors2G/kPAuYNZUf9QjZdqRiJbHkBS5DnGbRuyntM/CiyBjpEwb8t8lCVfgPmY7cjExVO9
|
||||||
|
TLJxVM2Mcdjg5S1ggabEwNHONsb+pwgG9jAd2LQ2pa4bbesWJWEK90ZkUuPG5QGLveQJ7wfLR1sc
|
||||||
|
Nskwpo9JPXPCG5u9GZtLkewUECXzEykU2kH/xcfTsTqi+XXOzWlMHws8nHmN7owO1+R1M13Iy9qF
|
||||||
|
1M+P3VNBeEWQp/oOa22u1MNjfzvCDyQSNb13D5ar+IihvakMiuQHydlt6CRl2bkXWqzruYSJO8H1
|
||||||
|
M9mSvuwXfOOe8A3VPZo6fWQTvwmewHF9njQ/x3tCvnwyGzGg7vntgeaeyxtoye0Te8dr0JMkepUQ
|
||||||
|
ZK1F063imEucdw40WkdEwpZV+SKpYghv2fZAEQAKoHxra1+++uIXmIJ3/4TCBRypeynEgFnteQC3
|
||||||
|
PckQe35eNaMOUiDffhSK7dTK+U1QLrBpupBirajzCSVWCg7UwFTr37o5acPlDOHHP1F8rF1zWi7L
|
||||||
|
BuxVH5Ol4Uw2RMahg5f8BqiRlWMyJjAS4PV8jekP14J8bITP8ss316tkJ7OtUE0JpWjGX/5lyQ8Q
|
||||||
|
gHK/pEi+sTGYbsanUiZBcNBWvtSgNy6bK5Q95wfvpiBjS/NMImAwJcA2TMf+NXW9BLnti1Jrir1k
|
||||||
|
VrbZBnaDG631qOWzaqXttz5xUKiAUXt2JwiyzsLh42QzXsdOBM/3rMRa2YX5GI9To3zn/yiVUtCz
|
||||||
|
+d3B1l0eSNT9pznvPjAC5BVd6O1ubXvmFNBRftxrTw73tqyX89gYoLqVJlGcTwtGHTvxFy+QNIdm
|
||||||
|
8DsepTZviMxTaZL8/bbA4SxqiPXlUn/XC3LisiXb0d7n7CLLEE6xWGPjqMlB/zSHEj7iJMVWB971
|
||||||
|
ZHoXBcKUCdiO0NNcxqsuAPxjn3DQH5aE8a5iwGN/9bDBb3f1Eg1XDu5Pyg7rwxT1y7fewqagaFGu
|
||||||
|
rTlkP+4ALgpIEHhmr2QafK+CH7MjNIzfVc1Op88Rno7lEd8RtRnXgU8ID3kWoKUWCaBknlJYnl7e
|
||||||
|
uh6f5DWS6AwKVnCEqx9LsGjtI4YlQ3ciPG2hnr3y0AKP03a46FhvtnFqaDDt3tq6vvua6XZpwFHt
|
||||||
|
Z2p5hgaEgQsHKDc/aOUPh03O0SaQ4zcq9YKeyz+RmCBY0meGeHysAI0UEAO7Gih2uTICU6bWHGyv
|
||||||
|
2Qv7z22U/PKXKl9bvC9GByx2VR3hV9/B+F31s3+6Od//R7ZaYebMCCdfNXs3oUawdOZynS4uFJdU
|
||||||
|
xL7mHfsJye8KhjhsSFmNpfntJwhtb6GBlwb1yg/TVw9g96IL/WKykwP9y3VP9z/JIaFj10Ywz582
|
||||||
|
jsU7M0cxN87KPlB4MoGmMEn0Csk/6xHuSLLy7REEYeCjit/u+iU4EA5ET98kAoxqc7gZjxIeyhtD
|
||||||
|
[base64-encoded gzipped response body omitted]
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 89de84040d6cdab1-MIA
Connection:
- keep-alive
Content-Encoding:
- gzip
Content-Type:
- application/json
Date:
- Thu, 04 Jul 2024 10:43:40 GMT
Server:
- cloudflare
Transfer-Encoding:
- chunked
access-control-allow-origin:
- '*'
alt-svc:
- h3=":443"; ma=86400
openai-model:
- text-embedding-ada-002
openai-organization:
- crewai-iuxna1
openai-processing-ms:
- '15'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=31536000; includeSubDomains
x-ratelimit-limit-requests:
- '10000'
x-ratelimit-limit-tokens:
@@ -398,7 +393,7 @@ interactions:
 x-ratelimit-reset-tokens:
 - 0s
 x-request-id:
-- req_834c84237ace9a79492cb9e8d1f68737
+- req_c684605ab95c8ee822c1b082fc95d416
 status:
 code: 200
 message: OK
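The recorded response above (a gzipped, base64-encoded body from text-embedding-ada-002 plus its HTTP headers) is a vcrpy cassette that the test suite replays instead of calling OpenAI; the @pytest.mark.vcr(filter_headers=["authorization"]) markers in the test hunks below rely on such cassettes. Below is a minimal sketch of replaying a cassette directly with vcrpy; the cassette path, request payload, and API key placeholder are illustrative assumptions, not taken from the repository.

# Minimal replay sketch, assuming vcrpy is installed. The cassette path and
# request payload are hypothetical; filter_headers mirrors the pytest marker,
# so the Authorization header is never written to disk.
import requests
import vcr

replayer = vcr.VCR(filter_headers=["authorization"])

with replayer.use_cassette("tests/cassettes/example_embedding.yaml"):
    # Served from the cassette: the body is the stored base64/gzip payload,
    # the headers are the recorded ones (openai-model, x-request-id, ...).
    resp = requests.post(
        "https://api.openai.com/v1/embeddings",
        headers={"Authorization": "Bearer sk-..."},
        json={"model": "text-embedding-ada-002", "input": "hello world"},
    )
    print(resp.status_code, resp.headers.get("openai-model"))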
@@ -9,6 +9,7 @@ from pydantic_core import ValidationError
 
 from crewai import Agent, Crew, Process, Task
 from crewai.tasks.task_output import TaskOutput
+from crewai.utilities.converter import Converter
 
 
 def test_task_tool_reflect_agent_tools():
@@ -310,7 +311,7 @@ def test_output_json_to_another_task():
 
     crew = Crew(agents=[scorer], tasks=[task1, task2])
     result = crew.kickoff()
-    assert '{\n "score": 3\n}' == result
+    assert '{\n "score": 5\n}' == result
 
 
 @pytest.mark.vcr(filter_headers=["authorization"])
@@ -393,6 +394,38 @@ def test_save_task_pydantic_output():
         save_file.assert_called_once_with('{"score":4}')
 
 
+@pytest.mark.vcr(filter_headers=["authorization"])
+def test_custom_converter_cls():
+    class ScoreOutput(BaseModel):
+        score: int
+
+    class ScoreConverter(Converter):
+        pass
+
+    scorer = Agent(
+        role="Scorer",
+        goal="Score the title",
+        backstory="You're an expert scorer, specialized in scoring titles.",
+        allow_delegation=False,
+    )
+
+    task = Task(
+        description="Give me an integer score between 1-5 for the following title: 'The impact of AI in the future of work'",
+        expected_output="The score of the title.",
+        output_pydantic=ScoreOutput,
+        converter_cls=ScoreConverter,
+        agent=scorer,
+    )
+
+    crew = Crew(agents=[scorer], tasks=[task])
+
+    with patch.object(
+        ScoreConverter, "to_pydantic", return_value=ScoreOutput(score=5)
+    ) as mock_to_pydantic:
+        crew.kickoff()
+        mock_to_pydantic.assert_called_once()
+
+
 @pytest.mark.vcr(filter_headers=["authorization"])
 def test_increment_delegations_for_hierarchical_process():
     from langchain_openai import ChatOpenAI
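The new test_custom_converter_cls exercises the converter_cls parameter on Task: a Converter subclass supplied there is used in place of the default converter to turn the raw LLM answer into the output_pydantic model. The sketch below shows what a subclass with extra validation might look like; the generic *args/**kwargs signature and the range check are illustrative assumptions, not crewAI's implementation.

# Illustrative only: a converter that re-checks the parsed ScoreOutput-style
# model after delegating to the base Converter. The signature is kept generic
# because only the method name is confirmed by the test above.
from crewai.utilities.converter import Converter


class StrictScoreConverter(Converter):
    def to_pydantic(self, *args, **kwargs):
        result = super().to_pydantic(*args, **kwargs)
        if not 1 <= result.score <= 5:  # assumes a model with a `score` field
            raise ValueError(f"score out of range: {result.score}")
        return result

Passing converter_cls=StrictScoreConverter to a Task would route conversion through this class, exactly as the test routes it through ScoreConverter.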
@@ -413,31 +446,29 @@ def test_increment_delegations_for_hierarchical_process():
         agents=[scorer],
         tasks=[task],
         process=Process.hierarchical,
-        manager_llm=ChatOpenAI(model="gpt-4-0125-preview"),
+        manager_llm=ChatOpenAI(model="gpt-4o"),
     )
 
     with patch.object(Task, "increment_delegations") as increment_delegations:
         increment_delegations.return_value = None
         crew.kickoff()
-        increment_delegations.assert_called_once
+        increment_delegations.assert_called_once()
 
 
 @pytest.mark.vcr(filter_headers=["authorization"])
 def test_increment_delegations_for_sequential_process():
-    pass
-
     manager = Agent(
         role="Manager",
         goal="Coordinate scoring processes",
         backstory="You're great at delegating work about scoring.",
-        allow_delegation=False,
+        allow_delegation=True,
    )
 
     scorer = Agent(
         role="Scorer",
         goal="Score the title",
         backstory="You're an expert scorer, specialized in scoring titles.",
-        allow_delegation=False,
+        allow_delegation=True,
     )
 
     task = Task(
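The hierarchical-process test now pins the manager model to gpt-4o. Outside the test, the same pieces compose as in the sketch below; the agent wording is copied from the hunk above, and the rest is an assumption-level illustration rather than a prescribed setup.

# Sketch of the hierarchical setup the test exercises: the manager LLM plans
# and delegates, so the task is not bound to a specific agent here.
from crewai import Agent, Crew, Process, Task
from langchain_openai import ChatOpenAI

scorer = Agent(
    role="Scorer",
    goal="Score the title",
    backstory="You're an expert scorer, specialized in scoring titles.",
    allow_delegation=True,
)

task = Task(
    description="Give me an integer score between 1-5 for the following title: 'The impact of AI in the future of work'",
    expected_output="The score of the title.",
)

crew = Crew(
    agents=[scorer],
    tasks=[task],
    process=Process.hierarchical,
    manager_llm=ChatOpenAI(model="gpt-4o"),  # manager model pinned as in the diff
)

result = crew.kickoff()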
@@ -455,7 +486,7 @@ def test_increment_delegations_for_sequential_process():
     with patch.object(Task, "increment_delegations") as increment_delegations:
         increment_delegations.return_value = None
         crew.kickoff()
-        increment_delegations.assert_called_once
+        increment_delegations.assert_called_once()
 
 
 @pytest.mark.vcr(filter_headers=["authorization"])
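The two assert_called_once fixes above close a classic unittest.mock trap: without parentheses the assertion method is only looked up, never executed, so the old line could never fail. A minimal standalone illustration:

# Referencing the assertion is a no-op; calling it actually checks the mock.
from unittest.mock import MagicMock

mock = MagicMock()

mock.assert_called_once      # attribute lookup only, silently passes
try:
    mock.assert_called_once()  # real check: raises because mock was never called
except AssertionError as exc:
    print("caught:", exc)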
@@ -490,7 +521,7 @@ def test_increment_tool_errors():
     with patch.object(Task, "increment_tools_errors") as increment_tools_errors:
         increment_tools_errors.return_value = None
         crew.kickoff()
-        increment_tools_errors.assert_called_once
+        assert len(increment_tools_errors.mock_calls) == 3
 
 
 def test_task_definition_based_on_dict():
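In test_increment_tool_errors the no-op assertion is replaced with an explicit count of recorded calls, which is both executable and stricter: mock_calls grows by one entry per invocation, so the test now fails unless increment_tools_errors ran exactly three times. A small standalone example of the pattern:

# mock_calls records every invocation in order, so its length is a call count.
from unittest.mock import MagicMock

increment = MagicMock()
for _ in range(3):
    increment()

assert len(increment.mock_calls) == 3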
@@ -17,6 +17,10 @@ class TestPickleHandler(unittest.TestCase):
         os.remove(self.file_path)
 
     def test_initialize_file(self):
+        assert os.path.exists(self.file_path) is False
+
+        self.handler.initialize_file()
+
         assert os.path.exists(self.file_path) is True
         assert os.path.getsize(self.file_path) >= 0
 
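The added assertions pin down initialize_file's contract: the backing file must not exist before the call and must exist afterwards (possibly empty, hence getsize >= 0). Below is a hedged, self-contained sketch of a handler with that behaviour; it illustrates the contract only and is not crewAI's actual PickleHandler code.

# Illustration of the contract asserted above: initialize_file() creates the
# backing file on first use and leaves an existing file alone.
import os
import pickle


class TinyPickleHandler:
    def __init__(self, file_path: str) -> None:
        self.file_path = file_path

    def initialize_file(self) -> None:
        if not os.path.exists(self.file_path):
            with open(self.file_path, "wb") as f:
                pickle.dump({}, f)  # start from an empty store


handler = TinyPickleHandler("demo_state.pkl")
assert os.path.exists(handler.file_path) is False
handler.initialize_file()
assert os.path.exists(handler.file_path) is True
assert os.path.getsize(handler.file_path) >= 0
os.remove(handler.file_path)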