Mirror of https://github.com/crewAIInc/crewAI.git, synced 2025-12-16 12:28:30 +00:00
Compare commits
42 Commits
| SHA1 |
| :--- |
| 1f802ccb5a |
| e1306a8e6a |
| 997c906b5f |
| 2530196cf8 |
| 340bea3271 |
| 3df3bba756 |
| a9863fe670 |
| 7b49b4e985 |
| 577db88f8e |
| 01a2e650a4 |
| cd9f7931c9 |
| 2b04ae4e4a |
| cd0b82e794 |
| 0ddcffe601 |
| 712d106a44 |
| 34c5560cb0 |
| dcba1488a6 |
| 8e4b156f11 |
| ab98c3bd28 |
| 7f98a99e90 |
| 101b80c234 |
| 44598babcb |
| 51edfb4604 |
| 12d6fa1494 |
| 99a15ac2ae |
| 093a9c8174 |
| 464dfc4e67 |
| 1c7f9826b4 |
| e397a49c23 |
| 8c925237e7 |
| 0593d52b91 |
| 7b7d714109 |
| e9aa87f62b |
| 8f5d735b2f |
| e24f4867df |
| ef024ca106 |
| 4c519d9d98 |
| 94cb96b288 |
| 108a0d36b7 |
| efb097a76b |
| af03042852 |
| 21667bc7e1 |
4  .gitignore (vendored)

@@ -5,4 +5,6 @@ dist/
.env
assets/*
.idea
test/
docs_crew/
chroma.sqlite3
43  README.md

@@ -24,6 +24,7 @@
- [Key Features](#key-features)
- [Examples](#examples)
- [Quick Tutorial](#quick-tutorial)
- [Write Job Descriptions](#write-job-descriptions)
- [Trip Planner](#trip-planner)
- [Stock Analysis](#stock-analysis)
- [Connecting Your Crew to a Model](#connecting-your-crew-to-a-model)
@@ -48,6 +49,12 @@ To get started with CrewAI, follow these simple steps:
pip install crewai
```

If you also want to install crewai-tools, a package of tools that agents can use (it pulls in additional dependencies), you can install it with:

```shell
pip install 'crewai[tools]'
```

The example below also uses DuckDuckGo's Search. You can install it with `pip` too:

```shell
@@ -62,9 +69,10 @@ from crewai import Agent, Task, Crew, Process

os.environ["OPENAI_API_KEY"] = "YOUR_API_KEY"

# You can choose to use a local model through Ollama for example. See ./docs/how-to/llm-connections.md for more information.
# from langchain_community.llms import Ollama
# ollama_llm = Ollama(model="openhermes")
# You can choose to use a local model through Ollama for example. See https://docs.crewai.com/how-to/LLM-Connections/ for more information.
# OPENAI_API_BASE='http://localhost:11434/v1'
# OPENAI_MODEL_NAME='openhermes'  # Adjust based on available model
# OPENAI_API_KEY=''

# Install duckduckgo-search for this example:
# !pip install -U duckduckgo-search
@@ -84,12 +92,12 @@ researcher = Agent(
  tools=[search_tool]
  # You can pass an optional llm attribute specifying what model you want to use.
  # It can be a local model through Ollama / LM Studio or a remote
  # model like OpenAI, Mistral, Anthropic or others (https://python.langchain.com/docs/integrations/llms/)
  # model like OpenAI, Mistral, Anthropic or others (https://docs.crewai.com/how-to/LLM-Connections/)
  #
  # Examples:
  # import os
  # os.environ['OPENAI_MODEL_NAME'] = 'gpt-3.5-turbo'
  #
  # from langchain_community.llms import Ollama
  # llm=ollama_llm # was defined above in the file
  # OR
  #
  # from langchain_openai import ChatOpenAI
  # llm=ChatOpenAI(model_name="gpt-3.5", temperature=0.7)
@@ -100,15 +108,14 @@ writer = Agent(
  backstory="""You are a renowned Content Strategist, known for your insightful and engaging articles.
  You transform complex concepts into compelling narratives.""",
  verbose=True,
  allow_delegation=True,
  # (optional) llm=ollama_llm
  allow_delegation=True
)

# Create tasks for your agents
task1 = Task(
  description="""Conduct a comprehensive analysis of the latest advancements in AI in 2024.
  Identify key trends, breakthrough technologies, and potential industry impacts.
  Your final answer MUST be a full analysis report""",
  Identify key trends, breakthrough technologies, and potential industry impacts.""",
  expected_output="Full analysis report in bullet points",
  agent=researcher
)

@@ -116,8 +123,8 @@ task2 = Task(
  description="""Using the insights provided, develop an engaging blog
  post that highlights the most significant AI advancements.
  Your post should be informative yet accessible, catering to a tech-savvy audience.
  Make it sound cool, avoid complex words so it doesn't sound like AI.
  Your final answer MUST be the full blog post of at least 4 paragraphs.""",
  Make it sound cool, avoid complex words so it doesn't sound like AI.""",
  expected_output="Full blog post of at least 4 paragraphs",
  agent=writer
)

@@ -143,6 +150,8 @@ In addition to the sequential process, you can use the hierarchical process, whi
- **Autonomous Inter-Agent Delegation**: Agents can autonomously delegate tasks and inquire amongst themselves, enhancing problem-solving efficiency.
- **Flexible Task Management**: Define tasks with customizable tools and assign them to agents dynamically.
- **Processes Driven**: Currently only supports `sequential` task execution and `hierarchical` processes, but more complex processes like consensual and autonomous are being worked on.
- **Save output as file**: Save the output of individual tasks as a file, so you can use it later.
- **Parse output as Pydantic or JSON**: Parse the output of individual tasks as a Pydantic model or as JSON if you want to.
- **Works with Open Source Models**: Run your crew using OpenAI or open source models; refer to the [Connect crewAI to LLMs](https://docs.crewai.com/how-to/LLM-Connections/) page for details on configuring your agents' connections to models, even ones running locally!



@@ -160,6 +169,12 @@ You can test different real life examples of AI crews in the [crewAI-examples re

[](https://www.youtube.com/watch?v=tnejrr-0a94 "CrewAI Tutorial")

### Write Job Descriptions

[Check out code for this example](https://github.com/joaomdmoura/crewAI-examples/tree/main/job-posting) or watch a video below:

[](https://www.youtube.com/watch?v=u98wEMz-9to "Jobs postings")

### Trip Planner

[Check out code for this example](https://github.com/joaomdmoura/crewAI-examples/tree/main/trip_planner) or watch a video below:
@@ -180,7 +195,7 @@ Please refer to the [Connect crewAI to LLMs](https://docs.crewai.com/how-to/LLM-

## How CrewAI Compares

- **Autogen**: While Autogen excels in creating conversational agents capable of working together, it lacks an inherent concept of process. In Autogen, orchestrating agents' interactions requires additional programming, which can become complex and cumbersome as the scale of tasks grows.
- **Autogen**: While Autogen does well at creating conversational agents capable of working together, it lacks an inherent concept of process. In Autogen, orchestrating agents' interactions requires additional programming, which can become complex and cumbersome as the scale of tasks grows.

- **ChatDev**: ChatDev introduced the idea of processes into the realm of AI agents, but its implementation is quite rigid. Customizations in ChatDev are limited and not geared towards production environments, which can hinder scalability and flexibility in real-world applications.

@@ -15,26 +15,27 @@ description: What are crewAI Agents and how to use them.

## Agent Attributes

| Attribute | Description |
| :---------- | :----------------------------------- |
| **Role** | Defines the agent's function within the crew. It determines the kind of tasks the agent is best suited for. |
| **Goal** | The individual objective that the agent aims to achieve. It guides the agent's decision-making process. |
| **Backstory** | Provides context to the agent's role and goal, enriching the interaction and collaboration dynamics. |
| **LLM** | The language model used by the agent to process and generate text. |
| **Tools** | Set of capabilities or functions that the agent can use to perform tasks. Tools can be shared or exclusive to specific agents. |
| **Function Calling LLM** | The language model used by this agent to call functions; if none is passed, the same main LLM for each agent will be used. |
| **Max Iter** | The maximum number of iterations the agent can perform before being forced to give its best answer. |
| **Max RPM** | The maximum number of requests per minute the agent can perform to avoid rate limits. |
| **Verbose** | Allows you to see what is going on during the crew execution. |
| **Allow Delegation** | Agents can delegate tasks or questions to one another, ensuring that each task is handled by the most suitable agent. |
| **Step Callback** | A function that is called after each step of the agent. This can be used to log the agent's actions or to perform other operations. It will overwrite the crew `step_callback`. |
| Attribute | Description |
| :------------------ | :----------------------------------- |
| **Role** | Defines the agent's function within the crew. It determines the kind of tasks the agent is best suited for. |
| **Goal** | The individual objective that the agent aims to achieve. It guides the agent's decision-making process. |
| **Backstory** | Provides context to the agent's role and goal, enriching the interaction and collaboration dynamics. |
| **LLM** | The language model used by the agent to process and generate text. Defaults to OpenAI's GPT-4 (`ChatOpenAI`), unless another model is specified through the environment variable "OPENAI_MODEL_NAME". |
| **Tools** | Set of capabilities or functions that the agent can use to perform tasks. Tools can be shared or exclusive to specific agents. It's an attribute that can be set during the initialization of an agent. |
| **Function Calling LLM** | The language model used by this agent to call functions. It is an optional field; if not provided, the agent defaults to its main `llm`. |
| **Max Iter** | The maximum number of iterations the agent can perform before being forced to give its best answer. Default is `15`. |
| **Max RPM** | The maximum number of requests per minute the agent can perform to avoid rate limits. It's optional and can be left unspecified. |
| **Verbose** | Enables detailed logging of the agent's execution for debugging or monitoring purposes when set to True. Default is `False`. |
| **Allow Delegation** | Agents can delegate tasks or questions to one another, ensuring that each task is handled by the most suitable agent. |
| **Step Callback** | A function that is called after each step of the agent. This can be used to log the agent's actions or to perform other operations. It will overwrite the crew `step_callback`. |
| **Memory** | Indicates whether the agent should have memory, which impacts its ability to remember past interactions. Default is `False`. |

## Creating an Agent

!!! note "Agent Interaction"
    Agents can interact with each other using CrewAI's built-in delegation and communication mechanisms.<br/>This allows for dynamic task management and problem-solving within the crew.

To create an agent, you would typically initialize an instance of the `Agent` class with the desired properties. Here's a conceptual example:
To create an agent, you would typically initialize an instance of the `Agent` class with the desired properties. Here's a conceptual example including all attributes:

```python
# Example: Creating an agent with all attributes
@@ -48,14 +49,15 @@ agent = Agent(
  to the business.
  You're currently working on a project to analyze the
  performance of our marketing campaigns.""",
  tools=[my_tool1, my_tool2],
  llm=my_llm,
  function_calling_llm=my_llm,
  max_iter=10,
  max_rpm=10,
  verbose=True,
  allow_delegation=True,
  step_callback=my_intermediate_step_callback
  tools=[my_tool1, my_tool2],  # Optional
  llm=my_llm,  # Optional
  function_calling_llm=my_llm,  # Optional
  max_iter=15,  # Optional
  max_rpm=None,  # Optional
  verbose=True,  # Optional
  allow_delegation=True,  # Optional
  step_callback=my_intermediate_step_callback,  # Optional
  memory=True  # Optional
)
```

@@ -1,6 +1,6 @@
---
title: How Agents Collaborate in CrewAI
description: Exploring the dynamics of agent collaboration within the CrewAI framework.
description: Exploring the dynamics of agent collaboration within the CrewAI framework, focusing on the newly integrated features for enhanced functionality.
---

## Collaboration Fundamentals
@@ -11,14 +11,27 @@ description: Exploring the dynamics of agent collaboration within the CrewAI fra
- **Task Assistance**: Allows agents to seek help from peers with the required expertise for specific tasks.
- **Resource Allocation**: Optimizes task execution through the efficient distribution and sharing of resources among agents.

## Enhanced Attributes for Improved Collaboration
The `Crew` class has been enriched with several attributes to support advanced functionalities; a minimal configuration sketch follows the list:

- **Language Model Management (`manager_llm`, `function_calling_llm`)**: Manages language models for executing tasks and tools, facilitating sophisticated agent-tool interactions.
- **Process Flow (`process`)**: Defines the execution logic (e.g., sequential, hierarchical) to streamline task distribution and execution.
- **Verbose Logging (`verbose`)**: Offers detailed logging capabilities for monitoring and debugging purposes.
- **Configuration (`config`)**: Allows extensive customization to tailor the crew's behavior according to specific requirements.
- **Rate Limiting (`max_rpm`)**: Ensures efficient utilization of resources by limiting requests per minute.
- **Internationalization Support (`language`)**: Facilitates operation in multiple languages, enhancing global usability.
- **Execution and Output Handling (`full_output`)**: Distinguishes between full and final outputs for nuanced control over task results.
- **Callback and Telemetry (`step_callback`)**: Integrates callbacks for step-wise execution monitoring and telemetry for performance analytics.
- **Crew Sharing (`share_crew`)**: Enables sharing of crew information with CrewAI for continuous improvement.

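How these attributes fit together is easiest to see in code. The snippet below is a minimal, hedged sketch rather than this page's own example: the agents, tasks, model names, and parameter values are illustrative assumptions, and it presumes `langchain_openai` is installed for the model objects.

```python
from crewai import Agent, Task, Crew, Process
from langchain_openai import ChatOpenAI

# Hypothetical agents and tasks, kept minimal for illustration.
researcher = Agent(role='Researcher', goal='Gather data on a topic', backstory='A curious analyst.')
writer = Agent(role='Writer', goal='Compile findings into a report', backstory='A clear communicator.')
gather = Task(description='Gather the latest AI news', expected_output='A bullet list of findings')
report = Task(description='Compile a short report from the findings', expected_output='A one-page report')

crew = Crew(
    agents=[researcher, writer],
    tasks=[gather, report],
    process=Process.hierarchical,                                  # process flow
    manager_llm=ChatOpenAI(model_name="gpt-4"),                    # required by the hierarchical process
    function_calling_llm=ChatOpenAI(model_name="gpt-3.5-turbo"),   # language model used for tool calls
    verbose=True,                                                  # verbose logging
    max_rpm=10,                                                    # rate limiting
    language="en",                                                 # internationalization support
    full_output=True,                                              # return all task outputs, not just the final one
    step_callback=lambda step: print(step),                        # step-wise execution monitoring
    share_crew=False,                                              # crew sharing opt-in
)
```

Aside from `agents` and `tasks`, each argument maps onto one bullet in the list above and can be omitted; `manager_llm` only becomes mandatory once the hierarchical process is selected.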
## Delegation: Dividing to Conquer
Delegation enhances functionality by allowing agents to intelligently assign tasks or seek help, thereby amplifying the crew's overall capability.

## Implementing Collaboration and Delegation
Setting up a crew involves defining the roles and capabilities of each agent. CrewAI seamlessly manages their interactions, ensuring efficient collaboration and delegation.
Setting up a crew involves defining the roles and capabilities of each agent. CrewAI seamlessly manages their interactions, ensuring efficient collaboration and delegation, with enhanced customization and monitoring features to adapt to various operational needs.

## Example Scenario
Imagine a crew with a researcher agent tasked with data gathering and a writer agent responsible for compiling reports. The writer can delegate research tasks or ask questions to the researcher, facilitating a seamless workflow.
Consider a crew with a researcher agent tasked with data gathering and a writer agent responsible for compiling reports. The integration of advanced language model management and process flow attributes allows for more sophisticated interactions, such as the writer delegating complex research tasks to the researcher or querying specific information, thereby facilitating a seamless workflow.

## Conclusion
Collaboration and delegation are pivotal, transforming individual AI agents into a coherent, intelligent crew capable of tackling complex tasks. CrewAI's framework not only simplifies these interactions but enhances their effectiveness, paving the way for sophisticated AI-driven solutions.
The integration of advanced attributes and functionalities into the CrewAI framework significantly enriches the agent collaboration ecosystem. These enhancements not only simplify interactions but also offer unprecedented flexibility and control, paving the way for sophisticated AI-driven solutions capable of tackling complex tasks through intelligent collaboration and delegation.

@@ -15,18 +15,17 @@ description: Understanding and utilizing crews in the crewAI framework.
| **Agents** | A list of agents that are part of the crew. |
| **Process** | The process flow (e.g., sequential, hierarchical) the crew follows. |
| **Verbose** | The verbosity level for logging during execution. |
| **Manager LLM** | The language model used by the manager agent in a hierarchical process. |
| **Function Calling LLM** | The language model used by all agents in the crew to call functions; if none is passed, the same main LLM for each agent will be used. |
| **Config** | Configuration settings for the crew. |
| **Manager LLM** | The language model used by the manager agent in a hierarchical process. **Required when using a hierarchical process.** |
| **Function Calling LLM** | The language model used by all agents in the crew for calling functions. If none is passed, the main LLM for each agent will be used. |
| **Config** | Optional configuration settings for the crew, in `Json` or `Dict[str, Any]` format. |
| **Max RPM** | Maximum requests per minute the crew adheres to during execution. |
| **Language** | Language setting for the crew's operation. |
| **Full Output** | Whether the crew should return the full output with all tasks outputs or just the final output. |
| **Step Callback** | A function that is called after each step of every agent. This can be used to log the agent's actions or to perform other operations; it won't override the agent-specific `step_callback`. |
| **Share Crew** | Whether you want to share the complete crew information and execution with the crewAI team to make the library better, and allow us to train models. |

| **Language** | Language used for the crew; defaults to English. |
| **Full Output** | Whether the crew should return the full output with all tasks outputs or just the final output. |
| **Step Callback** | A function that is called after each step of every agent. This can be used to log the agent's actions or to perform other operations; it won't override the agent-specific `step_callback`. |
| **Share Crew** | Whether you want to share the complete crew information and execution with the crewAI team to make the library better, and allow us to train models. |

!!! note "Crew Max RPM"
    The `max_rpm` attribute sets the maximum number of requests per minute the crew can perform to avoid rate limits and will override individual agents `max_rpm` settings if you set it.
    The `max_rpm` attribute sets the maximum number of requests per minute the crew can perform to avoid rate limits and will override individual agents' `max_rpm` settings if you set it.

## Creating a Crew

@@ -68,7 +67,7 @@ my_crew = Crew(
## Crew Execution Process

- **Sequential Process**: Tasks are executed one after another, allowing for a linear flow of work.
- **Hierarchical Process**: A manager agent coordinates the crew, delegating tasks and validating outcomes before proceeding.
- **Hierarchical Process**: A manager agent coordinates the crew, delegating tasks and validating outcomes before proceeding. **Note**: A `manager_llm` is required for this process.

### Kicking Off a Crew

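The diff cuts off before the kickoff snippet itself. As a minimal sketch, assuming the `my_crew` object referenced in the hunk header above has already been assembled, starting execution is a single call:

```python
# Minimal sketch: start execution of the previously assembled crew.
result = my_crew.kickoff()
print(result)
```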
@@ -1,23 +1,23 @@
---
title: Managing Processes in CrewAI
description: An overview of workflow management through processes in CrewAI.
description: Detailed guide on workflow management through processes in CrewAI, with updated implementation details.
---

## Understanding Processes
!!! note "Core Concept"
    Processes in CrewAI orchestrate how tasks are executed by agents, akin to project management in human teams. They ensure tasks are distributed and completed efficiently, according to a predefined game plan.
    In CrewAI, processes orchestrate the execution of tasks by agents, akin to project management in human teams. These processes ensure tasks are distributed and executed efficiently, in alignment with a predefined strategy.

## Process Implementations

- **Sequential**: Executes tasks one after another, ensuring a linear and orderly progression.
- **Hierarchical**: Implements a chain of command, where tasks are delegated and executed based on a managerial structure.
- **Consensual (WIP)**: Future process type aiming for collaborative decision-making among agents on task execution.
- **Sequential**: Executes tasks sequentially, ensuring tasks are completed in an orderly progression.
- **Hierarchical**: Organizes tasks in a managerial hierarchy, where tasks are delegated and executed based on a structured chain of command; the manager used for delegation is created automatically by crewAI.
- **Consensual (Planned)**: A future process type aiming for collaborative decision-making among agents on task execution, introducing a more democratic approach to task management within CrewAI.

## The Role of Processes in Teamwork
Processes transform individual agents into a unified team, coordinating their efforts to achieve common goals with efficiency and harmony.
Processes enable individual agents to operate as a cohesive unit, streamlining their efforts to achieve common objectives with efficiency and coherence.

## Assigning Processes to a Crew
Specify the process during crew creation to determine the execution strategy:
Specify the process type upon crew creation to set the execution strategy:

```python
from crewai import Crew
@@ -29,20 +29,21 @@ crew = Crew(agents=my_agents, tasks=my_tasks, process=Process.sequential)
# Example: Creating a crew with a hierarchical process
crew = Crew(agents=my_agents, tasks=my_tasks, process=Process.hierarchical)
```
**Note:** Ensure `my_agents` and `my_tasks` are defined prior to creating a `Crew` object.

## Sequential Process
Ensures a natural flow of work, mirroring human team dynamics by progressing through tasks thoughtfully and systematically.
This method mirrors dynamic team workflows, progressing through tasks in a thoughtful and systematic manner. Task execution follows the predefined order in the task list, with the output of one task serving as context for the next.

Tasks need to be pre-assigned to agents, and the order of execution is determined by the order of the tasks in the list.

Tasks are executed one after another, ensuring a linear and orderly progression, and the output of one task is automatically used as context for the next task.

You can also define specific tasks' outputs that should be used as context for another task by using the `context` parameter in the `Task` class.
To customize task context, utilize the `context` parameter in the `Task` class to specify outputs that should be used as context for subsequent tasks.

## Hierarchical Process
Mimics a corporate hierarchy, where a manager oversees task execution, planning, delegation, and validation, enhancing task coordination.
Emulates a corporate hierarchy. A "manager" agent is created automatically to oversee task execution, including planning, delegation, and validation. Tasks are not pre-assigned; the manager allocates tasks to agents, reviews outputs, and assesses task completion.

In this process, tasks don't need to be pre-assigned to agents; the manager decides which agent performs each task, reviews the output, and decides whether the task is complete.
## Process Class: Detailed Overview
The `Process` class is implemented as an enumeration (`Enum`), ensuring type safety and restricting process values to the defined types (`sequential` and `hierarchical`). This design choice guarantees that only valid processes are utilized within the CrewAI framework.

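A small illustrative check of that design (the member names come from this page; the printed output comment is an assumption, not quoted from the library's documentation):

```python
from crewai import Process

# Process is an Enum, so invalid values are rejected rather than silently accepted.
print([member.name for member in Process])   # expected: ['sequential', 'hierarchical']
chosen = Process.hierarchical                # valid member
# Process("consensual") would raise a ValueError, since no such member exists yet.
```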
## Planned Future Processes
- **Consensual Process**: A collaborative decision-making process among agents on task execution is planned but not currently implemented. This future enhancement will introduce a more democratic approach to task management within CrewAI.

## Conclusion
Processes are vital for structured collaboration within CrewAI, enabling agents to work together systematically. Future updates will introduce new processes, further mimicking the adaptability and complexity of human teamwork.
The structured collaboration facilitated by processes within CrewAI is crucial for enabling systematic teamwork among agents. Documentation will be updated to reflect new processes and enhancements, ensuring users have access to the most current and comprehensive information.

@@ -5,36 +5,39 @@ description: Overview and management of tasks within the crewAI framework.

## Overview of a Task
!!! note "What is a Task?"
    In the CrewAI framework, tasks are individual assignments that agents complete. They encapsulate necessary information for execution, including a description, assigned agent, and required tools, offering flexibility for various action complexities.
    In the CrewAI framework, tasks are individual assignments that agents complete. They encapsulate the necessary information for execution, including a description, assigned agent, and required tools, offering flexibility for various action complexities.

Tasks in CrewAI can be designed to require collaboration between agents. For example, one agent might gather data while another analyzes it. This collaborative approach can be defined within the task properties and managed by the Crew's process.

## Task Attributes

| Attribute | Description |
| :---------- | :----------------------------------- |
| :------------- | :----------------------------------- |
| **Description** | A clear, concise statement of what the task entails. |
| **Agent** | Optionally, you can specify which agent is responsible for the task. If not, the crew's process will determine who takes it on. |
| **Expected Output** *(optional)* | Clear and detailed definition of the expected output for the task. |
| **Tools** *(optional)* | These are the functions or capabilities the agent can utilize to perform the task. They can be anything from simple actions like 'search' to more complex interactions with other agents or APIs. |
| **Async Execution** *(optional)* | If the task should be executed asynchronously. |
| **Context** *(optional)* | Other tasks that will have their output used as context for this task; if one is an asynchronous task, it will wait for that to finish. |
| **Async Execution** *(optional)* | If the task should be executed asynchronously. This indicates that the crew will not wait for the task to be completed to continue with the next task. |
| **Context** *(optional)* | Other tasks that will have their output used as context for this task. If a task is asynchronous, the system will wait for that to finish before using its output as context. |
| **Output JSON** *(optional)* | Takes a pydantic model and returns the output as a JSON object. **The agent LLM needs to use the OpenAI client; it could be Ollama, for example, but through the OpenAI wrapper.** |
| **Output Pydantic** *(optional)* | Takes a pydantic model and returns the output as a pydantic object. **The agent LLM needs to use the OpenAI client; it could be Ollama, for example, but through the OpenAI wrapper.** |
| **Output File** *(optional)* | Takes a file path and saves the output of the task to it. |
| **Callback** *(optional)* | A function to be executed after the task is completed. |

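The output-related attributes in the table are easiest to see together in code. The snippet below is a hedged sketch rather than an example from this page: the model, descriptions, and callback are hypothetical, and it assumes only one parsed output format (`output_json` or `output_pydantic`) is configured on a task while `output_file` and `callback` may accompany it.

```python
from pydantic import BaseModel
from crewai import Task

class NewsSummary(BaseModel):
    title: str
    bullet_points: list[str]

def log_result(output):
    # TaskOutput exposes the task description and the raw result string.
    print(f"Finished: {output.description}\n{output.raw_output}")

summary_task = Task(
    description='Summarize the latest AI news',
    expected_output='A title plus a handful of bullet points',
    output_pydantic=NewsSummary,        # parse the result into the model above
    output_file='ai-news-summary.md',   # also persist the raw output to a file
    callback=log_result,                # runs after the task completes
)
```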
## Creating a Task

This is the simpliest example for creating a task, it involves defining its scope and agent, but there are optional attributes that can provide a lot of flexibility:
This is the simplest example of creating a task; it involves defining its scope and agent, but there are optional attributes that can provide a lot of flexibility:

```python
from crewai import Task

task = Task(
description='Find and summarize the latest and most relevant news on AI',
agent=sales_agent
  description='Find and summarize the latest and most relevant news on AI',
  agent=sales_agent
)
```
!!! note "Task Assignment"
    Tasks can be assigned directly by specifying an `agent` for them, or they can be assigned at run time if you are using the `hierarchical` process, with CrewAI considering roles, availability, or other criteria.

## Integrating Tools with Tasks

@@ -51,12 +54,12 @@ from langchain.agents import Tool
from langchain_community.tools import DuckDuckGoSearchRun

research_agent = Agent(
role='Researcher',
goal='Find and summarize the latest AI news',
backstory="""You're a researcher at a large company.
You're responsible for analyzing data and providing insights
to the business."""
verbose=True
  role='Researcher',
  goal='Find and summarize the latest AI news',
  backstory="""You're a researcher at a large company.
  You're responsible for analyzing data and providing insights
  to the business.""",
  verbose=True
)

# Install duckduckgo-search for this example:
@@ -65,15 +68,15 @@ search_tool = DuckDuckGoSearchRun()

task = Task(
  description='Find and summarize the latest AI news',
expected_output='A bullet list summary of the top 5 most important AI news',
agent=research_agent,
  expected_output='A bullet list summary of the top 5 most important AI news',
  agent=research_agent,
  tools=[search_tool]
)

crew = Crew(
agents=[research_agent],
tasks=[task],
verbose=2
  agents=[research_agent],
  tasks=[task],
  verbose=2
)

result = crew.kickoff()
@@ -82,9 +85,9 @@ print(result)

This demonstrates how tasks with specific tools can override an agent's default set for tailored task execution.

## Refering other Tasks
## Referring to Other Tasks

In crewAI the output of one task is automatically relayed into the next one, but you can specifically define what tasks output should be used as context for another task.
In crewAI, the output of one task is automatically relayed into the next one, but you can specifically define which tasks' output should be used as context for another task.

This is useful when you have a task that depends on the output of another task that is not performed immediately after it. This is done through the `context` attribute of the task:

@@ -93,15 +96,15 @@ This is useful when you have a task that depends on the output of another task t

research_task = Task(
  description='Find and summarize the latest AI news',
expected_output='A bullet list summary of the top 5 most important AI news',
agent=research_agent,
  expected_output='A bullet list summary of the top 5 most important AI news',
  agent=research_agent,
  tools=[search_tool]
)

write_blog_task = Task(
description="Write a full blog post about the importante of AI and it's latest news",
expected_output='Full blog post that is 4 paragraphs long',
agent=writer_agent,
  description="Write a full blog post about the importance of AI and its latest news",
  expected_output='Full blog post that is 4 paragraphs long',
  agent=writer_agent,
  context=[research_task]
)

@@ -110,7 +113,7 @@ write_blog_task = Task(

## Asynchronous Execution

You can define a task to be executed asynchronously, this means that the crew will not wait for it to be completed to continue with the next task. This is useful for tasks that take a long time to be completed, or that are not crucial for the next tasks to be performed.
You can define a task to be executed asynchronously. This means that the crew will not wait for it to be completed to continue with the next task. This is useful for tasks that take a long time to be completed, or that are not crucial for the next tasks to be performed.

You can then use the `context` attribute to define in a future task that it should wait for the output of the asynchronous task to be completed.

@@ -118,7 +121,7 @@ You can then use the `context` attribute to define in a future task that it shou
#...

list_ideas = Task(
  description="List of 5 interesting ideas to explore for na article about AI.",
  description="List of 5 interesting ideas to explore for an article about AI.",
  expected_output="Bullet point list of 5 ideas for an article.",
  agent=researcher,
  async_execution=True # Will be executed asynchronously
@@ -132,7 +135,7 @@ list_important_history = Task(
)

write_article = Task(
  description="Write an article about AI, it's history and interesting ideas.",
  description="Write an article about AI, its history, and interesting ideas.",
  expected_output="A 4 paragraph article about AI.",
  agent=writer,
  context=[list_ideas, list_important_history] # Will wait for the output of the two tasks to be completed
@@ -154,7 +157,7 @@ def callback_function(output: TaskOutput):
    print(f"""
        Task completed!
        Task: {output.description}
        Output: {output.result}
        Output: {output.raw_output}
    """)

research_task = Task(
@@ -168,7 +171,7 @@ research_task = Task(
#...
```

## Accessing a specific Task Output
## Accessing a Specific Task Output

Once a crew finishes running, you can access the output of a specific task by using the `output` attribute of the task object:

@@ -195,18 +198,24 @@ result = crew.kickoff()
print(f"""
    Task completed!
    Task: {task1.output.description}
    Output: {task1.output.result}
    Output: {task1.output.raw_output}
""")
```

## Tool Override Mechanism

Specifying tools in a task allows for dynamic adaptation of agent capabilities, emphasizing CrewAI's flexibility.

## Error Handling and Validation Mechanisms

While creating and executing tasks, certain validation mechanisms are in place to ensure the robustness and reliability of task attributes. These include but are not limited to:

- Ensuring only one output type is set per task to maintain clear output expectations.
- Preventing the manual assignment of the `id` attribute to uphold the integrity of the unique identifier system.

These validations help in maintaining the consistency and reliability of task executions within the crewAI framework.

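To make the first rule concrete, here is a hedged illustration (not taken from the library's documentation or test suite); the exact exception type and message raised may differ:

```python
from pydantic import BaseModel
from crewai import Task

class Summary(BaseModel):
    text: str

try:
    # Configuring two output formats on one task is expected to be rejected.
    Task(
        description='Summarize the latest AI news',
        expected_output='A short summary',
        output_json=Summary,
        output_pydantic=Summary,
    )
except Exception as error:
    print(f"Validation rejected the task: {error}")
```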
## Conclusion

Tasks are the driving force behind the actions of agents in crewAI. By properly defining tasks and their outcomes, you set the stage for your AI agents to work effectively, either independently or as a collaborative unit.
Equipping tasks with appropriate tools is crucial for maximizing CrewAI's potential, ensuring agents are effectively prepared for their assignments.
Equipping tasks with appropriate tools and following robust validation practices is crucial for maximizing CrewAI's potential, ensuring agents are effectively prepared for their assignments and that tasks are executed as intended.
@@ -1,65 +1,212 @@
---
title: crewAI Tools
description: Understanding and leveraging tools within the crewAI framework.
description: Understanding and leveraging tools within the crewAI framework for agent collaboration and task execution.
---

## Introduction
CrewAI tools empower agents with capabilities ranging from web searching and data analysis to collaboration and delegating tasks among coworkers. This documentation outlines how to create, integrate, and leverage these tools within the CrewAI framework, including a new focus on collaboration tools.

## What is a Tool?
!!! note "Definition"
    A tool in CrewAI, is a skill, something Agents can use perform tasks, right now those can be tools from the [crewAI Toolkit](https://github.com/joaomdmoura/crewai-tools) and [LangChain Tools](https://python.langchain.com/docs/integrations/tools), those are basically functions that an agent can utilize for various actions, from simple searches to complex interactions with external systems.
    A tool in CrewAI is a skill or function that agents can utilize to perform various actions. This includes tools from the [crewAI Toolkit](https://github.com/joaomdmoura/crewai-tools) and [LangChain Tools](https://python.langchain.com/docs/integrations/tools), enabling everything from simple searches to complex interactions and effective teamwork among agents.

## Key Characteristics of Tools

- **Utility**: Designed for specific tasks such as web searching, data analysis, or content generation.
- **Integration**: Enhance agent capabilities by integrating tools directly into their workflow.
- **Customizability**: Offers the flexibility to develop custom tools or use existing ones from LangChain's ecosystem.
- **Utility**: Designed for various tasks such as web searching, data analysis, content generation, and agent collaboration.
- **Integration**: Enhances agent capabilities by integrating tools directly into their workflow.
- **Customizability**: Offers flexibility to develop custom tools or use existing ones, catering to specific agent needs.

## Using crewAI Tools

crewAI comes with a series of built-in tools that can be used to extend the capabilities of your agents. Start by installing our extra tools package:

```bash
pip install 'crewai[tools]'
```

Here is an example of how to use them:

```python
import os
from crewai import Agent, Task, Crew
# Importing some of the crewAI tools
from crewai_tools import (
  DirectoryReadTool,
  FileReadTool,
  SerperDevTool,
  WebsiteSearchTool
)

# get a free account at serper.dev
os.environ["SERPER_API_KEY"] = "Your Key"
os.environ["OPENAI_API_KEY"] = "Your Key"

# Instantiate tools
# Assumes the ./blog-posts directory exists and contains existing blog posts
docs_tools = DirectoryReadTool(directory='./blog-posts')
file_read_tool = FileReadTool()
search_tool = SerperDevTool()
website_rag = WebsiteSearchTool()

# Create agents
researcher = Agent(
  role='Market Research Analyst',
  goal='Provide up-to-date market analysis of the AI industry',
  backstory='An expert analyst with a keen eye for market trends.',
  tools=[search_tool, website_rag],
  verbose=True
)

writer = Agent(
  role='Content Writer',
  goal='Write an amazing, super engaging blog post about the AI industry',
  backstory='A skilled writer with a passion for technology.',
  tools=[docs_tools, file_read_tool],
  verbose=True
)

# Create tasks
research = Task(
  description='Research the AI industry and provide a summary of the latest most trending matters and developments.',
  expected_output='A summary of the top 3 latest most trending matters and developments in the AI industry with your unique take on why they matter.',
  agent=researcher
)

write = Task(
  description='Write an engaging blog post about the AI industry, using the summary provided by the research analyst. Read the latest blog posts in the directory to get inspiration.',
  expected_output='A 4 paragraph blog post formatted as markdown with proper subtitles about the latest trends that is engaging, informative, and funny; avoid complex words and make it easy to read.',
  agent=writer,
  output_file='blog-posts/new_post.md' # The final blog post will be written here
)

# Create a crew
crew = Crew(
  agents=[researcher, writer],
  tasks=[research, write],
  verbose=2
)

# Execute the tasks
crew.kickoff()
```

## Available crewAI Tools

Most of the tools in the crewAI toolkit let you either set specific arguments or leave them more open-ended. For example:

```python
from crewai_tools import DirectoryReadTool

# This will allow the agent with this tool to read any directory it wants during its execution
tool = DirectoryReadTool()

# OR

# This will allow the agent with this tool to read only the directory specified during its execution
tool = DirectoryReadTool(directory='./directory')
```

Specific per-tool docs are coming soon.
Here is a list of the available tools and their descriptions:

| Tool | Description |
| :-------------------------- | :-------------------------------------------------------------------------------------------- |
| **CodeDocsSearchTool** | A RAG tool optimized for searching through code documentation and related technical documents.|
| **CSVSearchTool** | A RAG tool designed for searching within CSV files, tailored to handle structured data. |
| **DirectorySearchTool** | A RAG tool for searching within directories, useful for navigating through file systems. |
| **DOCXSearchTool** | A RAG tool aimed at searching within DOCX documents, ideal for processing Word files. |
| **DirectoryReadTool** | Facilitates reading and processing of directory structures and their contents. |
| **FileReadTool** | Enables reading and extracting data from files, supporting various file formats. |
| **GithubSearchTool** | A RAG tool for searching within GitHub repositories, useful for code and documentation search.|
| **SerperDevTool** | A specialized search tool backed by the Serper.dev API, with additional functionality under development. |
| **TXTSearchTool** | A RAG tool focused on searching within text (.txt) files, suitable for unstructured data. |
| **JSONSearchTool** | A RAG tool designed for searching within JSON files, catering to structured data handling. |
| **MDXSearchTool** | A RAG tool tailored for searching within Markdown (MDX) files, useful for documentation. |
| **PDFSearchTool** | A RAG tool aimed at searching within PDF documents, ideal for processing scanned documents. |
| **PGSearchTool** | A RAG tool optimized for searching within PostgreSQL databases, suitable for database queries. |
| **RagTool** | A general-purpose RAG tool capable of handling various data sources and types. |
| **ScrapeElementFromWebsiteTool** | Enables scraping specific elements from websites, useful for targeted data extraction. |
| **ScrapeWebsiteTool** | Facilitates scraping entire websites, ideal for comprehensive data collection. |
| **WebsiteSearchTool** | A RAG tool for searching website content, optimized for web data extraction. |
| **XMLSearchTool** | A RAG tool designed for searching within XML files, suitable for structured data formats. |
| **YoutubeChannelSearchTool**| A RAG tool for searching within YouTube channels, useful for video content analysis. |
| **YoutubeVideoSearchTool** | A RAG tool aimed at searching within YouTube videos, ideal for video data extraction. |

## Creating your own Tools
!!! example "Custom Tool Creation"
    Developers can craft custom tools tailored for their agent’s needs or utilize pre-built options. Here’s how to create one:
    Developers can craft custom tools tailored for their agent’s needs or utilize pre-built options:

To create your own crewAI tools you will need to install our extra tools package:

```bash
pip install 'crewai[tools]'
```

Once you do that, there are two main ways to create a crewAI tool:

### Subclassing `BaseTool`

```python
from crewai_tools import BaseTool

class MyCustomTool(BaseTool):
    name: str = "Name of my tool"
    description: str = "Clear description for what this tool is useful for; your agent will need this information to use it."

    def _run(self, argument: str) -> str:
        # Implementation goes here
        pass
```

Define a new class inheriting from `BaseTool`, specifying `name`, `description`, and the `_run` method for the operational logic.

### Utilizing the `tool` Decorator

For a simpler approach, create a `Tool` object directly with the required attributes and the functional logic.

```python
from crewai_tools import tool
@tool("Name of my tool")
def my_tool(question: str) -> str:
    """Clear description for what this tool is useful for; your agent will need this information to use it."""
    # Function logic here
```

```python
import json
import requests
from crewai import Agent
from langchain.tools import tool
from crewai.tools import tool
from unstructured.partition.html import partition_html

class BrowserTools():

  # Annotate the function with the tool decorator from LangChain
  @tool("Scrape website content")
  def scrape_website(website):
    # Write logic for the tool.
    # In this case a function to scrape website content
    url = f"https://chrome.browserless.io/content?token={config('BROWSERLESS_API_KEY')}"
    payload = json.dumps({"url": website})
    headers = {'cache-control': 'no-cache', 'content-type': 'application/json'}
    response = requests.request("POST", url, headers=headers, data=payload)
    elements = partition_html(text=response.text)
    content = "\n\n".join([str(el) for el in elements])
    return content[:5000]

# Annotate the function with the tool decorator from crewAI
@tool("Integration with a given API")
def integration_tool(argument: str) -> str:
  """Integration with a given API"""
  # Code here
  return results # string to be sent back to the agent

# Assign the scraping tool to an agent
agent = Agent(
  role='Research Analyst',
  goal='Provide up-to-date market analysis',
  backstory='An expert analyst with a keen eye for market trends.',
  tools=[BrowserTools().scrape_website]
  role='Research Analyst',
  goal='Provide up-to-date market analysis',
  backstory='An expert analyst with a keen eye for market trends.',
  tools=[integration_tool]
)
```

## Using LangChain Tools
!!! info "LangChain Integration"
    CrewAI seamlessly integrates with LangChain’s comprehensive toolkit. Assigning an existing tool to an agent is straightforward:
    CrewAI seamlessly integrates with LangChain’s comprehensive toolkit for search-based queries and more:

```python
from crewai import Agent
from langchain.agents import Tool
from langchain.utilities import GoogleSerperAPIWrapper
import os

# Setup API keys
os.environ["OPENAI_API_KEY"] = "Your Key"
os.environ["SERPER_API_KEY"] = "Your Key"

search = GoogleSerperAPIWrapper()
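# The diff hunk skips ahead here; the lines below are a hedged sketch of the
# elided step (wrapping the Serper search in a LangChain Tool named
# `serper_tool`, which the next hunk passes to the agent). The name and
# description strings are illustrative assumptions, not the repo's exact code.
serper_tool = Tool(
    name="Intermediate Answer",
    func=search.run,
    description="Useful for search-based queries",
)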
@@ -77,6 +224,8 @@ agent = Agent(
  backstory='An expert analyst with a keen eye for market trends.',
  tools=[serper_tool]
)

# rest of the code ...
```

## Conclusion

@@ -1,112 +1,113 @@
|
||||
---
|
||||
title: Assembling and Activating Your CrewAI Team
|
||||
description: A step-by-step guide to creating a cohesive CrewAI team for your projects.
|
||||
description: A comprehensive guide to creating a dynamic CrewAI team for your projects, with updated functionalities including verbose mode, memory capabilities, and more.
|
||||
---
|
||||
|
||||
## Introduction
|
||||
Embarking on your CrewAI journey involves a few straightforward steps to set up your environment and initiate your AI crew. This guide ensures a seamless start.
|
||||
Embark on your CrewAI journey by setting up your environment and initiating your AI crew with enhanced features. This guide ensures a seamless start, incorporating the latest updates.
|
||||
|
||||
## Step 0: Installation
|
||||
Begin by installing CrewAI and any additional packages required for your project. For instance, the `duckduckgo-search` package is used in this example for enhanced search capabilities.
|
||||
Install CrewAI and any necessary packages for your project. The `duckduckgo-search` package is highlighted here for enhanced search capabilities.
|
||||
|
||||
```shell
|
||||
pip install crewai
|
||||
pip install crewai[tools]
|
||||
pip install duckduckgo-search
|
||||
```
|
||||
|
||||
## Step 1: Assemble Your Agents
|
||||
Begin by defining your agents with distinct roles and backstories. These elements not only add depth but also guide their task execution and interaction within the crew.
|
||||
Define your agents with distinct roles, backstories, and now, enhanced capabilities such as verbose mode and memory usage. These elements add depth and guide their task execution and interaction within the crew.
|
||||
|
||||
```python
|
||||
import os
|
||||
os.environ["OPENAI_API_KEY"] = "Your Key"
|
||||
|
||||
from crewai import Agent
|
||||
from langchain_community.tools import DuckDuckGoSearchRun
|
||||
search_tool = DuckDuckGoSearchRun()
|
||||
|
||||
# Topic that will be used in the crew run
|
||||
# Topic for the crew run
|
||||
topic = 'AI in healthcare'
|
||||
|
||||
# Creating a senior researcher agent
|
||||
# Creating a senior researcher agent with memory and verbose mode
|
||||
researcher = Agent(
|
||||
role='Senior Researcher',
|
||||
goal=f'Uncover groundbreaking technologies around {topic}',
|
||||
goal=f'Uncover groundbreaking technologies in {topic}',
|
||||
verbose=True,
|
||||
memory=True,
|
||||
backstory="""Driven by curiosity, you're at the forefront of
|
||||
innovation, eager to explore and share knowledge that could change
|
||||
the world."""
|
||||
the world.""",
|
||||
tools=[search_tool],
|
||||
allow_delegation=True
|
||||
)
|
||||
|
||||
# Creating a writer agent
|
||||
# Creating a writer agent with custom tools and delegation capability
|
||||
writer = Agent(
|
||||
role='Writer',
|
||||
goal=f'Narrate compelling tech stories around {topic}',
|
||||
goal=f'Narrate compelling tech stories about {topic}',
|
||||
verbose=True,
|
||||
memory=True,
|
||||
backstory="""With a flair for simplifying complex topics, you craft
|
||||
engaging narratives that captivate and educate, bringing new
|
||||
discoveries to light in an accessible manner."""
|
||||
discoveries to light in an accessible manner.""",
|
||||
tools=[search_tool],
|
||||
allow_delegation=False
|
||||
)
|
||||
```
|
||||
|
||||
## Step 2: Define the Tasks
|
||||
Detail the specific objectives for your agents. These tasks guide their focus and ensure a targeted approach to their roles.
|
||||
Detail the specific objectives for your agents, including new features for asynchronous execution and output customization. These tasks ensure a targeted approach to their roles.
|
||||
|
||||
```python
|
||||
from crewai import Task
|
||||
|
||||
# Install duckduckgo-search for this example:
|
||||
# !pip install -U duckduckgo-search
|
||||
|
||||
from langchain_community.tools import DuckDuckGoSearchRun
|
||||
search_tool = DuckDuckGoSearchRun()
|
||||
|
||||
# Research task for identifying AI trends
|
||||
# Research task
|
||||
research_task = Task(
|
||||
description=f"""Identify the next big trend in {topic}.
|
||||
Focus on identifying pros and cons and the overall narrative.
|
||||
|
||||
Your final report should clearly articulate the key points,
|
||||
its market opportunities, and potential risks.
|
||||
""",
|
||||
its market opportunities, and potential risks.""",
|
||||
expected_output='A comprehensive 3 paragraphs long report on the latest AI trends.',
|
||||
max_inter=3,
|
||||
tools=[search_tool],
|
||||
agent=researcher
|
||||
agent=researcher,
|
||||
)
|
||||
|
||||
# Writing task based on research findings
|
||||
# Writing task with language model configuration
|
||||
write_task = Task(
|
||||
description=f"""Compose an insightful article on {topic}.
|
||||
Focus on the latest trends and how it's impacting the industry.
|
||||
This article should be easy to understand, engaging and positive.
|
||||
""",
|
||||
expected_output=f'A 4 paragraph article on {topic} advancements.',
|
||||
This article should be easy to understand, engaging, and positive.""",
|
||||
expected_output=f'A 4 paragraph article on {topic} advancements fromated as markdown.',
|
||||
tools=[search_tool],
|
||||
agent=writer
|
||||
agent=writer,
|
||||
async_execution=False,
|
||||
output_file='new-blog-post.md' # Example of output customization
|
||||
)
|
||||
```

## Step 3: Form the Crew
Combine your agents into a crew, setting the workflow process they'll follow to accomplish the tasks.
Combine your agents into a crew, setting the workflow process they'll follow to accomplish the tasks, now with the option to configure language models for enhanced interaction.

```python
from crewai import Crew, Process

# Forming the tech-focused crew
# Forming the tech-focused crew with enhanced configurations
crew = Crew(
  agents=[researcher, writer],
  tasks=[research_task, write_task],
  process=Process.sequential  # Sequential task execution
  process=Process.sequential  # Optional: Sequential task execution is the default
)
```

## Step 4: Kick It Off
With your crew ready and the stage set, initiate the process. Watch as your agents collaborate, each contributing their expertise to achieve the collective goal.
With your enhanced crew ready, initiate the process. Observe as your agents collaborate, leveraging their new capabilities for a successful project outcome.

```python
# Starting the task execution process
# Starting the task execution process with enhanced feedback
result = crew.kickoff()
print(result)
```
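Because `write_task` sets `output_file='new-blog-post.md'`, the finished article is expected to land on disk as well. A quick way to check it after the run (plain Python, nothing CrewAI-specific):

```python
# Read back the article the writer agent saved through output_file.
with open('new-blog-post.md') as f:
  print(f.read())
```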

## Conclusion
Building and activating a crew in CrewAI is a seamless process. By carefully assigning roles, tasks, and a clear process, your AI team is equipped to tackle challenges efficiently. The depth of agent backstories and the precision of their objectives enrich the collaboration, leading to successful project outcomes.
Building and activating a crew in CrewAI has evolved with new functionalities. By incorporating verbose mode, memory capabilities, asynchronous task execution, output customization, and language model configuration, your AI team is more equipped than ever to tackle challenges efficiently. The depth of agent backstories and the precision of their objectives enrich collaboration, leading to successful project outcomes.

@@ -1,55 +1,72 @@
---
title: Customizing Agents in CrewAI
description: A guide to tailoring agents for specific roles and tasks within the CrewAI framework.
description: A comprehensive guide to tailoring agents for specific roles, tasks, and advanced customizations within the CrewAI framework.
---

## Customizable Attributes
Tailoring your AI agents is pivotal in crafting an efficient CrewAI team. Customization allows agents to be dynamically adapted to the unique requirements of any project.
Crafting an efficient CrewAI team hinges on the ability to tailor your AI agents dynamically to meet the unique requirements of any project. This section covers the foundational attributes you can customize.

### Key Attributes for Customization
- **Role**: Defines the agent's job within the crew, such as 'Analyst' or 'Customer Service Rep'.
- **Goal**: The agent's objective, aligned with its role and the crew's overall goals.
- **Backstory**: Adds depth to the agent's character, enhancing its role and motivations within the crew.
- **Tools**: The capabilities or methods the agent employs to accomplish tasks, ranging from simple functions to complex integrations.
- **Role**: Specifies the agent's job within the crew, such as 'Analyst' or 'Customer Service Rep'.
- **Goal**: Defines what the agent aims to achieve, in alignment with its role and the overarching objectives of the crew.
- **Backstory**: Provides depth to the agent's persona, enriching its motivations and engagements within the crew.
- **Tools**: Represents the capabilities or methods the agent uses to perform tasks, from simple functions to intricate integrations.

## Understanding Tools in CrewAI
Tools empower agents with functionalities to interact and manipulate their environment, from generic utilities to specialized functions. Integrating with LangChain offers access to a broad range of tools for diverse tasks.
## Advanced Customization Options
Beyond the basic attributes, CrewAI allows for deeper customization to enhance an agent's behavior and capabilities significantly.

### Language Model Customization
Agents can be customized with specific language models (`llm`) and function-calling language models (`function_calling_llm`), offering advanced control over their processing and decision-making abilities.
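A minimal sketch of both attributes, assuming OpenAI-hosted models via `langchain_openai` (the model names below are placeholders, not recommendations):

```python
from crewai import Agent
from langchain_openai import ChatOpenAI

analyst = Agent(
  role='Market Analyst',
  goal='Summarize daily market movements',
  backstory='A meticulous analyst who lives in the numbers.',
  llm=ChatOpenAI(model_name="gpt-4", temperature=0.2),         # main reasoning model
  function_calling_llm=ChatOpenAI(model_name="gpt-3.5-turbo")  # model used for tool calling
)
```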

### Enabling Memory for Agents
CrewAI supports memory for agents, enabling them to remember past interactions. This feature is critical for tasks requiring awareness of previous contexts or decisions.

## Performance and Debugging Settings
Adjusting an agent's performance and monitoring its operations are crucial for efficient task execution.

### Verbose Mode and RPM Limit
- **Verbose Mode**: Enables detailed logging of an agent's actions, useful for debugging and optimization.
- **RPM Limit**: Sets the maximum number of requests per minute (`max_rpm`), controlling the agent's query frequency to external services.

### Maximum Iterations for Task Execution
The `max_iter` attribute allows users to define the maximum number of iterations an agent can perform for a single task, preventing infinite loops or excessively long executions.

## Customizing Agents and Tools
Agents are customized by defining their attributes during initialization, with tools being a critical aspect of their functionality.
Agents are customized by defining their attributes and tools during initialization. Tools are critical for an agent's functionality, enabling them to perform specialized tasks. In this example we will use the crewAI tools package to create a tool for a research analyst agent.

```shell
pip install 'crewai[tools]'
```

### Example: Assigning Tools to an Agent
```python
from crewai import Agent
from langchain.agents import Tool
from langchain.utilities import GoogleSerperAPIWrapper
import os
from crewai import Agent
from crewai_tools import SerperDevTool

# Set API keys for tool initialization
os.environ["OPENAI_API_KEY"] = "Your Key"
os.environ["SERPER_API_KEY"] = "Your Key"

# Initialize a search tool
search_tool = GoogleSerperAPIWrapper()
search_tool = SerperDevTool()

# Define and assign the tool to an agent
serper_tool = Tool(
  name="Intermediate Answer",
  func=search_tool.run,
  description="Useful for search-based queries"
)

# Initialize the agent with the tool
# Initialize the agent with advanced options
agent = Agent(
  role='Research Analyst',
  goal='Provide up-to-date market analysis',
  backstory='An expert analyst with a keen eye for market trends.',
  tools=[serper_tool]
  tools=[search_tool],
  memory=True,
  verbose=True,
  max_rpm=10,  # Optional: Limit requests to 10 per minute, preventing API abuse
  max_iter=5,  # Optional: Limit task iterations to 5 before the agent tries to give its best answer
  allow_delegation=False
)
```

## Delegation and Autonomy
Agents in CrewAI can delegate tasks or ask questions, enhancing the crew's collaborative dynamics. This feature can be disabled to ensure straightforward task execution.
Controlling an agent's ability to delegate tasks or ask questions is vital for tailoring its autonomy and collaborative dynamics within the CrewAI framework.

### Example: Disabling Delegation for an Agent
```python
@@ -62,4 +79,4 @@ agent = Agent(
```

## Conclusion
Customizing agents is key to leveraging the full potential of CrewAI. By thoughtfully setting agents' roles, goals, backstories, and tools, you craft a nuanced and capable AI team ready to tackle complex challenges.
Customizing agents in CrewAI by setting their roles, goals, backstories, and tools, alongside advanced options like language model customization, memory, and performance settings, equips a nuanced and capable AI team ready for complex challenges.
@@ -1,36 +1,36 @@
---
title: Implementing the Hierarchical Process in CrewAI
description: Understanding and applying the hierarchical process within your CrewAI projects.
description: Understanding and applying the hierarchical process within your CrewAI projects, with updates reflecting the latest coding practices.
---

## Introduction
The hierarchical process in CrewAI introduces a structured approach to task management, mimicking traditional organizational hierarchies for efficient task delegation and execution.
The hierarchical process in CrewAI introduces a structured approach to managing tasks, mimicking traditional organizational hierarchies for efficient task delegation and execution. This ensures a systematic workflow that enhances project outcomes.

!!! note "Complexity"
    The current implementation of the hierarchical process relies on tool usage that usually requires more complex models like GPT-4 and usually implies higher token usage.
!!! note "Complexity and Efficiency"
    The hierarchical process is designed to leverage advanced models like GPT-4, optimizing token usage while handling complex tasks with greater efficiency.

## Hierarchical Process Overview
In this process, tasks are assigned and executed based on a defined hierarchy, where a 'manager' agent coordinates the workflow, delegating tasks to other agents and validating their outcomes before proceeding.
Tasks within this process are managed through a clear hierarchy, where a 'manager' agent coordinates the workflow, delegates tasks, and validates outcomes, ensuring a streamlined and effective execution process.

### Key Features
- **Task Delegation**: A manager agent oversees task distribution among crew members.
- **Result Validation**: The manager reviews outcomes before passing tasks along, ensuring quality and relevance.
- **Efficient Workflow**: Mimics corporate structures for a familiar and organized task management approach.
- **Task Delegation**: A manager agent is responsible for allocating tasks among crew members based on their roles and capabilities.
- **Result Validation**: The manager evaluates the outcomes to ensure they meet the required standards before moving forward.
- **Efficient Workflow**: Emulates corporate structures, offering an organized and familiar approach to task management.

## Implementing the Hierarchical Process
To utilize the hierarchical process, you must define a crew with a designated manager and a clear chain of command for task execution.
To adopt the hierarchical process, define a crew with a designated manager and establish a clear chain of command for task execution. This structure is crucial for maintaining an orderly and efficient workflow.

!!! note "Tools on the hierarchical process"
    When using the hierarchical process, make sure to assign tools to the agents instead of the tasks, as the manager will be the one delegating the tasks and the agents will be the ones executing them.
!!! note "Tools and Agent Assignment"
    Tools should be assigned at the agent level, not the task level, to facilitate task delegation and execution by the designated agents under the manager's guidance.

!!! note "Manager LLM"
    A manager will be automatically set for the crew; you don't need to define it. You do need to set the `manager_llm` parameter in the crew, though.
!!! note "Manager LLM Configuration"
    A manager LLM is automatically assigned to the crew, eliminating the need for manual definition. However, configuring the `manager_llm` parameter is necessary to tailor the manager's decision-making process.

```python
from langchain_openai import ChatOpenAI
from crewai import Crew, Process, Agent

# Define your agents, no need to define a manager
# Agents are defined without specifying a manager explicitly
researcher = Agent(
  role='Researcher',
  goal='Conduct in-depth analysis',
@@ -42,19 +42,19 @@ writer = Agent(
  # tools = [...]
)

# Form the crew with a hierarchical process
# Establishing the crew with a hierarchical process
project_crew = Crew(
  tasks=[...],  # Tasks that the manager will figure out how to complete
  tasks=[...],  # Tasks to be delegated and executed under the manager's supervision
  agents=[researcher, writer],
  manager_llm=ChatOpenAI(temperature=0, model="gpt-4"),  # The manager's LLM that will be used internally
  process=Process.hierarchical  # Designating the hierarchical approach
  manager_llm=ChatOpenAI(temperature=0, model="gpt-4"),  # Defines the manager's decision-making engine
  process=Process.hierarchical  # Specifies the hierarchical management approach
)
```
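Once formed, the crew is started the same way as with any other process; a minimal sketch:

```python
# Start the hierarchical run; the manager LLM coordinates the agents.
result = project_crew.kickoff()
print(result)
```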

### Workflow in Action
1. **Task Assignment**: The manager assigns tasks based on agent roles and capabilities.
2. **Execution and Review**: Agents perform their tasks, with the manager reviewing outcomes for approval.
3. **Sequential Task Progression**: Tasks are completed in a sequence dictated by the manager, ensuring orderly progression.
1. **Task Assignment**: The manager strategically assigns tasks, considering each agent's role and skills.
2. **Execution and Review**: Agents complete their tasks, followed by a thorough review by the manager to ensure quality standards.
3. **Sequential Task Progression**: The manager ensures tasks are completed in a logical order, facilitating smooth project progression.

## Conclusion
The hierarchical process in CrewAI offers a familiar, structured way to manage tasks within a project. By leveraging a chain of command, it enhances efficiency and quality control, making it ideal for complex projects requiring meticulous oversight.
Adopting the hierarchical process in CrewAI facilitates a well-organized and efficient approach to project management. By structuring tasks and delegations within a clear hierarchy, it enhances both productivity and quality control, making it an ideal strategy for managing complex projects.

@@ -1,10 +1,12 @@
# Human Input on Execution
# Human Input in Agent Execution

Human input is important in many agent execution use cases; humans are AGI, so they can be prompted to step in and provide extra details when necessary.
Using it with crewAI is pretty straightforward, and you can do it through a LangChain Tool.
Check [LangChain Integration](https://python.langchain.com/docs/integrations/tools/human_tools) for more details:
Human input is crucial in numerous agent execution scenarios, enabling agents to request additional information or clarification when necessary. This feature is particularly useful in complex decision-making processes or when agents require further details to complete a task effectively.

Example:
## Using Human Input with CrewAI

Incorporating human input with CrewAI is straightforward, enhancing the agent's ability to make informed decisions. While the documentation previously mentioned using a "LangChain Tool" and a specific "DuckDuckGoSearchRun" tool from `langchain_community.tools`, it's important to clarify that the integration of such tools should align with the actual capabilities and configurations defined within your `Agent` class setup.

### Example:

```python
import os
@@ -20,15 +22,14 @@ human_tools = load_tools(["human"])
# Define your agents with roles and goals
researcher = Agent(
  role='Senior Research Analyst',
  goal='Uncover cutting-edge developments in AI and data science in',
  goal='Uncover cutting-edge developments in AI and data science',
  backstory="""You are a Senior Research Analyst at a leading tech think tank.
  Your expertise lies in identifying emerging trends and technologies in AI and
  data science. You have a knack for dissecting complex data and presenting
  actionable insights.""",
  verbose=True,
  allow_delegation=False,
  # Passing human tools to the agent
  tools=[search_tool]+human_tools
  tools=[search_tool]+human_tools  # Passing human tools to the agent
)
writer = Agent(
  role='Tech Content Strategist',
@@ -41,13 +42,12 @@ writer = Agent(
)

# Create tasks for your agents
# Being explicit on the task to ask for human feedback.
task1 = Task(
  description="""Conduct a comprehensive analysis of the latest advancements in AI in 2024.
  Identify key trends, breakthrough technologies, and potential industry impacts.
  Compile your findings in a detailed report.
  Make sure to check with the human if the draft is good before returning your Final Answer.
  Your final answer MUST be a full analysis report""",
  Make sure to check with a human if the draft is good before finalizing your answer.""",
  expected_output='A comprehensive full report on the latest AI advancements in 2024, leave nothing out',
  agent=researcher
)

@@ -58,6 +58,7 @@ task2 = Task(
  Aim for a narrative that captures the essence of these breakthroughs and their
  implications for the future.
  Your final answer MUST be the full blog post of at least 3 paragraphs.""",
  expected_output='A compelling 3 paragraphs blog post formatted as markdown about the latest AI advancements in 2024',
  agent=writer
)
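
# A minimal sketch of how this example is typically wrapped up: assemble the
# crew and kick it off. Assumes `Crew` is imported from crewai alongside
# Agent and Task at the top of the file.
crew = Crew(
  agents=[researcher, writer],
  tasks=[task1, task2],
  verbose=2
)

result = crew.kickoff()
print(result)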


@@ -5,81 +5,71 @@ description: Guide on integrating CrewAI with various Large Language Models (LLM

## Connect CrewAI to LLMs
!!! note "Default LLM"
    By default, crewAI uses OpenAI's GPT-4 model for language processing. However, you can configure your agents to use a different model or API. This guide will show you how to connect your agents to different LLMs. You can change the specific GPT model by setting the `OPENAI_MODEL_NAME` environment variable.
    By default, CrewAI uses OpenAI's GPT-4 model for language processing. However, you can configure your agents to use a different model or API. This guide will show you how to connect your agents to different LLMs through environment variables and direct instantiation.

CrewAI offers flexibility in connecting to various LLMs, including local models via [Ollama](https://ollama.ai) and different APIs like Azure. It's compatible with all [LangChain LLM](https://python.langchain.com/docs/integrations/llms/) components, enabling diverse integrations for tailored AI solutions.

## CrewAI Agent Overview
The `Agent` class in CrewAI is central to implementing AI solutions. Here's a brief overview:

## Ollama Integration
Ollama is preferred for local LLM integration, offering customization and privacy benefits. It requires installation and configuration, including model adjustments via a Modelfile to optimize performance.

### Setting Up Ollama
- **Installation**: Follow Ollama's guide for setup.
- **Configuration**: [Adjust your local model with a Modelfile](https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md), considering adding `Observation` as a stop word and playing with parameters like `top_p` and `temperature`.

### Integrating Ollama with CrewAI
Instantiate Ollama and pass it to your agents within CrewAI, enhancing them with the local model's capabilities.
- **Attributes**:
  - `role`: Defines the agent's role within the solution.
  - `goal`: Specifies the agent's objective.
  - `backstory`: Provides a background story to the agent.
  - `llm`: Indicates the Large Language Model the agent uses.

### Example: Changing OpenAI's GPT model
```python
# Required
os.environ["OPENAI_API_BASE"]='http://localhost:11434/v1'
os.environ["OPENAI_MODEL_NAME"]='openhermes'
os.environ["OPENAI_API_KEY"]=''
os.environ["OPENAI_MODEL_NAME"]="gpt-4-0125-preview"

local_expert = Agent(
# Agent will automatically use the model defined in the environment variable
example_agent = Agent(
  role='Local Expert',
  goal='Provide insights about the city',
  backstory="A knowledgeable local guide.",
  tools=[SearchTools.search_internet, BrowserTools.scrape_and_summarize_website],
  verbose=True
)
```

## OpenAI Compatible API Endpoints
You can use environment variables to easily switch between APIs and models, supporting diverse platforms like FastChat, LM Studio, and Mistral AI.
## Ollama Integration
Ollama is preferred for local LLM integration, offering customization and privacy benefits. To integrate Ollama with CrewAI, set the appropriate environment variables as shown below. Note: Detailed Ollama setup is beyond this document's scope, but general guidance is provided.

### Configuration Examples

### Ollama
### Setting Up Ollama
- **Environment Variables Configuration**: To integrate Ollama, set the following environment variables:
```sh
OPENAI_API_BASE='http://localhost:11434/v1'
OPENAI_MODEL_NAME='openhermes'  # Depending on the model you have available
OPENAI_API_KEY=NA
OPENAI_MODEL_NAME='openhermes'  # Adjust based on available model
OPENAI_API_KEY=''
```
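Alternatively, a local model can be instantiated directly and handed to an agent through its `llm` attribute. A minimal sketch, assuming Ollama is running locally and serving the `openhermes` model (the model name is just an example):

```python
from crewai import Agent
from langchain_community.llms import Ollama

# Point the agent at a locally served model instead of the OpenAI default.
ollama_llm = Ollama(model="openhermes")

local_agent = Agent(
  role='Local Expert',
  goal='Answer questions using a locally hosted model',
  backstory='A resourceful assistant that runs entirely on local hardware.',
  llm=ollama_llm
)
```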

### FastChat
## OpenAI Compatible API Endpoints
Switch between APIs and models seamlessly using environment variables, supporting platforms like FastChat, LM Studio, and Mistral AI.

### Configuration Examples
#### FastChat
```sh
OPENAI_API_BASE="http://localhost:8001/v1"
OPENAI_MODEL_NAME='oh-2.5m7b-q51'  # Depending on the model you have available
OPENAI_MODEL_NAME='oh-2.5m7b-q51'
OPENAI_API_KEY=NA
```

### LM Studio
#### LM Studio
```sh
OPENAI_API_BASE="http://localhost:8000/v1"
OPENAI_MODEL_NAME=NA
OPENAI_API_KEY=NA
```

### Mistral API
#### Mistral API
```sh
OPENAI_API_KEY=your-mistral-api-key
OPENAI_API_BASE=https://api.mistral.ai/v1
OPENAI_MODEL_NAME="mistral-small"  # Check documentation for available models
OPENAI_MODEL_NAME="mistral-small"
```

### text-gen-web-ui
```sh
OPENAI_API_BASE=http://localhost:5000/v1
OPENAI_MODEL_NAME=NA
OPENAI_API_KEY=NA
```

### Azure Open AI
Azure's OpenAI API needs a distinct setup, utilizing the `langchain_openai` component for Azure-specific configurations.

Configuration settings:
### Azure Open AI Configuration
For Azure OpenAI API integration, set the following environment variables:
```sh
AZURE_OPENAI_VERSION="2022-12-01"
AZURE_OPENAI_DEPLOYMENT=""
@@ -87,22 +77,24 @@ AZURE_OPENAI_ENDPOINT=""
AZURE_OPENAI_KEY=""
```

### Example Agent with Azure LLM
```python
from dotenv import load_dotenv
from crewai import Agent
from langchain_openai import AzureChatOpenAI

load_dotenv()

default_llm = AzureChatOpenAI(
azure_llm = AzureChatOpenAI(
  azure_endpoint=os.environ.get("AZURE_OPENAI_ENDPOINT"),
  api_key=os.environ.get("AZURE_OPENAI_KEY")
)

example_agent = Agent(
azure_agent = Agent(
  role='Example Agent',
  goal='Demonstrate custom LLM configuration',
  backstory='A diligent explorer of GitHub docs.',
  llm=default_llm
  llm=azure_llm
)
```

@@ -1,37 +1,47 @@
---
title: Implementing the Sequential Process in CrewAI
description: A guide to utilizing the sequential process for task execution in CrewAI projects.
title: Using the Sequential Process in crewAI
description: A comprehensive guide to utilizing the sequential process for task execution in crewAI projects.
---

## Introduction
The sequential process in CrewAI ensures tasks are executed one after the other, following a linear progression. This approach is akin to a relay race, where each agent completes their task before passing the baton to the next.
CrewAI offers a flexible framework for executing tasks in a structured manner, supporting both sequential and hierarchical processes. This guide outlines how to effectively implement these processes to ensure efficient task execution and project completion.

## Sequential Process Overview
This process is straightforward and effective, particularly for projects where tasks must be completed in a specific order to achieve the desired outcome.
The sequential process ensures tasks are executed one after the other, following a linear progression. This approach is ideal for projects requiring tasks to be completed in a specific order.

### Key Features
- **Linear Task Flow**: Tasks are handled in a predetermined sequence, ensuring orderly progression.
- **Simplicity**: Ideal for projects with clearly defined, step-by-step tasks.
- **Easy Monitoring**: Task completion can be easily tracked, offering clear insights into project progress.
- **Linear Task Flow**: Ensures orderly progression by handling tasks in a predetermined sequence.
- **Simplicity**: Best suited for projects with clear, step-by-step tasks.
- **Easy Monitoring**: Facilitates easy tracking of task completion and project progress.

## Implementing the Sequential Process
To apply the sequential process, assemble your crew and define the tasks in the order they need to be executed.

!!! note "Task assignment"
    In the sequential process you need to make sure all tasks are assigned to the agents, as the agents will be the ones executing them.
Assemble your crew and define tasks in the order they need to be executed.

```python
from crewai import Crew, Process, Agent, Task

# Define your agents
researcher = Agent(role='Researcher', goal='Conduct foundational research')
analyst = Agent(role='Data Analyst', goal='Analyze research findings')
writer = Agent(role='Writer', goal='Draft the final report')
researcher = Agent(
  role='Researcher',
  goal='Conduct foundational research',
  backstory='An experienced researcher with a passion for uncovering insights'
)
analyst = Agent(
  role='Data Analyst',
  goal='Analyze research findings',
  backstory='A meticulous analyst with a knack for uncovering patterns'
)
writer = Agent(
  role='Writer',
  goal='Draft the final report',
  backstory='A skilled writer with a talent for crafting compelling narratives'
)

# Define the tasks in sequence
research_task = Task(description='Gather relevant data', agent=researcher)
analysis_task = Task(description='Analyze the data', agent=analyst)
writing_task = Task(description='Compose the report', agent=writer)
research_task = Task(description='Gather relevant data...', agent=researcher)
analysis_task = Task(description='Analyze the data...', agent=analyst)
writing_task = Task(description='Compose the report...', agent=writer)

# Form the crew with a sequential process
report_crew = Crew(
@@ -42,9 +52,9 @@ report_crew = Crew(
```
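The hunk above collapses the crew configuration itself; a minimal sketch of what that collapsed section typically contains (the sequential process plus the kickoff call):

```python
# Sketch of the collapsed section: a sequential crew and its kickoff.
report_crew = Crew(
  agents=[researcher, analyst, writer],
  tasks=[research_task, analysis_task, writing_task],
  process=Process.sequential
)

result = report_crew.kickoff()
print(result)
```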

### Workflow in Action
1. **Initial Task**: The first agent completes their task and signals completion.
2. **Subsequent Tasks**: Following agents pick up their tasks in the order defined, using the outcomes of preceding tasks as inputs.
3. **Completion**: The process concludes once the final task is executed, culminating in the project's completion.
1. **Initial Task**: In a sequential process, the first agent completes their task and signals completion.
2. **Subsequent Tasks**: Agents pick up their tasks based on the process type, with outcomes of preceding tasks or manager directives guiding their execution.
3. **Completion**: The process concludes once the final task is executed, leading to project completion.

## Conclusion
The sequential process in CrewAI provides a clear, straightforward path for task execution. It's particularly suited for projects requiring a logical progression of tasks, ensuring each step is completed before the next begins, thereby facilitating a cohesive final product.

@@ -44,7 +44,7 @@ Cutting-edge framework for orchestrating role-playing, autonomous AI agents. By
      </a>
    </li>
    <li>
      <a href="./how-to/how-to/Sequential">
      <a href="./how-to/Sequential">
        Using Sequential Process
      </a>
    </li>

@@ -1,29 +1,23 @@
## Telemetry

CrewAI uses anonymous telemetry to collect usage data with the main purpose of helping us improve the library by focusing our efforts on the most used features, integrations and tools.
CrewAI utilizes anonymous telemetry to gather usage statistics with the primary goal of enhancing the library. Our focus is on improving and developing the features, integrations, and tools most utilized by our users.

There is NO data being collected on the prompts, task descriptions, agent backstories or goals, tool usage, API calls, responses, any data processed by the agents, or any secrets and env vars.
It's pivotal to understand that **NO data is collected** concerning prompts, task descriptions, agents' backstories or goals, usage of tools, API calls, responses, any data processed by the agents, or secrets and environment variables.

Data collected includes:
- Version of crewAI
  - So we can understand how many users are using the latest version
- Version of Python
  - So we can decide on what versions to better support
- General OS (e.g. number of CPUs, macOS/Windows/Linux)
  - So we know what OS we should focus on and if we could build specific OS-related features
- Number of agents and tasks in a crew
  - So we make sure we are testing internally with similar use cases and educate people on the best practices
- Crew Process being used
  - Understand where we should focus our efforts
- If Agents are using memory or allowing delegation
  - Understand if we improved the features or maybe even drop them
- If Tasks are being executed in parallel or sequentially
  - Understand if we should focus more on parallel execution
- Language model being used
  - Improved support on most used languages
- Roles of agents in a crew
  - Understand high level use cases so we can build better tools, integrations and examples about it
- Tools names available
  - Understand, out of the publicly available tools, which ones are being used the most so we can improve them
### Data Collected Includes:
- **Version of CrewAI**: Assessing the adoption rate of our latest version helps us understand user needs and guide our updates.
- **Python Version**: Identifying the Python versions our users operate with assists in prioritizing our support efforts for these versions.
- **General OS Information**: Details like the number of CPUs and the operating system type (macOS, Windows, Linux) enable us to focus our development on the most used operating systems and explore the potential for OS-specific features.
- **Number of Agents and Tasks in a Crew**: Ensures our internal testing mirrors real-world scenarios, helping us guide users towards best practices.
- **Crew Process Utilization**: Understanding how crews are utilized aids in directing our development focus.
- **Memory and Delegation Use by Agents**: Insights into how these features are used help evaluate their effectiveness and future.
- **Task Execution Mode**: Knowing whether tasks are executed in parallel or sequentially influences our emphasis on enhancing parallel execution capabilities.
- **Language Model Utilization**: Supports our goal to improve support for the most popular languages among our users.
- **Roles of Agents within a Crew**: Understanding the various roles agents play aids in crafting better tools, integrations, and examples.
- **Tool Usage**: Identifying which tools are most frequently used allows us to prioritize improvements in those areas.

Users can opt in to sharing the complete telemetry data by setting the `share_crew` attribute to `True` on their Crews.
### Opt-In Further Telemetry Sharing
Users can choose to share their complete telemetry data by enabling the `share_crew` attribute to `True` in their crew configurations. This opt-in approach respects user privacy and aligns with data protection standards by ensuring users have control over their data sharing preferences.
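For example, a crew that opts in to sharing full telemetry might be configured like this (a minimal sketch; the agents and tasks are assumed to be defined elsewhere):

```python
from crewai import Crew

crew = Crew(
  agents=[researcher, writer],        # assumed to be defined elsewhere
  tasks=[research_task, write_task],  # assumed to be defined elsewhere
  share_crew=True                     # opt in to sharing complete telemetry data
)
```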

### Updates and Revisions
We are committed to maintaining the accuracy and transparency of our documentation. Regular reviews and updates are performed to ensure our documentation accurately reflects the latest developments of our codebase and telemetry practices. Users are encouraged to review this section for the most current information on our data collection practices and how they contribute to the improvement of CrewAI.

poetry.lock (generated, 2938 lines): file diff suppressed because it is too large
@@ -1,7 +1,7 @@

[tool.poetry]
name = "crewai"
version = "0.11.1"
version = "0.16.0"
description = "Cutting-edge framework for orchestrating role-playing, autonomous AI agents. By fostering collaborative intelligence, CrewAI empowers agents to work together seamlessly, tackling complex tasks."
authors = ["Joao Moura <joao@crewai.com>"]
readme = "README.md"
@@ -16,7 +16,7 @@ Documentation = "https://github.com/joaomdmoura/CrewAI/wiki/Index"
Repository = "https://github.com/joaomdmoura/crewai"

[tool.poetry.dependencies]
python = ">=3.10,<4.0"
python = ">=3.10,<=3.13"
pydantic = "^2.4.2"
langchain = "^0.1.0"
openai = "^1.7.1"
@@ -26,10 +26,15 @@ opentelemetry-sdk = "^1.22.0"
opentelemetry-exporter-otlp-proto-http = "^1.22.0"
instructor = "^0.5.2"
regex = "^2023.12.25"
crewai-tools = { version = "^0.0.12", optional = true }
click = "^8.1.7"

[tool.poetry.extras]
tools = ["crewai-tools"]

[tool.poetry.group.dev.dependencies]
isort = "^5.13.2"
pyright = "1.1.333"
pyright = ">=1.1.350,<2.0.0"
black = {git = "https://github.com/psf/black.git", rev = "stable"}
autoflake = "^2.2.1"
pre-commit = "^3.6.0"
@@ -40,15 +45,17 @@ mkdocs-material = {extras = ["imaging"], version = "^9.5.7"}
mkdocs-material-extensions = "^1.3.1"
pillow = "^10.2.0"
cairosvg = "^2.7.1"
crewai_tools = "^0.0.12"

[tool.isort]
profile = "black"
known_first_party = ["crewai"]


[tool.poetry.scripts]
crewai = "crewai.cli.cli:crewai"

[tool.poetry.group.test.dependencies]
pytest = "^7.4"
pytest = "^8.0.0"
pytest-vcr = "^1.0.2"
python-dotenv = "1.0.0"

@@ -1,12 +1,12 @@
import os
import uuid
from typing import Any, List, Optional
from typing import Any, Dict, List, Optional, Tuple

from langchain.agents.agent import RunnableAgent
from langchain.agents.format_scratchpad import format_log_to_str
from langchain.agents.output_parsers import ReActSingleInputOutputParser
from langchain.agents.tools import tool as LangChainTool
from langchain.memory import ConversationSummaryMemory
from langchain.tools.render import render_text_description
from langchain_core.agents import AgentAction
from langchain_openai import ChatOpenAI
from pydantic import (
    UUID4,
@@ -20,8 +20,9 @@ from pydantic import (
)
from pydantic_core import PydanticCustomError

from crewai.agents import CacheHandler, CrewAgentExecutor, ToolsHandler
from crewai.agents import CacheHandler, CrewAgentExecutor, CrewAgentParser, ToolsHandler
from crewai.utilities import I18N, Logger, Prompts, RPMController
from crewai.utilities.token_counter_callback import TokenCalcHandler, TokenProcess


class Agent(BaseModel):
@@ -50,7 +51,9 @@ class Agent(BaseModel):
    _logger: Logger = PrivateAttr()
    _rpm_controller: RPMController = PrivateAttr(default=None)
    _request_within_rpm_limit: Any = PrivateAttr(default=None)
    _token_process: TokenProcess = TokenProcess()

    formatting_errors: int = 0
    model_config = ConfigDict(arbitrary_types_allowed=True)
    id: UUID4 = Field(
        default_factory=uuid.uuid4,
@@ -65,7 +68,7 @@ class Agent(BaseModel):
        description="Maximum number of requests per minute for the agent execution to be respected.",
    )
    memory: bool = Field(
        default=True, description="Whether the agent should have memory or not"
        default=False, description="Whether the agent should have memory or not"
    )
    verbose: bool = Field(
        default=False, description="Verbose mode for the Agent Execution"
@@ -73,7 +76,7 @@ class Agent(BaseModel):
    allow_delegation: bool = Field(
        default=True, description="Allow delegation of tasks to agents"
    )
    tools: List[Any] = Field(
    tools: Optional[List[Any]] = Field(
        default_factory=list, description="Tools at agents disposal"
    )
    max_iter: Optional[int] = Field(
@@ -122,8 +125,12 @@ class Agent(BaseModel):
        return self

    @model_validator(mode="after")
    def check_agent_executor(self) -> "Agent":
        """Check if the agent executor is set."""
    def set_agent_executor(self) -> "Agent":
        """Set the agent executor if it is not already set."""
        if hasattr(self.llm, "model_name"):
            self.llm.callbacks = [
                TokenCalcHandler(self.llm.model_name, self._token_process)
            ]
        if not self.agent_executor:
            self.set_cache_handler(self.cache_handler)
        return self
@@ -151,7 +158,8 @@ class Agent(BaseModel):
            task=task_prompt, context=context
        )

        tools = tools or self.tools
        tools = self._parse_tools(tools or self.tools)
        self.create_agent_executor(tools=tools)
        self.agent_executor.tools = tools
        self.agent_executor.task = task
        self.agent_executor.tools_description = render_text_description(tools)
@@ -190,22 +198,27 @@ class Agent(BaseModel):
        self._rpm_controller = rpm_controller
        self.create_agent_executor()

    def create_agent_executor(self) -> None:
    def create_agent_executor(self, tools=None) -> None:
        """Create an agent executor for the agent.

        Returns:
            An instance of the CrewAgentExecutor class.
        """
        tools = tools or self.tools

        agent_args = {
            "input": lambda x: x["input"],
            "tools": lambda x: x["tools"],
            "tool_names": lambda x: x["tool_names"],
            "agent_scratchpad": lambda x: format_log_to_str(x["intermediate_steps"]),
            "agent_scratchpad": lambda x: self.format_log_to_str(
                x["intermediate_steps"]
            ),
        }

        executor_args = {
            "llm": self.llm,
            "i18n": self.i18n,
            "tools": self.tools,
            "tools": self._parse_tools(tools),
            "verbose": self.verbose,
            "handle_parsing_errors": True,
            "max_iterations": self.max_iter,
@@ -225,9 +238,9 @@ class Agent(BaseModel):
            )
            executor_args["memory"] = summary_memory
            agent_args["chat_history"] = lambda x: x["chat_history"]
            prompt = Prompts(i18n=self.i18n).task_execution_with_memory()
            prompt = Prompts(i18n=self.i18n, tools=tools).task_execution_with_memory()
        else:
            prompt = Prompts(i18n=self.i18n).task_execution()
            prompt = Prompts(i18n=self.i18n, tools=tools).task_execution()

        execution_prompt = prompt.partial(
            goal=self.goal,
@@ -236,13 +249,50 @@ class Agent(BaseModel):
        )

        bind = self.llm.bind(stop=[self.i18n.slice("observation")])
        inner_agent = (
            agent_args | execution_prompt | bind | ReActSingleInputOutputParser()
        )
        inner_agent = agent_args | execution_prompt | bind | CrewAgentParser(agent=self)
        self.agent_executor = CrewAgentExecutor(
            agent=RunnableAgent(runnable=inner_agent), **executor_args
        )

    def interpolate_inputs(self, inputs: Dict[str, Any]) -> None:
        """Interpolate inputs into the agent description and backstory."""
        self.role = self.role.format(**inputs)
        self.goal = self.goal.format(**inputs)
        self.backstory = self.backstory.format(**inputs)

    def increment_formatting_errors(self) -> None:
        """Count the formatting errors of the agent."""
        self.formatting_errors += 1

    def format_log_to_str(
        self,
        intermediate_steps: List[Tuple[AgentAction, str]],
        observation_prefix: str = "Result: ",
        llm_prefix: str = "",
    ) -> str:
        """Construct the scratchpad that lets the agent continue its thought process."""
        thoughts = ""
        for action, observation in intermediate_steps:
            thoughts += action.log
            thoughts += f"\n{observation_prefix}{observation}\n{llm_prefix}"
        return thoughts

    def _parse_tools(self, tools: List[Any]) -> List[LangChainTool]:
        """Parse tools to be used for the task."""
        # Tentatively try to import BaseTool from crewai_tools, if it is installed.
        tools_list = []
        try:
            from crewai_tools import BaseTool as CrewAITool

            for tool in tools:
                if isinstance(tool, CrewAITool):
                    tools_list.append(tool.to_langchain())
                else:
                    tools_list.append(tool)
        except ModuleNotFoundError:
            tools_list.append(tool)
        return tools_list

    @staticmethod
    def __tools_names(tools) -> str:
        return ", ".join([t.name for t in tools])

@@ -1,3 +1,4 @@
from .cache.cache_handler import CacheHandler
from .executor import CrewAgentExecutor
from .parser import CrewAgentParser
from .tools_handler import ToolsHandler

@@ -3,7 +3,6 @@ from typing import Any, Dict, Iterator, List, Optional, Tuple, Union

from langchain.agents import AgentExecutor
from langchain.agents.agent import ExceptionTool
from langchain.agents.tools import InvalidTool
from langchain.callbacks.manager import CallbackManagerForChainRun
from langchain_core.agents import AgentAction, AgentFinish, AgentStep
from langchain_core.exceptions import OutputParserException
@@ -13,12 +12,12 @@ from langchain_core.utils.input import get_color_mapping
from pydantic import InstanceOf

from crewai.agents.tools_handler import ToolsHandler
from crewai.tools.tool_usage import ToolUsage
from crewai.tools.tool_usage import ToolUsage, ToolUsageErrorException
from crewai.utilities import I18N


class CrewAgentExecutor(AgentExecutor):
    i18n: I18N = I18N()
    _i18n: I18N = I18N()
    llm: Any = None
    iterations: int = 0
    task: Any = None
@@ -105,7 +104,6 @@ class CrewAgentExecutor(AgentExecutor):
        """
        try:
            intermediate_steps = self._prepare_intermediate_steps(intermediate_steps)

            # Call the LLM to see what to do.
            output = self.agent.plan(
                intermediate_steps,
@@ -114,14 +112,19 @@ class CrewAgentExecutor(AgentExecutor):
            )

            if self._should_force_answer():
                if isinstance(output, AgentAction) or isinstance(output, AgentFinish):
                if isinstance(output, AgentFinish):
                    yield output
                    return

                if isinstance(output, AgentAction):
                    output = output
                else:
                    raise ValueError(
                        f"Unexpected output type from agent: {type(output)}"
                    )

                yield AgentStep(
                    action=output, observation=self.i18n.errors("force_final_answer")
                    action=output, observation=self._i18n.errors("force_final_answer")
                )
                return

@@ -140,14 +143,14 @@ class CrewAgentExecutor(AgentExecutor):
            text = str(e)
            if isinstance(self.handle_parsing_errors, bool):
                if e.send_to_llm:
                    observation = str(e.observation)
                    observation = f"\n{str(e.observation)}"
                    text = str(e.llm_output)
                else:
                    observation = "Invalid or incomplete response"
                    observation = ""
            elif isinstance(self.handle_parsing_errors, str):
                observation = self.handle_parsing_errors
                observation = f"\n{self.handle_parsing_errors}"
            elif callable(self.handle_parsing_errors):
                observation = self.handle_parsing_errors(e)
                observation = f"\n{self.handle_parsing_errors(e)}"
            else:
                raise ValueError("Got unexpected type of `handle_parsing_errors`")
            output = AgentAction("_Exception", observation, text)
@@ -164,7 +167,7 @@ class CrewAgentExecutor(AgentExecutor):

            if self._should_force_answer():
                yield AgentStep(
                    action=output, observation=self.i18n.errors("force_final_answer")
                    action=output, observation=self._i18n.errors("force_final_answer")
                )
                return

@@ -183,32 +186,27 @@ class CrewAgentExecutor(AgentExecutor):
        if run_manager:
            run_manager.on_agent_action(agent_action, color="green")
        # Otherwise we lookup the tool
        if agent_action.tool in name_to_tool_map:
            tool = name_to_tool_map[agent_action.tool]
            return_direct = tool.return_direct
            color_mapping[agent_action.tool]
            tool_run_kwargs = self.agent.tool_run_logging_kwargs()
            if return_direct:
                tool_run_kwargs["llm_prefix"] = ""
            observation = ToolUsage(
                tools_handler=self.tools_handler,
                tools=self.tools,
                tools_description=self.tools_description,
                tools_names=self.tools_names,
                function_calling_llm=self.function_calling_llm,
                llm=self.llm,
                task=self.task,
            ).use(agent_action.log)
        tool_usage = ToolUsage(
            tools_handler=self.tools_handler,
            tools=self.tools,
            tools_description=self.tools_description,
            tools_names=self.tools_names,
            function_calling_llm=self.function_calling_llm,
            llm=self.llm,
            task=self.task,
        )
        tool_calling = tool_usage.parse(agent_action.log)

        if isinstance(tool_calling, ToolUsageErrorException):
            observation = tool_calling.message
        else:
            tool_run_kwargs = self.agent.tool_run_logging_kwargs()
            observation = InvalidTool().run(
                {
                    "requested_tool_name": agent_action.tool,
                    "available_tool_names": list(name_to_tool_map.keys()),
                },
                verbose=self.verbose,
                color=None,
                callbacks=run_manager.get_child() if run_manager else None,
                **tool_run_kwargs,
            )
            if tool_calling.tool_name.lower().strip() in [
                name.lower().strip() for name in name_to_tool_map
            ]:
                observation = tool_usage.use(tool_calling, agent_action.log)
            else:
                observation = self._i18n.errors("wrong_tool_name").format(
                    tool=tool_calling.tool_name,
                    tools=", ".join([tool.name for tool in self.tools]),
                )
        yield AgentStep(action=agent_action, observation=observation)

src/crewai/agents/parser.py (new file, 63 lines)
@@ -0,0 +1,63 @@
from typing import Any, Union

from langchain.agents.output_parsers import ReActSingleInputOutputParser
from langchain_core.agents import AgentAction, AgentFinish
from langchain_core.exceptions import OutputParserException

from crewai.utilities import I18N

TOOL_USAGE_SECTION = "Use Tool:"
FINAL_ANSWER_ACTION = "Final Answer:"
FINAL_ANSWER_AND_TOOL_ERROR_MESSAGE = "I tried to use a tool and give a final answer at the same time, I must choose only one."


class CrewAgentParser(ReActSingleInputOutputParser):
    """Parses Crew-style LLM calls that have a single tool input.

    Expects output to be in one of two formats.

    If the output signals that an action should be taken,
    should be in the below format. This will result in an AgentAction
    being returned.

    ```
    Use Tool: All context for using the tool here
    ```

    If the output signals that a final answer should be given,
    should be in the below format. This will result in an AgentFinish
    being returned.

    ```
    Final Answer: The temperature is 100 degrees
    ```
    """

    _i18n: I18N = I18N()
    agent: Any = None

    def parse(self, text: str) -> Union[AgentAction, AgentFinish]:
        includes_answer = FINAL_ANSWER_ACTION in text
        includes_tool = TOOL_USAGE_SECTION in text

        if includes_tool:
            if includes_answer:
                self.agent.increment_formatting_errors()
                raise OutputParserException(f"{FINAL_ANSWER_AND_TOOL_ERROR_MESSAGE}")

            return AgentAction("", "", text)

        elif includes_answer:
            return AgentFinish(
                {"output": text.split(FINAL_ANSWER_ACTION)[-1].strip()}, text
            )

        format = self._i18n.slice("format_without_tools")
        error = f"{format}"
        self.agent.increment_formatting_errors()
        raise OutputParserException(
            error,
            observation=error,
            llm_output=text,
            send_to_llm=True,
        )
@@ -18,7 +18,6 @@ class ToolsHandler:
|
||||
|
||||
def on_tool_use(self, calling: ToolCalling, output: str) -> Any:
|
||||
"""Run when tool ends running."""
|
||||
print(f"Tool {calling.tool_name} has been used.")
|
||||
self.last_used_tool = calling
|
||||
if calling.tool_name != CacheTools().name:
|
||||
self.cache.add(
|
||||
|
||||
@@ -41,7 +41,7 @@ class Crew(BaseModel):
|
||||
full_output: Whether the crew should return the full output with all tasks outputs or just the final output.
|
||||
step_callback: Callback to be executed after each step for every agents execution.
|
||||
share_crew: Whether you want to share the complete crew infromation and execution with crewAI to make the library better, and allow us to train models.
|
||||
_cache_handler: Handles caching for the crew's operations.
|
||||
inputs: Any inputs that the crew will use in tasks or agents, it will be interpolated in promtps.
|
||||
"""
|
||||
|
||||
__hash__ = object.__hash__ # type: ignore
|
||||
@@ -54,6 +54,10 @@ class Crew(BaseModel):
|
||||
agents: List[Agent] = Field(default_factory=list)
|
||||
process: Process = Field(default=Process.sequential)
|
||||
verbose: Union[int, bool] = Field(default=0)
|
||||
usage_metrics: Optional[dict] = Field(
|
||||
default=None,
|
||||
description="Metrics for the LLM usage during all tasks execution.",
|
||||
)
|
||||
full_output: Optional[bool] = Field(
|
||||
default=False,
|
||||
description="Whether the crew should return the full output with all tasks outputs or just the final output.",
|
||||
@@ -64,6 +68,10 @@ class Crew(BaseModel):
|
||||
function_calling_llm: Optional[Any] = Field(
|
||||
description="Language model that will run the agent.", default=None
|
||||
)
|
||||
inputs: Optional[Dict[str, Any]] = Field(
|
||||
description="Any inputs that the crew will use in tasks or agents, it will be interpolated in promtps.",
|
||||
default={},
|
||||
)
|
||||
config: Optional[Union[Json, Dict[str, Any]]] = Field(default=None)
|
||||
id: UUID4 = Field(default_factory=uuid.uuid4, frozen=True)
|
||||
share_crew: Optional[bool] = Field(default=False)
|
||||
@@ -126,6 +134,15 @@ class Crew(BaseModel):
|
||||
)
|
||||
return self
|
||||
|
||||
@model_validator(mode="after")
|
||||
def interpolate_inputs(self):
|
||||
"""Interpolates the inputs in the tasks and agents."""
|
||||
for task in self.tasks:
|
||||
task.interpolate_inputs(self.inputs)
|
||||
for agent in self.agents:
|
||||
agent.interpolate_inputs(self.inputs)
|
||||
return self
|
||||
|
||||
@model_validator(mode="after")
|
||||
def check_config(self):
|
||||
"""Validates that the crew is properly configured with agents and tasks."""
|
||||
@@ -188,14 +205,27 @@ class Crew(BaseModel):
|
||||
agent.step_callback = self.step_callback
|
||||
agent.create_agent_executor()
|
||||
|
||||
if self.process == Process.sequential:
|
||||
return self._run_sequential_process()
|
||||
if self.process == Process.hierarchical:
|
||||
return self._run_hierarchical_process()
|
||||
metrics = []
|
||||
|
||||
raise NotImplementedError(
|
||||
f"The process '{self.process}' is not implemented yet."
|
||||
)
|
||||
if self.process == Process.sequential:
|
||||
result = self._run_sequential_process()
|
||||
elif self.process == Process.hierarchical:
|
||||
result, manager_metrics = self._run_hierarchical_process()
|
||||
metrics.append(manager_metrics)
|
||||
|
||||
else:
|
||||
raise NotImplementedError(
|
||||
f"The process '{self.process}' is not implemented yet."
|
||||
)
|
||||
|
||||
metrics = metrics + [
|
||||
agent._token_process.get_summary() for agent in self.agents
|
||||
]
|
||||
self.usage_metrics = {
|
||||
key: sum([m[key] for m in metrics if m is not None]) for key in metrics[0]
|
||||
}
|
||||
|
||||
return result
|
||||
|
||||
def _run_sequential_process(self) -> str:
|
||||
"""Executes tasks sequentially and returns the final output."""
|
||||
@@ -205,7 +235,8 @@ class Crew(BaseModel):
|
||||
agents_for_delegation = [
|
||||
agent for agent in self.agents if agent != task.agent
|
||||
]
|
||||
task.tools += AgentTools(agents=agents_for_delegation).tools()
|
||||
if len(self.agents) > 1 and len(agents_for_delegation) > 0:
|
||||
task.tools += AgentTools(agents=agents_for_delegation).tools()
|
||||
|
||||
role = task.agent.role if task.agent is not None else "None"
|
||||
self._logger.log("debug", f"Working Agent: {role}")
|
||||
@@ -248,14 +279,14 @@ class Crew(BaseModel):
|
||||
)
|
||||
|
||||
self._finish_execution(task_output)
|
||||
return self._format_output(task_output)
|
||||
return self._format_output(task_output), manager._token_process.get_summary()
|
||||
|
||||
def _format_output(self, output: str) -> str:
|
||||
"""Formats the output of the crew execution."""
|
||||
if self.full_output:
|
||||
return {
|
||||
"final_output": output,
|
||||
"tasks_outputs": [task.output for task in self.tasks],
|
||||
"tasks_outputs": [task.output for task in self.tasks if task],
|
||||
}
|
||||
else:
|
||||
return output
|
||||
|
||||
@@ -1,13 +1,15 @@
|
||||
import threading
|
||||
import uuid
|
||||
from typing import Any, List, Optional
|
||||
from typing import Any, Dict, List, Optional, Type
|
||||
|
||||
from langchain_openai import ChatOpenAI
|
||||
from pydantic import UUID4, BaseModel, Field, field_validator, model_validator
|
||||
from pydantic_core import PydanticCustomError
|
||||
|
||||
from crewai.agent import Agent
|
||||
from crewai.tasks.task_output import TaskOutput
|
||||
from crewai.utilities import I18N
|
||||
from crewai.utilities import I18N, Converter, ConverterError, Printer
|
||||
from crewai.utilities.pydantic_schema_parser import PydanticSchemaParser
|
||||
|
||||
|
||||
class Task(BaseModel):
|
||||
@@ -18,6 +20,8 @@ class Task(BaseModel):
|
||||
|
||||
__hash__ = object.__hash__ # type: ignore
|
||||
used_tools: int = 0
|
||||
tools_errors: int = 0
|
||||
delegations: int = 0
|
||||
i18n: I18N = I18N()
|
||||
thread: threading.Thread = None
|
||||
description: str = Field(description="Description of the actual task.")
|
||||
@@ -39,10 +43,22 @@ class Task(BaseModel):
|
||||
description="Whether the task should be executed asynchronously or not.",
|
||||
default=False,
|
||||
)
|
||||
output_json: Optional[Type[BaseModel]] = Field(
|
||||
description="A Pydantic model to be used to create a JSON output.",
|
||||
default=None,
|
||||
)
|
||||
output_pydantic: Optional[Type[BaseModel]] = Field(
|
||||
description="A Pydantic model to be used to create a Pydantic output.",
|
||||
default=None,
|
||||
)
|
||||
output_file: Optional[str] = Field(
|
||||
description="A file path to be used to create a file output.",
|
||||
default=None,
|
||||
)
|
||||
output: Optional[TaskOutput] = Field(
|
||||
description="Task output, it's final result after being executed", default=None
|
||||
)
|
||||
tools: List[Any] = Field(
|
||||
tools: Optional[List[Any]] = Field(
|
||||
default_factory=list,
|
||||
description="Tools the agent is limited to use for this task.",
|
||||
)
|
||||
@@ -67,6 +83,18 @@ class Task(BaseModel):
|
||||
self.tools.extend(self.agent.tools)
|
||||
return self
|
||||
|
||||
@model_validator(mode="after")
|
||||
def check_output(self):
|
||||
"""Check if an output type is set."""
|
||||
output_types = [self.output_json, self.output_pydantic]
|
||||
if len([type for type in output_types if type]) > 1:
|
||||
raise PydanticCustomError(
|
||||
"output_type",
|
||||
"Only one output type can be set, either output_pydantic or output_json.",
|
||||
{},
|
||||
)
|
||||
return self
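The `output_json` / `output_pydantic` fields added above are mutually exclusive, enforced by `check_output`. Here is a standalone sketch of the same pydantic v2 pattern, not the crewAI `Task` class itself; `OutputSpec` and `Report` are illustrative names only.

```python
# Standalone sketch of the mutual-exclusion rule in Task.check_output above;
# OutputSpec and Report are illustrative, not part of crewAI.
from typing import List, Optional, Type
from pydantic import BaseModel, model_validator
from pydantic_core import PydanticCustomError

class Report(BaseModel):
    title: str
    bullet_points: List[str]

class OutputSpec(BaseModel):
    output_json: Optional[Type[BaseModel]] = None
    output_pydantic: Optional[Type[BaseModel]] = None

    @model_validator(mode="after")
    def check_output(self):
        # Allow at most one of the two output types to be set.
        if sum(1 for t in (self.output_json, self.output_pydantic) if t) > 1:
            raise PydanticCustomError(
                "output_type",
                "Only one output type can be set, either output_pydantic or output_json.",
                {},
            )
        return self

OutputSpec(output_pydantic=Report)  # allowed
# OutputSpec(output_json=Report, output_pydantic=Report)  # raises a ValidationError
```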
|
||||
|
||||
def execute(
|
||||
self,
|
||||
agent: Agent | None = None,
|
||||
@@ -90,7 +118,8 @@ class Task(BaseModel):
|
||||
for task in self.context:
|
||||
if task.async_execution:
|
||||
task.thread.join()
|
||||
context.append(task.output.result)
|
||||
if task and task.output:
|
||||
context.append(task.output.raw_output)
|
||||
context = "\n".join(context)
|
||||
|
||||
tools = tools or self.tools
|
||||
@@ -115,9 +144,19 @@ class Task(BaseModel):
|
||||
context=context,
|
||||
tools=tools,
|
||||
)
|
||||
self.output = TaskOutput(description=self.description, result=result)
|
||||
self.callback(self.output) if self.callback else None
|
||||
return result
|
||||
|
||||
exported_output = self._export_output(result)
|
||||
|
||||
self.output = TaskOutput(
|
||||
description=self.description,
|
||||
exported_output=exported_output,
|
||||
raw_output=result,
|
||||
)
|
||||
|
||||
if self.callback:
|
||||
self.callback(self.output)
|
||||
|
||||
return exported_output
|
||||
|
||||
def prompt(self) -> str:
|
||||
"""Prompt the task.
|
||||
@@ -133,3 +172,61 @@ class Task(BaseModel):
|
||||
)
|
||||
tasks_slices = [self.description, output]
|
||||
return "\n".join(tasks_slices)
|
||||
|
||||
def interpolate_inputs(self, inputs: Dict[str, Any]) -> None:
|
||||
"""Interpolate inputs into the task description and expected output."""
|
||||
self.description = self.description.format(**inputs)
|
||||
if self.expected_output:
|
||||
self.expected_output = self.expected_output.format(**inputs)
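`interpolate_inputs` above is plain `str.format` applied to the description and the expected output. A tiny hedged sketch of the same substitution on free-standing strings (the placeholder names are made up):

```python
# The interpolation above is ordinary str.format over the two task fields.
def interpolate(description: str, expected_output: str, **inputs) -> tuple[str, str]:
    return description.format(**inputs), expected_output.format(**inputs)

desc, expected = interpolate(
    "Research {topic} and report back.",
    "A {count}-bullet summary about {topic}.",
    topic="vector databases",
    count=3,
)
print(desc)      # Research vector databases and report back.
print(expected)  # A 3-bullet summary about vector databases.
```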
|
||||
|
||||
def increment_tools_errors(self) -> None:
|
||||
"""Increment the tools errors counter."""
|
||||
self.tools_errors += 1
|
||||
|
||||
def increment_delegations(self) -> None:
|
||||
"""Increment the delegations counter."""
|
||||
self.delegations += 1
|
||||
|
||||
def _export_output(self, result: str) -> Any:
|
||||
exported_result = result
|
||||
instructions = "I'm gonna convert this raw text into valid JSON."
|
||||
|
||||
if self.output_pydantic or self.output_json:
|
||||
model = self.output_pydantic or self.output_json
|
||||
llm = self.agent.function_calling_llm or self.agent.llm
|
||||
|
||||
if not self._is_gpt(llm):
|
||||
model_schema = PydanticSchemaParser(model=model).get_schema()
|
||||
instructions = f"{instructions}\n\nThe json should have the following structure, with the following keys:\n{model_schema}"
|
||||
|
||||
converter = Converter(
|
||||
llm=llm, text=result, model=model, instructions=instructions
|
||||
)
|
||||
|
||||
if self.output_pydantic:
|
||||
exported_result = converter.to_pydantic()
|
||||
elif self.output_json:
|
||||
exported_result = converter.to_json()
|
||||
|
||||
if isinstance(exported_result, ConverterError):
|
||||
Printer().print(
|
||||
content=f"{exported_result.message} Using raw output instead.",
|
||||
color="red",
|
||||
)
|
||||
exported_result = result
|
||||
|
||||
if self.output_file:
|
||||
content = (
|
||||
exported_result if not self.output_pydantic else exported_result.json()
|
||||
)
|
||||
self._save_file(content)
|
||||
|
||||
return exported_result
|
||||
|
||||
def _is_gpt(self, llm) -> bool:
|
||||
return isinstance(llm, ChatOpenAI) and llm.openai_api_base == None
|
||||
|
||||
def _save_file(self, result: Any) -> None:
|
||||
with open(self.output_file, "w") as file:
|
||||
file.write(result)
|
||||
return None
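`_export_output` above only writes `output_file` after conversion, serializing Pydantic results before saving. A hedged sketch of that save step follows; `Summary` and the file paths are illustrative, and `model_dump_json()` is the pydantic v2 counterpart of the `.json()` call used in the diff.

```python
# Hedged sketch of the output_file branch in _export_output/_save_file above.
from pydantic import BaseModel

class Summary(BaseModel):
    title: str
    score: int

def save_output(result, output_file: str) -> None:
    # Pydantic results get serialized; plain strings are written as-is.
    content = result.model_dump_json() if isinstance(result, BaseModel) else result
    with open(output_file, "w") as file:
        file.write(content)

save_output(Summary(title="Q1 report", score=9), "task_output.json")
save_output("plain text final answer", "task_output.txt")
```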
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
from typing import Optional
|
||||
from typing import Optional, Union
|
||||
|
||||
from pydantic import BaseModel, Field, model_validator
|
||||
|
||||
@@ -8,10 +8,16 @@ class TaskOutput(BaseModel):
|
||||
|
||||
description: str = Field(description="Description of the task")
|
||||
summary: Optional[str] = Field(description="Summary of the task", default=None)
|
||||
result: str = Field(description="Result of the task")
|
||||
exported_output: Union[str, BaseModel] = Field(
|
||||
description="Output of the task", default=None
|
||||
)
|
||||
raw_output: str = Field(description="Result of the task")
|
||||
|
||||
@model_validator(mode="after")
|
||||
def set_summary(self):
|
||||
excerpt = " ".join(self.description.split(" ")[:10])
|
||||
self.summary = f"{excerpt}..."
|
||||
return self
|
||||
|
||||
def result(self):
|
||||
return self.exported_output
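`TaskOutput` now keeps both forms of a result: `raw_output` holds the LLM text and `exported_output` holds the converted value, with `result()` returning the latter. Below is a standalone sketch of that shape; the field names mirror the diff, but this is not the crewAI class.

```python
# Standalone sketch mirroring the TaskOutput fields above; not the crewAI class.
from typing import Optional, Union
from pydantic import BaseModel, model_validator

class MiniTaskOutput(BaseModel):
    description: str
    summary: Optional[str] = None
    exported_output: Union[BaseModel, str, None] = None
    raw_output: str = ""

    @model_validator(mode="after")
    def set_summary(self):
        # The summary is just the first ten words of the description.
        excerpt = " ".join(self.description.split(" ")[:10])
        self.summary = f"{excerpt}..."
        return self

    def result(self):
        return self.exported_output

out = MiniTaskOutput(
    description="Summarize the latest AI research trends for an executive briefing",
    raw_output="Final Answer: retrieval, agents and small models dominate.",
    exported_output="retrieval, agents and small models dominate.",
)
print(out.summary)   # first ten words of the description, followed by "..."
print(out.result())  # the exported (converted) value
```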
|
||||
|
||||
@@ -41,10 +41,12 @@ class Telemetry:
|
||||
self.ready = False
|
||||
try:
|
||||
telemetry_endpoint = "http://telemetry.crewai.com:4318"
|
||||
self.resource = Resource(attributes={SERVICE_NAME: "crewAI-telemetry"})
|
||||
self.resource = Resource(
|
||||
attributes={SERVICE_NAME: "crewAI-telemetry"},
|
||||
)
|
||||
self.provider = TracerProvider(resource=self.resource)
|
||||
processor = BatchSpanProcessor(
|
||||
OTLPSpanExporter(endpoint=f"{telemetry_endpoint}/v1/traces")
|
||||
OTLPSpanExporter(endpoint=f"{telemetry_endpoint}/v1/traces", timeout=15)
|
||||
)
|
||||
self.provider.add_span_processor(processor)
|
||||
self.ready = True
|
||||
@@ -53,7 +55,10 @@ class Telemetry:
|
||||
|
||||
def set_tracer(self):
|
||||
if self.ready:
|
||||
trace.set_tracer_provider(self.provider)
|
||||
try:
|
||||
trace.set_tracer_provider(self.provider)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
def crew_creation(self, crew):
|
||||
"""Records the creation of a crew."""
|
||||
@@ -232,7 +237,7 @@ class Telemetry:
|
||||
{
|
||||
"id": str(task.id),
|
||||
"description": task.description,
|
||||
"output": task.output.result,
|
||||
"output": task.output.raw_output,
|
||||
}
|
||||
for task in crew.tasks
|
||||
]
|
||||
|
||||
@@ -42,11 +42,16 @@ class AgentTools(BaseModel):
|
||||
|
||||
def _execute(self, agent, task, context):
|
||||
"""Execute the command."""
|
||||
agent = [
|
||||
available_agent
|
||||
for available_agent in self.agents
|
||||
if available_agent.role.lower() == agent.lower()
|
||||
]
|
||||
try:
|
||||
agent = [
|
||||
available_agent
|
||||
for available_agent in self.agents
|
||||
if available_agent.role.strip().lower() == agent.strip().lower()
|
||||
]
|
||||
except:
|
||||
return self.i18n.errors("agent_tool_unexsiting_coworker").format(
|
||||
coworkers="\n".join([f"- {agent.role}" for agent in self.agents])
|
||||
)
|
||||
|
||||
if not agent:
|
||||
return self.i18n.errors("agent_tool_unexsiting_coworker").format(
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
from typing import Any, Dict
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
from pydantic import BaseModel as PydanticBaseModel
|
||||
from pydantic import Field as PydanticField
|
||||
@@ -7,7 +7,7 @@ from pydantic.v1 import BaseModel, Field
|
||||
|
||||
class ToolCalling(BaseModel):
|
||||
tool_name: str = Field(..., description="The name of the tool to be called.")
|
||||
arguments: Dict[str, Any] = Field(
|
||||
arguments: Optional[Dict[str, Any]] = Field(
|
||||
..., description="A dictinary of arguments to be passed to the tool."
|
||||
)
|
||||
|
||||
@@ -16,6 +16,6 @@ class InstructorToolCalling(PydanticBaseModel):
|
||||
tool_name: str = PydanticField(
|
||||
..., description="The name of the tool to be called."
|
||||
)
|
||||
arguments: Dict = PydanticField(
|
||||
arguments: Optional[Dict[str, Any]] = PydanticField(
|
||||
..., description="A dictinary of arguments to be passed to the tool."
|
||||
)
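Both `ToolCalling` variants above now allow `arguments` to be `None`, so zero-parameter tools can be called. A hedged standalone sketch of the dispatch this enables, mirroring the `tool._run(**calling.arguments)` / `tool._run()` branch added later in this diff; the two tools are illustrative stand-ins.

```python
# Standalone sketch of the "arguments may be None" dispatch enabled above.
from typing import Any, Callable, Dict, Optional

def run_tool(tool_fn: Callable[..., Any], arguments: Optional[Dict[str, Any]]) -> Any:
    # A tool with no parameters is called bare; otherwise kwargs are expanded.
    return tool_fn(**arguments) if arguments else tool_fn()

def current_time() -> str:
    return "2024-02-20T12:00:00Z"  # fixed stand-in value

def multiplier(first_number: int, second_number: int) -> int:
    return first_number * second_number

print(run_tool(current_time, None))                                   # zero-argument call
print(run_tool(multiplier, {"first_number": 3, "second_number": 4}))  # 12
```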
|
||||
|
||||
@@ -1,15 +1,15 @@
|
||||
from textwrap import dedent
|
||||
from typing import Any, List, Union
|
||||
|
||||
import instructor
|
||||
from langchain.prompts import PromptTemplate
|
||||
from langchain_core.tools import BaseTool
|
||||
from langchain_openai import ChatOpenAI
|
||||
|
||||
from crewai.agents.tools_handler import ToolsHandler
|
||||
from crewai.telemtry import Telemetry
|
||||
from crewai.tools.tool_calling import InstructorToolCalling, ToolCalling
|
||||
from crewai.tools.tool_output_parser import ToolOutputParser
|
||||
from crewai.utilities import I18N, Printer
|
||||
from crewai.utilities import I18N, Converter, ConverterError, Printer
|
||||
|
||||
OPENAI_BIGGER_MODELS = ["gpt-4"]
|
||||
|
||||
|
||||
class ToolUsageErrorException(Exception):
|
||||
@@ -31,6 +31,7 @@ class ToolUsage:
|
||||
tools_description: Description of the tools available for the agent.
|
||||
tools_names: Names of the tools available for the agent.
|
||||
llm: Language model to be used for the tool usage.
|
||||
function_calling_llm: Language model to be used for the tool usage.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
@@ -47,29 +48,41 @@ class ToolUsage:
|
||||
self._printer: Printer = Printer()
|
||||
self._telemetry: Telemetry = Telemetry()
|
||||
self._run_attempts: int = 1
|
||||
self._max_parsing_attempts: int = 2
|
||||
self._max_parsing_attempts: int = 3
|
||||
self._remeber_format_after_usages: int = 3
|
||||
self.tools_description = tools_description
|
||||
self.tools_names = tools_names
|
||||
self.tools_handler = tools_handler
|
||||
self.tools = tools
|
||||
self.task = task
|
||||
self.llm = llm
|
||||
self.function_calling_llm = function_calling_llm
|
||||
self.llm = function_calling_llm or llm
|
||||
|
||||
def use(self, tool_string: str):
|
||||
calling = self._tool_calling(tool_string)
|
||||
# Set the maximum parsing attempts for bigger models
|
||||
if (isinstance(self.llm, ChatOpenAI)) and (self.llm.openai_api_base == None):
|
||||
if self.llm.model_name in OPENAI_BIGGER_MODELS:
|
||||
self._max_parsing_attempts = 2
|
||||
self._remeber_format_after_usages = 4
|
||||
|
||||
def parse(self, tool_string: str):
|
||||
"""Parse the tool string and return the tool calling."""
|
||||
return self._tool_calling(tool_string)
|
||||
|
||||
def use(
|
||||
self, calling: Union[ToolCalling, InstructorToolCalling], tool_string: str
|
||||
) -> str:
|
||||
if isinstance(calling, ToolUsageErrorException):
|
||||
error = calling.message
|
||||
self._printer.print(content=f"\n\n{error}\n", color="red")
|
||||
self.task.increment_tools_errors()
|
||||
return error
|
||||
try:
|
||||
tool = self._select_tool(calling.tool_name)
|
||||
except Exception as e:
|
||||
error = getattr(e, "message", str(e))
|
||||
self.task.increment_tools_errors()
|
||||
self._printer.print(content=f"\n\n{error}\n", color="red")
|
||||
return error
|
||||
return self._use(tool_string=tool_string, tool=tool, calling=calling)
|
||||
return f"{self._use(tool_string=tool_string, tool=tool, calling=calling)}\n\n{self._i18n.slice('final_answer_format')}"
|
||||
|
||||
def _use(
|
||||
self,
|
||||
@@ -92,7 +105,7 @@ class ToolUsage:
|
||||
result = self._format_result(result=result)
|
||||
return result
|
||||
except Exception:
|
||||
pass
|
||||
self.task.increment_tools_errors()
|
||||
|
||||
result = self.tools_handler.cache.read(
|
||||
tool=calling.tool_name, input=calling.arguments
|
||||
@@ -100,17 +113,31 @@ class ToolUsage:
|
||||
|
||||
if not result:
|
||||
try:
|
||||
result = tool._run(**calling.arguments)
|
||||
if calling.tool_name in [
|
||||
"Delegate work to co-worker",
|
||||
"Ask question to co-worker",
|
||||
]:
|
||||
self.task.increment_delegations()
|
||||
|
||||
if calling.arguments:
|
||||
result = tool._run(**calling.arguments)
|
||||
else:
|
||||
result = tool._run()
|
||||
except Exception as e:
|
||||
self._run_attempts += 1
|
||||
if self._run_attempts > self._max_parsing_attempts:
|
||||
self._telemetry.tool_usage_error(llm=self.llm)
|
||||
error_message = self._i18n.errors("tool_usage_exception").format(
|
||||
error=e
|
||||
)
|
||||
error = ToolUsageErrorException(
|
||||
self._i18n.errors("tool_usage_exception").format(error=e)
|
||||
f'\n{error_message}.\nMoving one then. {self._i18n.slice("format").format(tool_names=self.tools_names)}'
|
||||
).message
|
||||
self._printer.print(content=f"\n\n{error}\n", color="red")
|
||||
self.task.increment_tools_errors()
|
||||
self._printer.print(content=f"\n\n{error_message}\n", color="red")
|
||||
return error
|
||||
return self.use(tool_string=tool_string)
|
||||
self.task.increment_tools_errors()
|
||||
return self.use(calling=calling, tool_string=tool_string)
|
||||
|
||||
self.tools_handler.on_tool_use(calling=calling, output=result)
|
||||
|
||||
@@ -149,6 +176,7 @@ class ToolUsage:
|
||||
for tool in self.tools:
|
||||
if tool.name.lower().strip() == tool_name.lower().strip():
|
||||
return tool
|
||||
self.task.increment_tools_errors()
|
||||
raise Exception(f"Tool '{tool_name}' not found.")
|
||||
|
||||
def _render(self) -> str:
|
||||
@@ -170,69 +198,42 @@ class ToolUsage:
|
||||
)
|
||||
return "\n--\n".join(descriptions)
|
||||
|
||||
def _is_gpt(self, llm) -> bool:
|
||||
return isinstance(llm, ChatOpenAI) and llm.openai_api_base == None
|
||||
|
||||
def _tool_calling(
|
||||
self, tool_string: str
|
||||
) -> Union[ToolCalling, InstructorToolCalling]:
|
||||
try:
|
||||
tool_string = tool_string.replace(
|
||||
"Thought: Do I need to use a tool? Yes", ""
|
||||
model = InstructorToolCalling if self._is_gpt(self.llm) else ToolCalling
|
||||
converter = Converter(
|
||||
text=f"Only tools available:\n###\n{self._render()}\n\nReturn a valid schema for the tool, the tool name must be exactly equal one of the options, use this text to inform the valid ouput schema:\n\n{tool_string}```",
|
||||
llm=self.llm,
|
||||
model=model,
|
||||
instructions=dedent(
|
||||
"""\
|
||||
The schema should have the following structure, only two keys:
|
||||
- tool_name: str
|
||||
- arguments: dict (with all arguments being passed)
|
||||
|
||||
Example:
|
||||
{"tool_name": "tool name", "arguments": {"arg_name1": "value", "arg_name2": 2}}""",
|
||||
),
|
||||
max_attemps=1,
|
||||
)
|
||||
tool_string = tool_string.replace("Action:", "Tool Name:")
|
||||
tool_string = tool_string.replace("Action Input:", "Tool Arguments:")
|
||||
calling = converter.to_pydantic()
|
||||
|
||||
llm = self.function_calling_llm or self.llm
|
||||
|
||||
if (isinstance(llm, ChatOpenAI)) and (llm.openai_api_base == None):
|
||||
client = instructor.patch(
|
||||
llm.client._client,
|
||||
mode=instructor.Mode.FUNCTIONS,
|
||||
)
|
||||
calling = client.chat.completions.create(
|
||||
model=llm.model_name,
|
||||
messages=[
|
||||
{
|
||||
"role": "system",
|
||||
"content": """
|
||||
The schema should have the following structure, only two key:
|
||||
- tool_name: str
|
||||
- arguments: dict (with all arguments being passed)
|
||||
|
||||
Example:
|
||||
{"tool_name": "tool_name", "arguments": {"arg_name1": "value", "arg_name2": 2}}
|
||||
""",
|
||||
},
|
||||
{
|
||||
"role": "user",
|
||||
"content": f"Tools available:\n\n{self._render()}\n\nReturn a valid schema for the tool, use this text to inform a valid ouput schema:\n{tool_string}```",
|
||||
},
|
||||
],
|
||||
response_model=InstructorToolCalling,
|
||||
)
|
||||
else:
|
||||
parser = ToolOutputParser(pydantic_object=ToolCalling)
|
||||
prompt = PromptTemplate(
|
||||
template="Tools available:\n\n{available_tools}\n\nReturn a valid schema for the tool, use this text to inform a valid ouput schema:\n{tool_string}\n\n{format_instructions}\n```",
|
||||
input_variables=["tool_string"],
|
||||
partial_variables={
|
||||
"available_tools": self._render(),
|
||||
"format_instructions": """
|
||||
The schema should have the following structure, only two key:
|
||||
- tool_name: str
|
||||
- arguments: dict (with all arguments being passed)
|
||||
|
||||
Example:
|
||||
{"tool_name": "tool_name", "arguments": {"arg_name1": "value", "arg_name2": 2}}
|
||||
""",
|
||||
},
|
||||
)
|
||||
chain = prompt | llm | parser
|
||||
calling = chain.invoke({"tool_string": tool_string})
|
||||
|
||||
except Exception:
|
||||
if isinstance(calling, ConverterError):
|
||||
raise calling
|
||||
except Exception as e:
|
||||
self._run_attempts += 1
|
||||
if self._run_attempts > self._max_parsing_attempts:
|
||||
self._telemetry.tool_usage_error(llm=llm)
|
||||
return ToolUsageErrorException(self._i18n.errors("tool_usage_error"))
|
||||
self._telemetry.tool_usage_error(llm=self.llm)
|
||||
self.task.increment_tools_errors()
|
||||
self._printer.print(content=f"\n\n{e}\n", color="red")
|
||||
return ToolUsageErrorException(
|
||||
f'{self._i18n.errors("tool_usage_error")}\n{self._i18n.slice("format").format(tool_names=self.tools_names)}'
|
||||
)
|
||||
return self._tool_calling(tool_string)
|
||||
|
||||
return calling
|
||||
|
||||
@@ -5,23 +5,29 @@
|
||||
"backstory": "You are a seasoned manager with a knack for getting the best out of your team.\nYou are also known for your ability to delegate work to the right people, and to ask the right questions to get the best out of your team.\nEven though you don't perform tasks by yourself, you have a lot of experience in the field, which allows you to properly evaluate the work of your team members."
|
||||
},
|
||||
"slices": {
|
||||
"observation": "\nObservation",
|
||||
"task": "Begin! This is VERY important to you, your job depends on it!\n\nCurrent Task: {input}",
|
||||
"observation": "\nResult",
|
||||
"task": "\n\nCurrent Task: {input}\n\n Begin! This is VERY important to you, your job depends on it!\n\n",
|
||||
"memory": "This is the summary of your work so far:\n{chat_history}",
|
||||
"role_playing": "You are {role}.\n{backstory}\n\nYour personal goal is: {goal}",
|
||||
"tools": "TOOLS:\n------\nYou have access to only the following tools:\n\n{tools}\n\nTo use a tool, please use the exact following format:\n\n```\nThought: Do I need to use a tool? Yes\nAction: the tool you wanna use, should be one of [{tool_names}], just the name.\nAction Input: Any and all relevant information input and context for using the tool\nObservation: the result of using the tool\n```\n\nWhen you have a response for your task, or if you do not need to use a tool, you MUST use the format:\n\n```\nThought: Do I need to use a tool? No\nFinal Answer: [your response here]```",
|
||||
"tools": "I have access to ONLY the following tools, I can use only these, use one at time:\n\n{tools}\n\nTo use a tool I MUST use the exact following format:\n\n```\nUse Tool: the tool I wanna use, should be one of [{tool_names}] and absolute all relevant input and context for using the tool, I must use only one tool at once.\nResult: [result of the tool]\n```\n\nTo give my final answer I'll use the exact following format:\n\n```\nFinal Answer: [my expected final answer, entire content of my most complete final answer goes here]\n```\nI MUST use these formats, my jobs depends on it!",
|
||||
"no_tools": "To give my final answer use the exact following format:\n\n```\nFinal Answer: [my expected final answer, entire content of my most complete final answer goes here]\n```\nI MUST use these formats, my jobs depends on it!",
|
||||
"format": "I MUST either use a tool (use one at time) OR give my best final answer. To use a single tool I MUST use the exact following format:\n\n```\nUse Tool: the tool I wanna use, should be one of [{tool_names}] and absolute all relevant input and context for using the tool, I must use only one tool at once.\nResult: [result of the tool]\n```\n\nTo give my final answer use the exact following format:\n\n```\nFinal Answer: [my expected final answer, entire content of my most complete final answer goes here]\n```\nI MUST use these formats, my jobs depends on it!",
|
||||
"final_answer_format": "If I don't need to use any more tools, I must make sure use the correct format to give my final answer:\n\n```Final Answer: [my expected final answer, entire content of my most complete final answer goes here]```\n I MUST use these formats, my jobs depends on it!",
|
||||
"format_without_tools": "\nSorry, I didn't use the right format. I MUST either use a tool (among the available ones), OR give my best final answer.\nI just remembered the expected formats I must follow:\n\n```\nUse Tool: the tool I wanna use, and absolute all relevant input and context for using the tool, I must use only one tool at once.\nResult: [result of the tool]\n```\nOR\n```\nFinal Answer: [my expected final answer, entire content of my most complete final answer goes here]\n```\n",
|
||||
"task_with_context": "{task}\nThis is the context you're working with:\n{context}",
|
||||
"expected_output": "Your final answer must be: {expected_output}"
|
||||
},
|
||||
"errors": {
|
||||
"force_final_answer": "Actually, I used too many tools, so I'll stop now and give you my absolute BEST Final answer NOW, using exaclty the expected format bellow: \n```\nThought: Do I need to use a tool? No\nFinal Answer: [your response here]```",
|
||||
"agent_tool_unexsiting_coworker": "\nError executing tool. Co-worker mentioned on the Action Input not found, it must to be one of the following options:\n{coworkers}.\n",
|
||||
"task_repeated_usage": "I just used the {tool} tool with input {tool_input}. So I already know that and must stop using it in a row with the same input. \nI could give my final answer if I'm ready, using exaclty the expected format bellow: \n\nThought: Do I need to use a tool? No\nFinal Answer: [your response here]\n",
|
||||
"unexpected_format": "\nSorry, I didn't use the expected format, I MUST either use a tool (use one at time) OR give my best final answer.\n",
|
||||
"force_final_answer": "Actually, I used too many tools, so I'll stop now and give you my absolute BEST Final answer NOW, using exaclty the expected format bellow:\n\n```\nFinal Answer: [my expected final answer, entire content of my most complete final answer goes here]\n```\nI MUST use these formats, my jobs depends on it!",
|
||||
"agent_tool_unexsiting_coworker": "\nError executing tool. Co-worker mentioned not found, it must to be one of the following options:\n{coworkers}\n",
|
||||
"task_repeated_usage": "I already used the {tool} tool with input {tool_input}. So I already know that and must stop using it with same input. \nI could give my best complete final answer if I'm ready, using exaclty the expected format bellow:\n\n```\nFinal Answer: [my expected final answer, entire content of my most complete final answer goes here]\n```\nI MUST use these formats, my jobs depends on it!",
|
||||
"tool_usage_error": "It seems we encountered an unexpected error while trying to use the tool.",
|
||||
"wrong_tool_name": "You tried to use the tool {tool}, but it doesn't exist. You must use one of the following tools, use one at time: {tools}.",
|
||||
"tool_usage_exception": "It seems we encountered an unexpected error while trying to use the tool. This was the error: {error}"
|
||||
},
|
||||
"tools": {
|
||||
"delegate_work": "Delegate a specific task to one of the following co-workers:\n{coworkers}.\nThe input to this tool should be the role of the coworker, the task you want them to do, and ALL necessary context to exectue the task, they know nothing about the task, so share absolute everything you know, don't reference things but instead explain them.",
|
||||
"ask_question": "Ask a specific question to one of the following co-workers:\n{coworkers}.\nThe input to this tool should be the role of the coworker, the question you have for them, and ALL necessary context to ask the question properly, they know nothing about the question, so share absolute everything you know, don't reference things but instead explain them."
|
||||
"delegate_work": "Delegate a specific task to one of the following co-workers: {coworkers}\nThe input to this tool should be the coworker, the task you want them to do, and ALL necessary context to exectue the task, they know nothing about the task, so share absolute everything you know, don't reference things but instead explain them.",
|
||||
"ask_question": "Ask a specific question to one of the following co-workers: {coworkers}\nThe input to this tool should be the coworker, the question you have for them, and ALL necessary context to ask the question properly, they know nothing about the question, so share absolute everything you know, don't reference things but instead explain them."
|
||||
}
|
||||
}
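The prompt slices and error strings above are consumed by looking them up by key and calling `.format()` on them (for example `self._i18n.slice("format").format(tool_names=...)` in the tool-usage changes). A hedged sketch of that lookup; the JSON here is a trimmed stand-in, not the full en.json file.

```python
# Hedged sketch of how the en.json slices above are consumed; the JSON is a
# trimmed stand-in for illustration only.
import json

translations = json.loads("""
{
  "slices": {
    "format": "I MUST either use a tool (use one at time) OR give my best final answer. Tools: [{tool_names}]"
  },
  "errors": {
    "wrong_tool_name": "You tried to use the tool {tool}, but it doesn't exist. You must use one of the following tools, use one at time: {tools}."
  }
}
""")

def get_slice(key: str) -> str:
    return translations["slices"][key]

def get_error(key: str) -> str:
    return translations["errors"][key]

print(get_slice("format").format(tool_names="multiplier, search"))
print(get_error("wrong_tool_name").format(tool="calculator", tools="multiplier, search"))
```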
|
||||
|
||||
@@ -1,4 +1,6 @@
from .converter import Converter, ConverterError
from .i18n import I18N
from .instructor import Instructor
from .logger import Logger
from .printer import Printer
from .prompts import Prompts

87  src/crewai/utilities/converter.py  Normal file
@@ -0,0 +1,87 @@
|
||||
import json
|
||||
from typing import Any, Optional
|
||||
|
||||
from langchain.schema import HumanMessage, SystemMessage
|
||||
from langchain_openai import ChatOpenAI
|
||||
from pydantic import BaseModel, Field, PrivateAttr, model_validator
|
||||
|
||||
|
||||
class ConverterError(Exception):
|
||||
"""Error raised when Converter fails to parse the input."""
|
||||
|
||||
def __init__(self, message: str, *args: object) -> None:
|
||||
super().__init__(message, *args)
|
||||
self.message = message
|
||||
|
||||
|
||||
class Converter(BaseModel):
|
||||
"""Class that converts text into either pydantic or json."""
|
||||
|
||||
_is_gpt: bool = PrivateAttr(default=True)
|
||||
text: str = Field(description="Text to be converted.")
|
||||
llm: Any = Field(description="The language model to be used to convert the text.")
|
||||
model: Any = Field(description="The model to be used to convert the text.")
|
||||
instructions: str = Field(description="Conversion instructions to the LLM.")
|
||||
max_attemps: Optional[int] = Field(
|
||||
description="Max number of attemps to try to get the output formated.",
|
||||
default=3,
|
||||
)
|
||||
|
||||
@model_validator(mode="after")
|
||||
def check_llm_provider(self):
|
||||
if not self._is_gpt(self.llm):
|
||||
self._is_gpt = False
|
||||
|
||||
def to_pydantic(self, current_attempt=1):
|
||||
"""Convert text to pydantic."""
|
||||
try:
|
||||
if self._is_gpt:
|
||||
return self._create_instructor().to_pydantic()
|
||||
else:
|
||||
return self._create_chain().invoke({})
|
||||
except Exception as e:
|
||||
if current_attempt < self.max_attemps:
|
||||
return self.to_pydantic(current_attempt + 1)
|
||||
return ConverterError(
|
||||
f"Failed to convert text into a pydantic model due to the following error: {e}"
|
||||
)
|
||||
|
||||
def to_json(self, current_attempt=1):
|
||||
"""Convert text to json."""
|
||||
try:
|
||||
if self._is_gpt:
|
||||
return self._create_instructor().to_json()
|
||||
else:
|
||||
return json.dumps(self._create_chain().invoke({}).model_dump())
|
||||
except Exception:
|
||||
if current_attempt < self.max_attemps:
|
||||
return self.to_json(current_attempt + 1)
|
||||
return ConverterError("Failed to convert text into JSON.")
|
||||
|
||||
def _create_instructor(self):
|
||||
"""Create an instructor."""
|
||||
from crewai.utilities import Instructor
|
||||
|
||||
inst = Instructor(
|
||||
llm=self.llm,
|
||||
max_attemps=self.max_attemps,
|
||||
model=self.model,
|
||||
content=self.text,
|
||||
instructions=self.instructions,
|
||||
)
|
||||
return inst
|
||||
|
||||
def _create_chain(self):
|
||||
"""Create a chain."""
|
||||
from crewai.utilities.crew_pydantic_output_parser import (
|
||||
CrewPydanticOutputParser,
|
||||
)
|
||||
|
||||
parser = CrewPydanticOutputParser(pydantic_object=self.model)
|
||||
new_prompt = HumanMessage(content=self.text) + SystemMessage(
|
||||
content=self.instructions
|
||||
)
|
||||
return new_prompt | self.llm | parser
|
||||
|
||||
def _is_gpt(self, llm) -> bool:
|
||||
return isinstance(llm, ChatOpenAI) and llm.openai_api_base == None
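The new `Converter` above wraps an LLM to coerce free text into either a Pydantic model or JSON, returning a `ConverterError` value after `max_attemps` retries instead of raising. A hedged usage sketch follows, assuming a crewAI build that matches this diff and `OPENAI_API_KEY` in the environment; `Highlights` is an illustrative model, not part of the library.

```python
# Hedged usage sketch for the Converter added above; assumes a crewAI build
# matching this diff and OPENAI_API_KEY set. Highlights is illustrative only.
from typing import List

from langchain_openai import ChatOpenAI
from pydantic import BaseModel

from crewai.utilities import Converter, ConverterError

class Highlights(BaseModel):
    title: str
    bullet_points: List[str]

converter = Converter(
    llm=ChatOpenAI(model="gpt-4"),
    text="This release adds token usage metrics, task output models and tool fixes.",
    model=Highlights,
    instructions="I'm gonna convert this raw text into valid JSON.",
)

result = converter.to_pydantic()
if isinstance(result, ConverterError):
    print(result.message)        # conversion failed after the retries
else:
    print(result.bullet_points)  # a typed Highlights instance
```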
|
||||
43  src/crewai/utilities/crew_pydantic_output_parser.py  Normal file
@@ -0,0 +1,43 @@
|
||||
import json
|
||||
from typing import Any, List, Type, Union
|
||||
|
||||
import regex
|
||||
from langchain.output_parsers import PydanticOutputParser
|
||||
from langchain_core.exceptions import OutputParserException
|
||||
from langchain_core.outputs import Generation
|
||||
from langchain_core.pydantic_v1 import ValidationError
|
||||
from pydantic import BaseModel
|
||||
from pydantic.v1 import BaseModel as V1BaseModel
|
||||
|
||||
|
||||
class CrewPydanticOutputParser(PydanticOutputParser):
|
||||
"""Parses the text into pydantic models"""
|
||||
|
||||
pydantic_object: Union[Type[BaseModel], Type[V1BaseModel]]
|
||||
|
||||
def parse_result(self, result: List[Generation], *, partial: bool = False) -> Any:
|
||||
result[0].text = self._transform_in_valid_json(result[0].text)
|
||||
json_object = super().parse_result(result)
|
||||
try:
|
||||
return self.pydantic_object.parse_obj(json_object)
|
||||
except ValidationError as e:
|
||||
name = self.pydantic_object.__name__
|
||||
msg = f"Failed to parse {name} from completion {json_object}. Got: {e}"
|
||||
raise OutputParserException(msg, llm_output=json_object)
|
||||
|
||||
def _transform_in_valid_json(self, text) -> str:
|
||||
text = text.replace("```", "").replace("json", "")
|
||||
json_pattern = r"\{(?:[^{}]|(?R))*\}"
|
||||
matches = regex.finditer(json_pattern, text)
|
||||
|
||||
for match in matches:
|
||||
try:
|
||||
# Attempt to parse the matched string as JSON
|
||||
json_obj = json.loads(match.group())
|
||||
# Return the first successfully parsed JSON object
|
||||
json_obj = json.dumps(json_obj)
|
||||
return str(json_obj)
|
||||
except json.JSONDecodeError:
|
||||
# If parsing fails, skip to the next match
|
||||
continue
|
||||
return text
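The `_transform_in_valid_json` helper above pulls the first parseable JSON object out of noisy LLM text using the third-party `regex` module, whose `(?R)` recursion matches balanced nested braces (something the standard `re` module cannot do). A standalone sketch of the same trick, with a made-up noisy string:

```python
# Standalone sketch of the JSON-extraction trick in _transform_in_valid_json
# above. Requires the third-party `regex` package; (?R) recursion matches
# balanced, possibly nested, curly braces.
import json
import regex

def first_json_object(text: str) -> str:
    text = text.replace("```", "").replace("json", "")
    for match in regex.finditer(r"\{(?:[^{}]|(?R))*\}", text):
        try:
            # Return the first candidate that actually parses as JSON.
            return json.dumps(json.loads(match.group()))
        except json.JSONDecodeError:
            continue
    return text

fence = "`" * 3
payload = '{"tool_name": "multiplier", "arguments": {"first_number": 3, "second_number": 4}}'
noisy = f"Sure thing! {fence}json\n{payload}\n{fence} hope that helps"
print(first_json_object(noisy))
# {"tool_name": "multiplier", "arguments": {"first_number": 3, "second_number": 4}}
```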
|
||||
50  src/crewai/utilities/instructor.py  Normal file
@@ -0,0 +1,50 @@
|
||||
from typing import Any, Optional, Type
|
||||
|
||||
import instructor
|
||||
from pydantic import BaseModel, Field, PrivateAttr, model_validator
|
||||
|
||||
|
||||
class Instructor(BaseModel):
|
||||
"""Class that wraps an agent llm with instructor."""
|
||||
|
||||
_client: Any = PrivateAttr()
|
||||
content: str = Field(description="Content to be sent to the instructor.")
|
||||
agent: Optional[Any] = Field(
|
||||
description="The agent that needs to use instructor.", default=None
|
||||
)
|
||||
llm: Optional[Any] = Field(
|
||||
description="The agent that needs to use instructor.", default=None
|
||||
)
|
||||
instructions: Optional[str] = Field(
|
||||
description="Instructions to be sent to the instructor.",
|
||||
default=None,
|
||||
)
|
||||
model: Type[BaseModel] = Field(
|
||||
description="Pydantic model to be used to create an output."
|
||||
)
|
||||
|
||||
@model_validator(mode="after")
|
||||
def set_instructor(self):
|
||||
"""Set instructor."""
|
||||
if self.agent and not self.llm:
|
||||
self.llm = self.agent.function_calling_llm or self.agent.llm
|
||||
|
||||
self._client = instructor.patch(
|
||||
self.llm.client._client,
|
||||
mode=instructor.Mode.TOOLS,
|
||||
)
|
||||
return self
|
||||
|
||||
def to_json(self):
|
||||
model = self.to_pydantic()
|
||||
return model.model_dump_json(indent=2)
|
||||
|
||||
def to_pydantic(self):
|
||||
messages = [{"role": "user", "content": self.content}]
|
||||
if self.instructions:
|
||||
messages.append({"role": "system", "content": self.instructions})
|
||||
|
||||
model = self._client.chat.completions.create(
|
||||
model=self.llm.model_name, response_model=self.model, messages=messages
|
||||
)
|
||||
return model
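The `Instructor` utility above patches an OpenAI-backed LangChain client with the `instructor` package so a completion comes back as a typed Pydantic object. A hedged usage sketch, assuming a crewAI build matching this diff, the `instructor` package, and `OPENAI_API_KEY`; `ToolPick` is an illustrative model only.

```python
# Hedged usage sketch for the Instructor wrapper above; assumes a crewAI build
# matching this diff. ToolPick is an illustrative model, not part of the library.
from langchain_openai import ChatOpenAI
from pydantic import BaseModel

from crewai.utilities import Instructor

class ToolPick(BaseModel):
    tool_name: str
    reason: str

inst = Instructor(
    llm=ChatOpenAI(model="gpt-4"),
    model=ToolPick,
    content="Pick the right tool to multiply 3 by 4. Options: multiplier, search.",
    instructions="Return only the schema fields.",
)

pick = inst.to_pydantic()   # a typed ToolPick instance
print(pick.tool_name)
print(inst.to_json())       # runs the call again and dumps the result as JSON
```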
|
||||
@@ -1,6 +1,6 @@
|
||||
from typing import ClassVar
|
||||
from typing import Any, ClassVar
|
||||
|
||||
from langchain.prompts import PromptTemplate, BasePromptTemplate
|
||||
from langchain.prompts import BasePromptTemplate, PromptTemplate
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from crewai.utilities import I18N
|
||||
@@ -10,12 +10,18 @@ class Prompts(BaseModel):
|
||||
"""Manages and generates prompts for a generic agent with support for different languages."""
|
||||
|
||||
i18n: I18N = Field(default=I18N())
|
||||
|
||||
tools: list[Any] = Field(default=[])
|
||||
SCRATCHPAD_SLICE: ClassVar[str] = "\n{agent_scratchpad}"
|
||||
|
||||
def task_execution_with_memory(self) -> BasePromptTemplate:
|
||||
"""Generate a prompt for task execution with memory components."""
|
||||
return self._build_prompt(["role_playing", "tools", "memory", "task"])
|
||||
slices = ["role_playing"]
|
||||
if len(self.tools) > 0:
|
||||
slices.append("tools")
|
||||
else:
|
||||
slices.append("no_tools")
|
||||
slices.extend(["memory", "task"])
|
||||
return self._build_prompt(slices)
|
||||
|
||||
def task_execution_without_tools(self) -> BasePromptTemplate:
|
||||
"""Generate a prompt for task execution without tools components."""
|
||||
@@ -23,10 +29,17 @@ class Prompts(BaseModel):
|
||||
|
||||
def task_execution(self) -> BasePromptTemplate:
|
||||
"""Generate a standard prompt for task execution."""
|
||||
return self._build_prompt(["role_playing", "tools", "task"])
|
||||
slices = ["role_playing"]
|
||||
if len(self.tools) > 0:
|
||||
slices.append("tools")
|
||||
else:
|
||||
slices.append("no_tools")
|
||||
slices.append("task")
|
||||
return self._build_prompt(slices)
|
||||
|
||||
def _build_prompt(self, components: list[str]) -> BasePromptTemplate:
|
||||
"""Constructs a prompt string from specified components."""
|
||||
prompt_parts = [self.i18n.slice(component) for component in components]
|
||||
prompt_parts.append(self.SCRATCHPAD_SLICE)
|
||||
return PromptTemplate.from_template("".join(prompt_parts))
|
||||
prompt = PromptTemplate.from_template("".join(prompt_parts))
|
||||
return prompt
|
||||
|
||||
40  src/crewai/utilities/pydantic_schema_parser.py  Normal file
@@ -0,0 +1,40 @@
|
||||
from typing import Type, get_args, get_origin
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
||||
class PydanticSchemaParser(BaseModel):
|
||||
model: Type[BaseModel]
|
||||
|
||||
def get_schema(self) -> str:
|
||||
"""
|
||||
Public method to get the schema of a Pydantic model.
|
||||
|
||||
:param model: The Pydantic model class to generate schema for.
|
||||
:return: String representation of the model schema.
|
||||
"""
|
||||
return self._get_model_schema(self.model)
|
||||
|
||||
def _get_model_schema(self, model, depth=0) -> str:
|
||||
lines = []
|
||||
for field_name, field in model.model_fields.items():
|
||||
field_type_str = self._get_field_type(field, depth + 1)
|
||||
lines.append(f"{' ' * 4 * depth}- {field_name}: {field_type_str}")
|
||||
|
||||
return "\n".join(lines)
|
||||
|
||||
def _get_field_type(self, field, depth) -> str:
|
||||
field_type = field.annotation
|
||||
if get_origin(field_type) is list:
|
||||
list_item_type = get_args(field_type)[0]
|
||||
if isinstance(list_item_type, type) and issubclass(
|
||||
list_item_type, BaseModel
|
||||
):
|
||||
nested_schema = self._get_model_schema(list_item_type, depth + 1)
|
||||
return f"List[\n{nested_schema}\n{' ' * 4 * depth}]"
|
||||
else:
|
||||
return f"List[{list_item_type.__name__}]"
|
||||
elif issubclass(field_type, BaseModel):
|
||||
return f"\n{self._get_model_schema(field_type, depth)}"
|
||||
else:
|
||||
return field_type.__name__
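`PydanticSchemaParser.get_schema()` above renders a model's fields as an indented plain-text outline, which `Task._export_output` appends to the conversion instructions for non-GPT models. A hedged sketch of calling it on a small nested model; `Finding` and `Report` are illustrative.

```python
# Hedged sketch for PydanticSchemaParser above; assumes a crewAI build matching
# this diff. Finding and Report are illustrative models.
from typing import List

from pydantic import BaseModel

from crewai.utilities.pydantic_schema_parser import PydanticSchemaParser

class Finding(BaseModel):
    claim: str
    confidence: float

class Report(BaseModel):
    title: str
    findings: List[Finding]

print(PydanticSchemaParser(model=Report).get_schema())
# Roughly:
# - title: str
# - findings: List[
#         - claim: str
#         - confidence: float
#     ]
```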
|
||||
60  src/crewai/utilities/token_counter_callback.py  Normal file
@@ -0,0 +1,60 @@
|
||||
from typing import Any, Dict, List
|
||||
|
||||
import tiktoken
|
||||
from langchain.callbacks.base import BaseCallbackHandler
|
||||
from langchain.schema import LLMResult
|
||||
|
||||
|
||||
class TokenProcess:
|
||||
total_tokens: int = 0
|
||||
prompt_tokens: int = 0
|
||||
completion_tokens: int = 0
|
||||
successful_requests: int = 0
|
||||
|
||||
def sum_prompt_tokens(self, tokens: int):
|
||||
self.prompt_tokens = self.prompt_tokens + tokens
|
||||
self.total_tokens = self.total_tokens + tokens
|
||||
|
||||
def sum_completion_tokens(self, tokens: int):
|
||||
self.completion_tokens = self.completion_tokens + tokens
|
||||
self.total_tokens = self.total_tokens + tokens
|
||||
|
||||
def sum_successful_requests(self, requests: int):
|
||||
self.successful_requests = self.successful_requests + requests
|
||||
|
||||
def get_summary(self) -> str:
|
||||
return {
|
||||
"total_tokens": self.total_tokens,
|
||||
"prompt_tokens": self.prompt_tokens,
|
||||
"completion_tokens": self.completion_tokens,
|
||||
"successful_requests": self.successful_requests,
|
||||
}
|
||||
|
||||
|
||||
class TokenCalcHandler(BaseCallbackHandler):
|
||||
model: str = ""
|
||||
token_cost_process: TokenProcess
|
||||
|
||||
def __init__(self, model, token_cost_process):
|
||||
self.model = model
|
||||
self.token_cost_process = token_cost_process
|
||||
|
||||
def on_llm_start(
|
||||
self, serialized: Dict[str, Any], prompts: List[str], **kwargs: Any
|
||||
) -> None:
|
||||
if "gpt" in self.model:
|
||||
encoding = tiktoken.encoding_for_model(self.model)
|
||||
else:
|
||||
encoding = tiktoken.get_encoding("cl100k_base")
|
||||
|
||||
if self.token_cost_process == None:
|
||||
return
|
||||
|
||||
for prompt in prompts:
|
||||
self.token_cost_process.sum_prompt_tokens(len(encoding.encode(prompt)))
|
||||
|
||||
async def on_llm_new_token(self, token: str, **kwargs) -> None:
|
||||
self.token_cost_process.sum_completion_tokens(1)
|
||||
|
||||
def on_llm_end(self, response: LLMResult, **kwargs: Any) -> None:
|
||||
self.token_cost_process.sum_successful_requests(1)
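The callback above tokenizes prompts with tiktoken and increments a shared `TokenProcess`, whose `get_summary()` is what `Crew.kickoff` aggregates into `usage_metrics`. A hedged sketch that drives the handler by hand (normally the Agent wires it up, as the tests further down check); assumes a crewAI build matching this diff plus `tiktoken` installed.

```python
# Hedged sketch for the token-counting utilities above; the handler is driven
# by hand here purely to show the bookkeeping.
from crewai.utilities.token_counter_callback import TokenCalcHandler, TokenProcess

process = TokenProcess()
handler = TokenCalcHandler(model="gpt-4", token_cost_process=process)

# on_llm_start tokenizes each prompt and adds the count to prompt/total tokens.
handler.on_llm_start(serialized={}, prompts=["What is 3 times 4?"])

process.sum_completion_tokens(5)    # pretend five streamed tokens arrived
process.sum_successful_requests(1)

print(process.get_summary())
# prompt/total counts depend on tiktoken's tokenization, e.g.
# {'total_tokens': ..., 'prompt_tokens': ..., 'completion_tokens': 5, 'successful_requests': 1}
```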
|
||||
@@ -4,11 +4,13 @@ from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
from langchain.tools import tool
|
||||
from langchain_core.exceptions import OutputParserException
|
||||
from langchain_openai import ChatOpenAI
|
||||
|
||||
from crewai import Agent, Crew, Task
|
||||
from crewai.agents.cache import CacheHandler
|
||||
from crewai.agents.executor import CrewAgentExecutor
|
||||
from crewai.agents.parser import CrewAgentParser
|
||||
from crewai.tools.tool_calling import InstructorToolCalling
|
||||
from crewai.tools.tool_usage import ToolUsage
|
||||
from crewai.utilities import RPMController
|
||||
@@ -104,7 +106,7 @@ def test_agent_execution_with_tools():
|
||||
|
||||
task = Task(description="What is 3 times 4?", agent=agent)
|
||||
output = agent.execute_task(task)
|
||||
assert output == "3 times 4 equals 12."
|
||||
assert output == "The result of 3 times 4 is 12."
|
||||
|
||||
|
||||
@pytest.mark.vcr(filter_headers=["authorization"])
|
||||
@@ -131,7 +133,7 @@ def test_logging_tool_usage():
|
||||
tool_usage = InstructorToolCalling(
|
||||
tool_name=multiplier.name, arguments={"first_number": 3, "second_number": 4}
|
||||
)
|
||||
assert output == "3 times 4 equals 12."
|
||||
assert output == "The result of multiplying 3 by 4 is 12."
|
||||
assert agent.tools_handler.last_used_tool.tool_name == tool_usage.tool_name
|
||||
assert agent.tools_handler.last_used_tool.arguments == tool_usage.arguments
|
||||
|
||||
@@ -206,7 +208,7 @@ def test_agent_execution_with_specific_tools():
|
||||
|
||||
task = Task(description="What is 3 times 4", agent=agent)
|
||||
output = agent.execute_task(task=task, tools=[multiplier])
|
||||
assert output == "3 times 4 is 12."
|
||||
assert output == "12"
|
||||
|
||||
|
||||
@pytest.mark.vcr(filter_headers=["authorization"])
|
||||
@@ -241,7 +243,7 @@ def test_agent_custom_max_iterations():
|
||||
@pytest.mark.vcr(filter_headers=["authorization"])
|
||||
def test_agent_repeated_tool_usage(capsys):
|
||||
@tool
|
||||
def get_final_answer(numbers) -> float:
|
||||
def get_final_answer(anything: str) -> float:
|
||||
"""Get the final answer but don't give it yet, just re-use this
|
||||
tool non-stop."""
|
||||
return 42
|
||||
@@ -251,11 +253,13 @@ def test_agent_repeated_tool_usage(capsys):
|
||||
goal="test goal",
|
||||
backstory="test backstory",
|
||||
max_iter=4,
|
||||
llm=ChatOpenAI(model="gpt-4-0125-preview"),
|
||||
allow_delegation=False,
|
||||
verbose=True,
|
||||
)
|
||||
|
||||
task = Task(
|
||||
description="The final answer is 42. But don't give it yet, instead keep using the `get_final_answer` tool."
|
||||
description="The final answer is 42. But don't give it until I tell you so, instead keep using the `get_final_answer` tool."
|
||||
)
|
||||
# force cleaning cache
|
||||
agent.tools_handler.cache = CacheHandler()
|
||||
@@ -266,10 +270,7 @@ def test_agent_repeated_tool_usage(capsys):
|
||||
|
||||
captured = capsys.readouterr()
|
||||
|
||||
assert (
|
||||
"I just used the get_final_answer tool with input 42. So I already know that and must stop using it in a row with the same input. \nI could give my final answer if I'm ready, using exaclty the expected format bellow: \n\nThought: Do I need to use a tool? No\nFinal Answer: [your response here]\n"
|
||||
in captured.out
|
||||
)
|
||||
assert "Final Answer: 42" in captured.out
|
||||
|
||||
|
||||
@pytest.mark.vcr(filter_headers=["authorization"])
|
||||
@@ -301,7 +302,7 @@ def test_agent_moved_on_after_max_iterations():
|
||||
@pytest.mark.vcr(filter_headers=["authorization"])
|
||||
def test_agent_respect_the_max_rpm_set(capsys):
|
||||
@tool
|
||||
def get_final_answer(numbers) -> float:
|
||||
def get_final_answer(anything: str) -> float:
|
||||
"""Get the final answer but don't give it yet, just re-use this
|
||||
tool non-stop."""
|
||||
return 42
|
||||
@@ -319,7 +320,7 @@ def test_agent_respect_the_max_rpm_set(capsys):
|
||||
with patch.object(RPMController, "_wait_for_next_minute") as moveon:
|
||||
moveon.return_value = True
|
||||
task = Task(
|
||||
description="The final answer is 42. But don't give it yet, instead keep using the `get_final_answer` tool, unless you're told otherwise"
|
||||
description="Use tool logic for `get_final_answer` but fon't give you final answer yet, instead keep using it unless you're told to give your final answer"
|
||||
)
|
||||
output = agent.execute_task(
|
||||
task=task,
|
||||
@@ -327,7 +328,7 @@ def test_agent_respect_the_max_rpm_set(capsys):
|
||||
)
|
||||
assert (
|
||||
output
|
||||
== "I have used the tool 'get_final_answer' with the input '42' multiple times and have observed the same result. Therefore, I am confident to conclude that the final answer is '42'."
|
||||
== 'The result of using the `get_final_answer` tool with the input "test input" is 42.'
|
||||
)
|
||||
captured = capsys.readouterr()
|
||||
assert "Max RPM reached, waiting for next minute to start." in captured.out
|
||||
@@ -417,7 +418,7 @@ def test_agent_without_max_rpm_respet_crew_rpm(capsys):
|
||||
moveon.return_value = True
|
||||
crew.kickoff()
|
||||
captured = capsys.readouterr()
|
||||
assert "Action: get_final_answer" in captured.out
|
||||
assert "get_final_answer" in captured.out
|
||||
assert "Max RPM reached, waiting for next minute to start." in captured.out
|
||||
moveon.assert_called_once()
|
||||
|
||||
@@ -429,7 +430,7 @@ def test_agent_error_on_parsing_tool(capsys):
|
||||
from langchain.tools import tool
|
||||
|
||||
@tool
|
||||
def get_final_answer(numbers) -> float:
|
||||
def get_final_answer(anything: str) -> float:
|
||||
"""Get the final answer but don't give it yet, just re-use this
|
||||
tool non-stop."""
|
||||
return 42
|
||||
@@ -454,10 +455,7 @@ def test_agent_error_on_parsing_tool(capsys):
|
||||
force_exception.side_effect = Exception("Error on parsing tool.")
|
||||
crew.kickoff()
|
||||
captured = capsys.readouterr()
|
||||
assert (
|
||||
"It seems we encountered an unexpected error while trying to use the tool"
|
||||
in captured.out
|
||||
)
|
||||
assert "Error on parsing tool." in captured.out
|
||||
|
||||
|
||||
@pytest.mark.vcr(filter_headers=["authorization"])
|
||||
@@ -467,7 +465,7 @@ def test_agent_remembers_output_format_after_using_tools_too_many_times():
|
||||
from langchain.tools import tool
|
||||
|
||||
@tool
|
||||
def get_final_answer(numbers) -> float:
|
||||
def get_final_answer(anything: str) -> float:
|
||||
"""Get the final answer but don't give it yet, just re-use this
|
||||
tool non-stop."""
|
||||
return 42
|
||||
@@ -476,12 +474,12 @@ def test_agent_remembers_output_format_after_using_tools_too_many_times():
|
||||
role="test role",
|
||||
goal="test goal",
|
||||
backstory="test backstory",
|
||||
max_iter=4,
|
||||
max_iter=6,
|
||||
verbose=True,
|
||||
)
|
||||
tasks = [
|
||||
Task(
|
||||
description="Never give the final answer. Use the get_final_answer tool in a loop.",
|
||||
description="Use tool logic for `get_final_answer` but fon't give you final answer yet, instead keep using it unless you're told to give your final answer",
|
||||
agent=agent1,
|
||||
tools=[get_final_answer],
|
||||
)
|
||||
@@ -553,7 +551,7 @@ def test_agent_step_callback():
|
||||
def test_agent_function_calling_llm():
|
||||
from langchain_openai import ChatOpenAI
|
||||
|
||||
llm = ChatOpenAI(model="gpt-3.5")
|
||||
llm = ChatOpenAI(model="gpt-3.5-turbo-0125")
|
||||
|
||||
with patch.object(llm.client, "create", wraps=llm.client.create) as private_mock:
|
||||
|
||||
@@ -567,6 +565,7 @@ def test_agent_function_calling_llm():
|
||||
goal="test goal",
|
||||
backstory="test backstory",
|
||||
tools=[learn_about_AI],
|
||||
llm=ChatOpenAI(model="gpt-4-0125-preview"),
|
||||
function_calling_llm=llm,
|
||||
)
|
||||
|
||||
@@ -579,3 +578,39 @@ def test_agent_function_calling_llm():
|
||||
|
||||
crew.kickoff()
|
||||
private_mock.assert_called()
|
||||
|
||||
|
||||
def test_agent_count_formatting_error():
|
||||
from unittest.mock import patch
|
||||
|
||||
agent1 = Agent(
|
||||
role="test role",
|
||||
goal="test goal",
|
||||
backstory="test backstory",
|
||||
verbose=True,
|
||||
)
|
||||
|
||||
parser = CrewAgentParser()
|
||||
parser.agent = agent1
|
||||
|
||||
with patch.object(Agent, "increment_formatting_errors") as mock_count_errors:
|
||||
test_text = "This text does not match expected formats."
|
||||
with pytest.raises(OutputParserException):
|
||||
parser.parse(test_text)
|
||||
mock_count_errors.assert_called_once()
|
||||
|
||||
|
||||
def test_agent_llm_uses_token_calc_handler_with_llm_has_model_name():
|
||||
agent1 = Agent(
|
||||
role="test role",
|
||||
goal="test goal",
|
||||
backstory="test backstory",
|
||||
verbose=True,
|
||||
)
|
||||
|
||||
assert len(agent1.llm.callbacks) == 1
|
||||
assert agent1.llm.callbacks[0].__class__.__name__ == "TokenCalcHandler"
|
||||
assert agent1.llm.callbacks[0].model == "gpt-4"
|
||||
assert (
|
||||
agent1.llm.callbacks[0].token_cost_process.__class__.__name__ == "TokenProcess"
|
||||
)
|
||||
|
||||
@@ -51,7 +51,7 @@ def test_delegate_work_to_wrong_agent():
|
||||
|
||||
assert (
|
||||
result
|
||||
== "\nError executing tool. Co-worker mentioned on the Action Input not found, it must to be one of the following options:\n- researcher.\n"
|
||||
== "\nError executing tool. Co-worker mentioned not found, it must to be one of the following options:\n- researcher\n"
|
||||
)
|
||||
|
||||
|
||||
@@ -64,5 +64,5 @@ def test_ask_question_to_wrong_agent():
|
||||
|
||||
assert (
|
||||
result
|
||||
== "\nError executing tool. Co-worker mentioned on the Action Input not found, it must to be one of the following options:\n- researcher.\n"
|
||||
== "\nError executing tool. Co-worker mentioned not found, it must to be one of the following options:\n- researcher\n"
|
||||
)
|
||||
|
||||
File diff suppressed because it is too large
@@ -1,28 +1,21 @@
|
||||
interactions:
|
||||
- request:
|
||||
body: '{"messages": [{"role": "user", "content": "You are test role.\ntest backstory\n\nYour
|
||||
personal goal is: test goalTOOLS:\n------\nYou have access to only the following
|
||||
tools:\n\n(''multiplier: multiplier(first_number: int, second_number: int) ->
|
||||
float - Useful for when you need to multiply two numbers together.'',)\n\nTo
|
||||
use a tool, please use the exact following format:\n\n```\nThought: Do I need
|
||||
to use a tool? Yes\nAction: the tool you wanna use, should be one of [multiplier],
|
||||
just the name.\nAction Input: Any and all relevant information input and context
|
||||
for using the tool\nObservation: the result of using the tool\n```\n\nWhen you
|
||||
have a response for your task, or if you do not need to use a tool, you MUST
|
||||
use the format:\n\n```\nThought: Do I need to use a tool? No\nFinal Answer:
|
||||
[your response here]```This is the summary of your work so far:\nBegin! This
|
||||
is VERY important to you, your job depends on it!\n\nCurrent Task: What is 3
|
||||
times 4\n"}], "model": "gpt-4", "n": 1, "stop": ["\nObservation"], "stream":
|
||||
personal goal is: test goalTo complete the task you MUST follow the format:\n\n```\nFinal
|
||||
Answer: [your most complete final answer goes here]\n``` You must use these
|
||||
formats, my life depends on it.This is the summary of your work so far:\nBegin!
|
||||
This is VERY important to you, your job depends on it!\n\nCurrent Task: What
|
||||
is 3 times 4\n"}], "model": "gpt-4", "n": 1, "stop": ["\nResult"], "stream":
|
||||
true, "temperature": 0.7}'
|
||||
headers:
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- gzip, deflate
|
||||
- gzip, deflate, br
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '1050'
|
||||
- '511'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
@@ -47,123 +40,27 @@ interactions:
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: 'data: {"id":"chatcmpl-8rhrE6b7f4GrMPaunp8xcwcWQE9kn","object":"chat.completion.chunk","created":1707810664,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
|
||||
string: 'data: {"id":"chatcmpl-8uAN3ZtdzWEuH23HGRoPRRtzvfGBZ","object":"chat.completion.chunk","created":1708396925,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhrE6b7f4GrMPaunp8xcwcWQE9kn","object":"chat.completion.chunk","created":1707810664,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Thought"},"logprobs":null,"finish_reason":null}]}
|
||||
data: {"id":"chatcmpl-8uAN3ZtdzWEuH23HGRoPRRtzvfGBZ","object":"chat.completion.chunk","created":1708396925,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhrE6b7f4GrMPaunp8xcwcWQE9kn","object":"chat.completion.chunk","created":1707810664,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||
data: {"id":"chatcmpl-8uAN3ZtdzWEuH23HGRoPRRtzvfGBZ","object":"chat.completion.chunk","created":1708396925,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
Answer"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhrE6b7f4GrMPaunp8xcwcWQE9kn","object":"chat.completion.chunk","created":1707810664,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
Do"},"logprobs":null,"finish_reason":null}]}
|
||||
data: {"id":"chatcmpl-8uAN3ZtdzWEuH23HGRoPRRtzvfGBZ","object":"chat.completion.chunk","created":1708396925,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhrE6b7f4GrMPaunp8xcwcWQE9kn","object":"chat.completion.chunk","created":1707810664,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
I"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhrE6b7f4GrMPaunp8xcwcWQE9kn","object":"chat.completion.chunk","created":1707810664,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
need"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhrE6b7f4GrMPaunp8xcwcWQE9kn","object":"chat.completion.chunk","created":1707810664,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
to"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhrE6b7f4GrMPaunp8xcwcWQE9kn","object":"chat.completion.chunk","created":1707810664,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
use"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhrE6b7f4GrMPaunp8xcwcWQE9kn","object":"chat.completion.chunk","created":1707810664,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
a"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhrE6b7f4GrMPaunp8xcwcWQE9kn","object":"chat.completion.chunk","created":1707810664,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
tool"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhrE6b7f4GrMPaunp8xcwcWQE9kn","object":"chat.completion.chunk","created":1707810664,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"?"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhrE6b7f4GrMPaunp8xcwcWQE9kn","object":"chat.completion.chunk","created":1707810664,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
Yes"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhrE6b7f4GrMPaunp8xcwcWQE9kn","object":"chat.completion.chunk","created":1707810664,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"\n"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhrE6b7f4GrMPaunp8xcwcWQE9kn","object":"chat.completion.chunk","created":1707810664,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Action"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhrE6b7f4GrMPaunp8xcwcWQE9kn","object":"chat.completion.chunk","created":1707810664,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhrE6b7f4GrMPaunp8xcwcWQE9kn","object":"chat.completion.chunk","created":1707810664,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
multiplier"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhrE6b7f4GrMPaunp8xcwcWQE9kn","object":"chat.completion.chunk","created":1707810664,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"\n"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhrE6b7f4GrMPaunp8xcwcWQE9kn","object":"chat.completion.chunk","created":1707810664,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Action"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhrE6b7f4GrMPaunp8xcwcWQE9kn","object":"chat.completion.chunk","created":1707810664,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
Input"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhrE6b7f4GrMPaunp8xcwcWQE9kn","object":"chat.completion.chunk","created":1707810664,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhrE6b7f4GrMPaunp8xcwcWQE9kn","object":"chat.completion.chunk","created":1707810664,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
{\""},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhrE6b7f4GrMPaunp8xcwcWQE9kn","object":"chat.completion.chunk","created":1707810664,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"first"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhrE6b7f4GrMPaunp8xcwcWQE9kn","object":"chat.completion.chunk","created":1707810664,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"_number"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhrE6b7f4GrMPaunp8xcwcWQE9kn","object":"chat.completion.chunk","created":1707810664,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"\":"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhrE6b7f4GrMPaunp8xcwcWQE9kn","object":"chat.completion.chunk","created":1707810664,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
data: {"id":"chatcmpl-8uAN3ZtdzWEuH23HGRoPRRtzvfGBZ","object":"chat.completion.chunk","created":1708396925,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhrE6b7f4GrMPaunp8xcwcWQE9kn","object":"chat.completion.chunk","created":1707810664,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"3"},"logprobs":null,"finish_reason":null}]}
|
||||
data: {"id":"chatcmpl-8uAN3ZtdzWEuH23HGRoPRRtzvfGBZ","object":"chat.completion.chunk","created":1708396925,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"12"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhrE6b7f4GrMPaunp8xcwcWQE9kn","object":"chat.completion.chunk","created":1707810664,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":","},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhrE6b7f4GrMPaunp8xcwcWQE9kn","object":"chat.completion.chunk","created":1707810664,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
\""},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhrE6b7f4GrMPaunp8xcwcWQE9kn","object":"chat.completion.chunk","created":1707810664,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"second"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhrE6b7f4GrMPaunp8xcwcWQE9kn","object":"chat.completion.chunk","created":1707810664,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"_number"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhrE6b7f4GrMPaunp8xcwcWQE9kn","object":"chat.completion.chunk","created":1707810664,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"\":"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhrE6b7f4GrMPaunp8xcwcWQE9kn","object":"chat.completion.chunk","created":1707810664,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhrE6b7f4GrMPaunp8xcwcWQE9kn","object":"chat.completion.chunk","created":1707810664,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"4"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhrE6b7f4GrMPaunp8xcwcWQE9kn","object":"chat.completion.chunk","created":1707810664,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"}"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhrE6b7f4GrMPaunp8xcwcWQE9kn","object":"chat.completion.chunk","created":1707810664,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
|
||||
data: {"id":"chatcmpl-8uAN3ZtdzWEuH23HGRoPRRtzvfGBZ","object":"chat.completion.chunk","created":1708396925,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
|
||||
|
||||
|
||||
data: [DONE]
|
||||
@@ -174,7 +71,7 @@ interactions:
[Several small response-header hunks (@@ -182,14 +79,14 @@, @@ -200,9 +97,9 @@, @@ -214,316 +111,13 @@) for the stream above: the re-recording updates CF-RAY (854b7beb6d1c15b6-SJC -> 858364efbe7e01b0-GRU), Date (Tue, 13 Feb 2024 07:51:05 GMT -> Tue, 20 Feb 2024 02:42:05 GMT), the __cf_bm/_cfuvid Set-Cookie values, openai-organization (user-z7g4wmlazxqvc5wjyaaaocfz -> crewai-iuxna1), openai-processing-ms ('412' -> '261') and the x-ratelimit counters (remaining tokens '299760' -> '299890', reset 47ms -> 22ms); the response stays 200 OK with Content-Type text/event-stream.]
- request:
[Recorded interaction inside the large hunk above: a non-streaming gpt-4 call that asks the model to "Return a valid schema for the tool" for multiplier(first_number: int, second_number: int) -> float, forcing the function call "InstructorToolCalling" whose parameters are tool_name ("The name of the tool to be called.") and arguments ("A dictinary of arguments to be passed to the tool."); see the schema sketch just below. The text to parse is 'Tool Name: multiplier\nTool Arguments: {"first_number": 3, "second_number": 4}'. Request headers carry the 13 Feb cookies and an OpenAI/Python 1.12.0 user agent (arm64 / MacOS / CPython 3.11.7); the response body is a gzip !!binary blob returned 200 OK on Tue, 13 Feb 2024 07:51:08 GMT (openai-processing-ms '2207', x-request-id req_4f8d63af1db9145604765b7823e6a134).]
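For readability, the "InstructorToolCalling" schema that the request above forces the model to call can be sketched as a Pydantic model. The field names and descriptions below are copied from the recorded JSON schema (including its "dictinary" typo); the BaseModel/Field framing is an assumption, not the library's actual class definition.

```python
# Sketch only: field names and descriptions mirror the recorded function
# schema; the Pydantic framing is an assumption about how it is defined.
from pydantic import BaseModel, Field


class InstructorToolCalling(BaseModel):
    tool_name: str = Field(
        ..., description="The name of the tool to be called."
    )
    arguments: dict = Field(
        ..., description="A dictinary of arguments to be passed to the tool."
    )
```

Given the recorded prompt, the expected extraction is `{"tool_name": "multiplier", "arguments": {"first_number": 3, "second_number": 4}}`, matching the example format in the system message.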
- request:
[Recorded follow-up interaction: the same ReAct-style prompt ("You are test role... Current Task: What is 3 times 4") now carries the scratchpad 'Thought: Do I need to use a tool? Yes / Action: multiplier / Action Input: {"first_number": 3, "second_number": 4} / Observation: 12 / Thought:', sent to gpt-4 with stop ["\nObservation"] and stream true (the multiplier tool itself is sketched just below). The streamed reply (id chatcmpl-8rhrJ..., 13 Feb 2024) spells out "Do I need to use a tool? No / Final Answer: 3 times 4 is 12." and ends with data: [DONE]; the response headers mirror the earlier ones (cloudflare, text/event-stream, 200 OK), with the x-request-id line diffed from req_fc64b9b6f818bc5aa798793d5783f7b2 to req_5d13f96a4d7cb8b5c7b58fc21d96c80f.]
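All of these recordings exercise a single toy tool whose signature and description are embedded verbatim in the prompts above ("multiplier(first_number: int, second_number: int) -> float - Useful for when you need to multiply two numbers together."). A minimal sketch of such a tool, assuming it is defined with LangChain's `tool` decorator as in the README examples, could look like this:

```python
# Minimal sketch of the tool these cassettes exercise; the signature and
# docstring are taken from the tool description recorded in the prompts,
# while using LangChain's @tool decorator here is an assumption.
from langchain.tools import tool


@tool
def multiplier(first_number: int, second_number: int) -> float:
    """Useful for when you need to multiply two numbers together."""
    return first_number * second_number
```

Running it with the recorded arguments, e.g. `multiplier.run({"first_number": 3, "second_number": 4})`, yields the 12 that appears as the observation in the transcripts.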
@@ -538,22 +132,22 @@ interactions:
[Memory-summarization interaction, diffed between the two recordings: a non-streaming gpt-4 call with the "Progressively summarize the lines of conversation provided..." summary prompt, where the new conversation lines change from "Human: What is 3 times 4\nAI: 3 times 4 is 12." to "Human: What is 3 times 4\nAI: 12" (content-length '885' -> '871', accept-encoding gains "br", cookies updated). The follow-on hunks (@@ -577,27 +171,27 @@, @@ -609,9 +203,9 @@, @@ -623,13 +217,13 @@) swap the gzip !!binary response body for a brotli-encoded one and update CF-RAY (854b7c14798815b6-SJC -> 858364f3f9c101b0-GRU), Date (13 Feb -> 20 Feb 2024), openai-organization (user-z7g4wmlazxqvc5wjyaaaocfz -> crewai-iuxna1), openai-processing-ms ('2088' -> '737'), the x-ratelimit counters and x-request-id (req_eb2a14859f9f296e36283cec901116cb -> req_afa9aab4213989f7a5f167139b479a8c); status stays 200 OK.]
@@ -1,28 +1,27 @@
interactions:
- request:
body: '{"messages": [{"role": "user", "content": "You are test role.\ntest backstory\n\nYour
personal goal is: test goalTOOLS:\n------\nYou have access to only the following
tools:\n\n(''multiplier: multiplier(first_number: int, second_number: int) ->
float - Useful for when you need to multiply two numbers together.'',)\n\nTo
use a tool, please use the exact following format:\n\n```\nThought: Do I need
to use a tool? Yes\nAction: the tool you wanna use, should be one of [multiplier],
just the name.\nAction Input: Any and all relevant information input and context
for using the tool\nObservation: the result of using the tool\n```\n\nWhen you
have a response for your task, or if you do not need to use a tool, you MUST
use the format:\n\n```\nThought: Do I need to use a tool? No\nFinal Answer:
[your response here]```This is the summary of your work so far:\nBegin! This
is VERY important to you, your job depends on it!\n\nCurrent Task: What is 3
times 4?\n"}], "model": "gpt-4", "n": 1, "stop": ["\nObservation"], "stream":
true, "temperature": 0.7}'
personal goal is: test goalYou have access to ONLY the following tools, use
one at time:\n\nmultiplier: multiplier(first_number: int, second_number: int)
-> float - Useful for when you need to multiply two numbers together.\n\nTo
use a tool you MUST use the exact following format:\n\n```\nUse Tool: the tool
you wanna use, should be one of [multiplier] and absolute all relevant input
and context for using the tool, you must use only one tool at once.\nResult:
[result of the tool]\n```\n\nTo complete the task you MUST follow the format:\n\n```\nFinal
Answer: [THE MOST COMPLETE ANSWE WITH ALL CONTEXT, DO NOT LEAVE ANYTHING OUT]\n```
You must use these formats, my life depends on it.This is the summary of your
work so far:\nBegin! This is VERY important to you, your job depends on it!\n\nCurrent
Task: What is 3 times 4?\n"}], "model": "gpt-4", "n": 1, "stop": ["\nResult"],
"stream": true, "temperature": 0.7}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate
- gzip, deflate, br
connection:
- keep-alive
content-length:
- '1051'
- '1003'
content-type:
- application/json
host:
@@ -47,123 +46,82 @@ interactions:
[Streamed completion diff for the request above: the old recording (id chatcmpl-8rhqbBUIPcszzNDkFqcJONl5uyRjU, created 13 Feb 2024) streams 'Thought: Do I need to use a tool? Yes / Action: multiplier / Action Input: {"first_number": 3, "second_number": 4}', while the re-recording (id chatcmpl-8uAMHBJxGwYmDm0whtJ7ryA3FNzz9, created 20 Feb 2024) streams "Use Tool: multiplier, with the first_number as 3 and the second_number as 4."; both end with a finish_reason "stop" chunk and data: [DONE]. The trailing hunks (@@ -174,7 +132,7 @@, @@ -182,14 +140,14 @@, @@ -200,122 +158,9 @@) update CF-RAY (854b7af66cad9655-SJC -> 858363c36d03a4ba-GRU), Date (Tue, 13 Feb 2024 07:50:25 GMT -> Tue, 20 Feb 2024 02:41:18 GMT), the Set-Cookie values and openai-organization (user-z7g4wmlazxqvc5wjyaaaocfz -> crewai-iuxna1); openai-processing-ms '225', x-request-id req_198e3204ee23b23d5eb5eeaf840dcd6e, status 200 OK.]
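These YAML files are VCR-style cassettes: recorded HTTP exchanges that the test suite replays instead of calling the OpenAI API, which is why re-recording them produces the header, cookie and request-id churn diffed above. As a rough, hypothetical sketch (the `pytest.mark.vcr` marker and the `execute_task` call are assumptions about the test setup, not code taken from the repository), a test replaying one of these cassettes might look like:

```python
# Hypothetical sketch of a test replaying a recorded cassette; the vcr marker
# (pytest-vcr / pytest-recording style) and the Agent API usage are
# assumptions, not the repository's actual test code.
import pytest

from crewai import Agent


@pytest.mark.vcr(filter_headers=["authorization"])
def test_agent_execution_with_tools():
    agent = Agent(
        role="test role",
        goal="test goal",
        backstory="test backstory",
        tools=[multiplier],  # the tool sketched earlier
        allow_delegation=False,
    )
    # With the cassette in place this runs entirely against the recorded
    # responses, so the result matches the recorded final answer.
    output = agent.execute_task("What is 3 times 4")
    assert "12" in output
```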
- request:
[Recorded interaction from the old cassette: the same "Return a valid schema for the tool" call as earlier, forcing the "InstructorToolCalling" function (tool_name / arguments) for multiplier, sent with the 13 Feb 2024 cookies and answered with a gzip !!binary body on Tue, 13 Feb 2024 07:50:31 GMT. The trailing hunk @@ -331,229 +176,45 @@ diffs openai-processing-ms ('2535' -> '334'), x-ratelimit-reset-tokens (45ms -> 46ms) and x-request-id (req_f0341a808828a455435564e19aae93b7 -> req_78cdca068954cd60d8c31a21a1591022); status 200 OK.]
- request:
|
||||
body: '{"messages": [{"role": "user", "content": "You are test role.\ntest backstory\n\nYour
|
||||
personal goal is: test goalTOOLS:\n------\nYou have access to only the following
|
||||
tools:\n\n(''multiplier: multiplier(first_number: int, second_number: int) ->
|
||||
float - Useful for when you need to multiply two numbers together.'',)\n\nTo
|
||||
use a tool, please use the exact following format:\n\n```\nThought: Do I need
|
||||
to use a tool? Yes\nAction: the tool you wanna use, should be one of [multiplier],
|
||||
just the name.\nAction Input: Any and all relevant information input and context
|
||||
for using the tool\nObservation: the result of using the tool\n```\n\nWhen you
|
||||
have a response for your task, or if you do not need to use a tool, you MUST
|
||||
use the format:\n\n```\nThought: Do I need to use a tool? No\nFinal Answer:
|
||||
[your response here]```This is the summary of your work so far:\nBegin! This
|
||||
is VERY important to you, your job depends on it!\n\nCurrent Task: What is 3
|
||||
times 4?\nThought: Do I need to use a tool? Yes\nAction: multiplier\nAction
|
||||
Input: {\"first_number\": 3, \"second_number\": 4}\nObservation: 12\nThought:
|
||||
"}], "model": "gpt-4", "n": 1, "stop": ["\nObservation"], "stream": true, "temperature":
|
||||
0.7}'
|
||||
body: '{"messages": [{"role": "user", "content": "Tools available:\n\nTool Name:
|
||||
multiplier\nTool Description: multiplier(first_number: int, second_number: int)
|
||||
-> float - Useful for when you need to multiply two numbers together.\nTool
|
||||
Arguments: {''first_number'': {''type'': ''integer''}, ''second_number'': {''type'':
|
||||
''integer''}}\n\nReturn a valid schema for the tool, the tool name must be equal
|
||||
one of the options, use this text to inform a valid ouput schema:\nUse Tool:
|
||||
multiplier, with the first_number as 3 and the second_number as 4.```"}, {"role":
|
||||
"system", "content": "The schema should have the following structure, only two
|
||||
keys:\n- tool_name: str\n- arguments: dict (with all arguments being passed)\n\nExample:\n{\"tool_name\":
|
||||
\"tool_name\", \"arguments\": {\"arg_name1\": \"value\", \"arg_name2\": 2}}\n"}],
|
||||
"model": "gpt-4", "tool_choice": {"type": "function", "function": {"name": "InstructorToolCalling"}},
|
||||
"tools": [{"type": "function", "function": {"name": "InstructorToolCalling",
|
||||
"description": "Correctly extracted `InstructorToolCalling` with all the required
|
||||
parameters with correct types", "parameters": {"properties": {"tool_name": {"description":
|
||||
"The name of the tool to be called.", "title": "Tool Name", "type": "string"},
|
||||
"arguments": {"description": "A dictinary of arguments to be passed to the tool.",
|
||||
"title": "Arguments", "type": "object"}}, "required": ["arguments", "tool_name"],
|
||||
"type": "object"}}}]}'
|
||||
headers:
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- gzip, deflate
|
||||
- gzip, deflate, br
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '1195'
|
||||
- '1427'
|
||||
content-type:
|
||||
- application/json
|
||||
cookie:
|
||||
- __cf_bm=1KOow.3J0t2OxoVQi5mb0rWx.eRB.e_i_QzfhmSDUss-1707810625-1-AYnrUrvGx81Jze9dSAFeX9JhHwV2P2fecA0Im/jPPedRz/Gk/RtK4W0+baUlCe8wRVmXlI0/3nR4HhLcA9W1dPI=;
|
||||
_cfuvid=lwFk3pBXS_caF6.lmxqR4UVI9FMIKSttmrWBXrT4Klk-1707810625774-0-604800000
|
||||
host:
|
||||
- api.openai.com
|
||||
user-agent:
|
||||
- OpenAI/Python 1.12.0
|
||||
x-stainless-arch:
|
||||
- arm64
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- MacOS
|
||||
x-stainless-package-version:
|
||||
- 1.12.0
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.11.7
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: 'data: {"id":"chatcmpl-8rhqhnZyqMjC0VI3FdgGRP7VGRtxz","object":"chat.completion.chunk","created":1707810631,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqhnZyqMjC0VI3FdgGRP7VGRtxz","object":"chat.completion.chunk","created":1707810631,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Do"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqhnZyqMjC0VI3FdgGRP7VGRtxz","object":"chat.completion.chunk","created":1707810631,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
I"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqhnZyqMjC0VI3FdgGRP7VGRtxz","object":"chat.completion.chunk","created":1707810631,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
need"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqhnZyqMjC0VI3FdgGRP7VGRtxz","object":"chat.completion.chunk","created":1707810631,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
to"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqhnZyqMjC0VI3FdgGRP7VGRtxz","object":"chat.completion.chunk","created":1707810631,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
use"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqhnZyqMjC0VI3FdgGRP7VGRtxz","object":"chat.completion.chunk","created":1707810631,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
a"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqhnZyqMjC0VI3FdgGRP7VGRtxz","object":"chat.completion.chunk","created":1707810631,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
tool"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqhnZyqMjC0VI3FdgGRP7VGRtxz","object":"chat.completion.chunk","created":1707810631,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"?"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqhnZyqMjC0VI3FdgGRP7VGRtxz","object":"chat.completion.chunk","created":1707810631,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
No"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqhnZyqMjC0VI3FdgGRP7VGRtxz","object":"chat.completion.chunk","created":1707810631,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"\n"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqhnZyqMjC0VI3FdgGRP7VGRtxz","object":"chat.completion.chunk","created":1707810631,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqhnZyqMjC0VI3FdgGRP7VGRtxz","object":"chat.completion.chunk","created":1707810631,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
Answer"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqhnZyqMjC0VI3FdgGRP7VGRtxz","object":"chat.completion.chunk","created":1707810631,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqhnZyqMjC0VI3FdgGRP7VGRtxz","object":"chat.completion.chunk","created":1707810631,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqhnZyqMjC0VI3FdgGRP7VGRtxz","object":"chat.completion.chunk","created":1707810631,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"3"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqhnZyqMjC0VI3FdgGRP7VGRtxz","object":"chat.completion.chunk","created":1707810631,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
times"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqhnZyqMjC0VI3FdgGRP7VGRtxz","object":"chat.completion.chunk","created":1707810631,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqhnZyqMjC0VI3FdgGRP7VGRtxz","object":"chat.completion.chunk","created":1707810631,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"4"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqhnZyqMjC0VI3FdgGRP7VGRtxz","object":"chat.completion.chunk","created":1707810631,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
equals"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rhqhnZyqMjC0VI3FdgGRP7VGRtxz","object":"chat.completion.chunk","created":1707810631,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rhqhnZyqMjC0VI3FdgGRP7VGRtxz","object":"chat.completion.chunk","created":1707810631,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"12"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rhqhnZyqMjC0VI3FdgGRP7VGRtxz","object":"chat.completion.chunk","created":1707810631,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rhqhnZyqMjC0VI3FdgGRP7VGRtxz","object":"chat.completion.chunk","created":1707810631,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
data: [DONE]
'
headers:
|
||||
CF-Cache-Status:
|
||||
- DYNAMIC
|
||||
CF-RAY:
|
||||
- 854b7b1dbb5e9655-SJC
|
||||
Cache-Control:
|
||||
- no-cache, must-revalidate
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- text/event-stream
|
||||
Date:
|
||||
- Tue, 13 Feb 2024 07:50:31 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
access-control-allow-origin:
|
||||
- '*'
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
openai-model:
|
||||
- gpt-4-0613
|
||||
openai-organization:
|
||||
- user-z7g4wmlazxqvc5wjyaaaocfz
|
||||
openai-processing-ms:
|
||||
- '508'
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
strict-transport-security:
|
||||
- max-age=15724800; includeSubDomains
|
||||
x-ratelimit-limit-requests:
|
||||
- '10000'
|
||||
x-ratelimit-limit-tokens:
|
||||
- '300000'
|
||||
x-ratelimit-remaining-requests:
|
||||
- '9999'
|
||||
x-ratelimit-remaining-tokens:
|
||||
- '299725'
|
||||
x-ratelimit-reset-requests:
|
||||
- 6ms
|
||||
x-ratelimit-reset-tokens:
|
||||
- 54ms
|
||||
x-request-id:
|
||||
- req_05c53af3bfe5ebe6490149585032dead
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"messages": [{"role": "user", "content": "Progressively summarize the
|
||||
lines of conversation provided, adding onto the previous summary returning a
|
||||
new summary.\n\nEXAMPLE\nCurrent summary:\nThe human asks what the AI thinks
|
||||
of artificial intelligence. The AI thinks artificial intelligence is a force
|
||||
for good.\n\nNew lines of conversation:\nHuman: Why do you think artificial
|
||||
intelligence is a force for good?\nAI: Because artificial intelligence will
|
||||
help humans reach their full potential.\n\nNew summary:\nThe human asks what
|
||||
the AI thinks of artificial intelligence. The AI thinks artificial intelligence
|
||||
is a force for good because it will help humans reach their full potential.\nEND
|
||||
OF EXAMPLE\n\nCurrent summary:\n\n\nNew lines of conversation:\nHuman: What
|
||||
is 3 times 4?\nAI: 3 times 4 equals 12.\n\nNew summary:"}], "model": "gpt-4",
|
||||
"n": 1, "stream": false, "temperature": 0.7}'
|
||||
headers:
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- gzip, deflate
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '890'
|
||||
content-type:
|
||||
- application/json
|
||||
cookie:
|
||||
- __cf_bm=1KOow.3J0t2OxoVQi5mb0rWx.eRB.e_i_QzfhmSDUss-1707810625-1-AYnrUrvGx81Jze9dSAFeX9JhHwV2P2fecA0Im/jPPedRz/Gk/RtK4W0+baUlCe8wRVmXlI0/3nR4HhLcA9W1dPI=;
|
||||
_cfuvid=lwFk3pBXS_caF6.lmxqR4UVI9FMIKSttmrWBXrT4Klk-1707810625774-0-604800000
|
||||
- __cf_bm=pxRSwqCB1TUHGyPk7A3gNpxP.p.GG4odLvnn1enCzA0-1708396878-1.0-AbvCo+vNuHWyCl3Gm6D//s2A4XFGiLKt9NtnD5752PrzioOdLnmpwWLpDreFWIOxV13kHVhfEi1Y2KMEjaxfDtY=;
|
||||
_cfuvid=ZXYFqXlrgq6ZaXX8prqOY4rU1o6I8IG2Hu4kEeiJ7Rw-1708396878059-0.0-604800000
|
||||
host:
|
||||
- api.openai.com
|
||||
user-agent:
|
||||
@@ -577,28 +238,29 @@ interactions:
|
||||
response:
|
||||
body:
|
||||
string: !!binary |
|
||||
H4sIAAAAAAAAA1SQwW7CMBBE7/mKlc+ACARocyunVlUrVUJFqKqQcZbE4NiudyNaIf69cgjQXnyY
|
||||
2Rm/3WMCIHQhchCqkqxqb/p3ofraPc7l2zxbpPvX5aF5Wanpqnx/LpZj0YsJt9mh4ktqoFztDbJ2
|
||||
9myrgJIxtqaz4ewuHU7H49aoXYEmxkrP/aw/nKZdoaqcVkgih48EAODYvpHNFvgtchj2LkqNRLJE
|
||||
kV+HAERwJipCEmliaVn0bqZyltG2uIsKoWpqaUHSnoArhIcnYAdKGtUYyQhjYF0jQdYDaYvLSEDy
|
||||
zhYxIrkVpaUDBtAE6Wggut9OV0zjSh/cJq5kG2Ou+lZbTdU6oCRnIxKx8+f4KQH4bM/R/NtQ+OBq
|
||||
z2t2e7SxMJ1Mzn3idvmbO8o6kx1L8yc1u086QkE/xFivt9qWGHzQ7XUiZ3JKfgEAAP//AwADDMfm
|
||||
FAIAAA==
|
||||
IcwMACBG6nT3Mv39HEJtBWDlfDIkUSsylf33pxoESBftQaG1aL0L/QQogsFceHRxRAucFqhYm2tz
|
||||
s96t5BBRvoD23r8IoM4owHSr6tQG0100Oxcnr+XV8f7r2Pl9o5vdneHLw8nn8/mjZScC6JP3PK05
|
||||
mZ6LgaW9g5CRy1/lGQWG88FivJwt5gviGeuz3FCAm1B3J93BbDjmern1Os0rCrxFAPD3NgF47v2I
|
||||
AoNOK+mFoVIAfwSw9CanAFVV6apWrmYHANLWzQq4xhjiUu29iVNljHD8if59g0n6qzImnqv64GV5
|
||||
Xy8vxpunqgqHt4OP64f5o0y8e/EnDIrywOjFULQxITAH0CmrkHriQrh/z5f33ps9ZYx2m85cgIu9
|
||||
QQH+SQdI7S44ZXNJAclj6teC0Xkp2UFRnJikwAVAcq3Lqo63wlqRFBh3qFRMeXh9OJEOaKVrqQPa
|
||||
toCbXuGYdirW0fhNKH1SrQzD9HhIvBXZGgVY1T6I8dsIWJnMsLGCMwylt6GOa/+Ru4oCo2lBaJ0A
|
||||
IY8nFfxMAkDzRotJNEqWGQtuxGvtNnkZSm1BRtRGAw==
|
||||
headers:
|
||||
CF-Cache-Status:
|
||||
- DYNAMIC
|
||||
CF-RAY:
|
||||
- 854b7b2acb559655-SJC
|
||||
- 858363ccdaa8a4ba-GRU
|
||||
Cache-Control:
|
||||
- no-cache, must-revalidate
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Encoding:
|
||||
- gzip
|
||||
- br
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Tue, 13 Feb 2024 07:50:35 GMT
|
||||
- Tue, 20 Feb 2024 02:41:20 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Transfer-Encoding:
|
||||
@@ -610,9 +272,9 @@ interactions:
|
||||
openai-model:
|
||||
- gpt-4-0613
|
||||
openai-organization:
|
||||
- user-z7g4wmlazxqvc5wjyaaaocfz
|
||||
- crewai-iuxna1
|
||||
openai-processing-ms:
|
||||
- '1629'
|
||||
- '1805'
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
strict-transport-security:
|
||||
@@ -624,13 +286,284 @@ interactions:
|
||||
x-ratelimit-remaining-requests:
|
||||
- '9999'
|
||||
x-ratelimit-remaining-tokens:
|
||||
- '299793'
|
||||
- '299805'
|
||||
x-ratelimit-reset-requests:
|
||||
- 6ms
|
||||
x-ratelimit-reset-tokens:
|
||||
- 41ms
|
||||
- 38ms
|
||||
x-request-id:
|
||||
- req_a6731ec59a7e505ad280af3f51dbfe3a
|
||||
- req_2267942750c31c5ca2874ac7955f01ba
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"messages": [{"role": "user", "content": "You are test role.\ntest backstory\n\nYour
|
||||
personal goal is: test goalYou have access to ONLY the following tools, use
|
||||
one at time:\n\nmultiplier: multiplier(first_number: int, second_number: int)
|
||||
-> float - Useful for when you need to multiply two numbers together.\n\nTo
|
||||
use a tool you MUST use the exact following format:\n\n```\nUse Tool: the tool
|
||||
you wanna use, should be one of [multiplier] and absolute all relevant input
|
||||
and context for using the tool, you must use only one tool at once.\nResult:
|
||||
[result of the tool]\n```\n\nTo complete the task you MUST follow the format:\n\n```\nFinal
|
||||
Answer: [THE MOST COMPLETE ANSWE WITH ALL CONTEXT, DO NOT LEAVE ANYTHING OUT]\n```
|
||||
You must use these formats, my life depends on it.This is the summary of your
|
||||
work so far:\nBegin! This is VERY important to you, your job depends on it!\n\nCurrent
|
||||
Task: What is 3 times 4?\nUse Tool: multiplier, with the first_number as 3 and
|
||||
the second_number as 4.\nResult: 12\nIf you don''t need to use any more tools,
|
||||
use the correct format for your final answer:\n\n```Final Answer: [your most
|
||||
complete final answer goes here]```\nThought: "}], "model": "gpt-4", "n": 1,
|
||||
"stop": ["\nResult"], "stream": true, "temperature": 0.7}'
|
||||
headers:
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- gzip, deflate, br
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '1257'
|
||||
content-type:
|
||||
- application/json
|
||||
cookie:
|
||||
- __cf_bm=pxRSwqCB1TUHGyPk7A3gNpxP.p.GG4odLvnn1enCzA0-1708396878-1.0-AbvCo+vNuHWyCl3Gm6D//s2A4XFGiLKt9NtnD5752PrzioOdLnmpwWLpDreFWIOxV13kHVhfEi1Y2KMEjaxfDtY=;
|
||||
_cfuvid=ZXYFqXlrgq6ZaXX8prqOY4rU1o6I8IG2Hu4kEeiJ7Rw-1708396878059-0.0-604800000
|
||||
host:
|
||||
- api.openai.com
|
||||
user-agent:
|
||||
- OpenAI/Python 1.12.0
|
||||
x-stainless-arch:
|
||||
- arm64
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- MacOS
|
||||
x-stainless-package-version:
|
||||
- 1.12.0
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.11.7
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: 'data: {"id":"chatcmpl-8uAMLnUD3TMLOxxlfdFdC7pXnV7ML","object":"chat.completion.chunk","created":1708396881,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8uAMLnUD3TMLOxxlfdFdC7pXnV7ML","object":"chat.completion.chunk","created":1708396881,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8uAMLnUD3TMLOxxlfdFdC7pXnV7ML","object":"chat.completion.chunk","created":1708396881,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
Answer"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8uAMLnUD3TMLOxxlfdFdC7pXnV7ML","object":"chat.completion.chunk","created":1708396881,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8uAMLnUD3TMLOxxlfdFdC7pXnV7ML","object":"chat.completion.chunk","created":1708396881,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
The"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8uAMLnUD3TMLOxxlfdFdC7pXnV7ML","object":"chat.completion.chunk","created":1708396881,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
result"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8uAMLnUD3TMLOxxlfdFdC7pXnV7ML","object":"chat.completion.chunk","created":1708396881,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
of"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8uAMLnUD3TMLOxxlfdFdC7pXnV7ML","object":"chat.completion.chunk","created":1708396881,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8uAMLnUD3TMLOxxlfdFdC7pXnV7ML","object":"chat.completion.chunk","created":1708396881,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"3"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8uAMLnUD3TMLOxxlfdFdC7pXnV7ML","object":"chat.completion.chunk","created":1708396881,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
times"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8uAMLnUD3TMLOxxlfdFdC7pXnV7ML","object":"chat.completion.chunk","created":1708396881,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8uAMLnUD3TMLOxxlfdFdC7pXnV7ML","object":"chat.completion.chunk","created":1708396881,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"4"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8uAMLnUD3TMLOxxlfdFdC7pXnV7ML","object":"chat.completion.chunk","created":1708396881,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
is"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8uAMLnUD3TMLOxxlfdFdC7pXnV7ML","object":"chat.completion.chunk","created":1708396881,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8uAMLnUD3TMLOxxlfdFdC7pXnV7ML","object":"chat.completion.chunk","created":1708396881,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"12"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8uAMLnUD3TMLOxxlfdFdC7pXnV7ML","object":"chat.completion.chunk","created":1708396881,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8uAMLnUD3TMLOxxlfdFdC7pXnV7ML","object":"chat.completion.chunk","created":1708396881,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
|
||||
|
||||
|
||||
data: [DONE]
|
||||
|
||||
|
||||
'
|
||||
headers:
|
||||
CF-Cache-Status:
|
||||
- DYNAMIC
|
||||
CF-RAY:
|
||||
- 858363d9ea45a4ba-GRU
|
||||
Cache-Control:
|
||||
- no-cache, must-revalidate
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- text/event-stream
|
||||
Date:
|
||||
- Tue, 20 Feb 2024 02:41:21 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
access-control-allow-origin:
|
||||
- '*'
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
openai-model:
|
||||
- gpt-4-0613
|
||||
openai-organization:
|
||||
- crewai-iuxna1
|
||||
openai-processing-ms:
|
||||
- '269'
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
strict-transport-security:
|
||||
- max-age=15724800; includeSubDomains
|
||||
x-ratelimit-limit-requests:
|
||||
- '10000'
|
||||
x-ratelimit-limit-tokens:
|
||||
- '300000'
|
||||
x-ratelimit-remaining-requests:
|
||||
- '9999'
|
||||
x-ratelimit-remaining-tokens:
|
||||
- '299708'
|
||||
x-ratelimit-reset-requests:
|
||||
- 6ms
|
||||
x-ratelimit-reset-tokens:
|
||||
- 58ms
|
||||
x-request-id:
|
||||
- req_45e08b4c819934e33f9b4554aa25a548
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"messages": [{"role": "user", "content": "Progressively summarize the
|
||||
lines of conversation provided, adding onto the previous summary returning a
|
||||
new summary.\n\nEXAMPLE\nCurrent summary:\nThe human asks what the AI thinks
|
||||
of artificial intelligence. The AI thinks artificial intelligence is a force
|
||||
for good.\n\nNew lines of conversation:\nHuman: Why do you think artificial
|
||||
intelligence is a force for good?\nAI: Because artificial intelligence will
|
||||
help humans reach their full potential.\n\nNew summary:\nThe human asks what
|
||||
the AI thinks of artificial intelligence. The AI thinks artificial intelligence
|
||||
is a force for good because it will help humans reach their full potential.\nEND
|
||||
OF EXAMPLE\n\nCurrent summary:\n\n\nNew lines of conversation:\nHuman: What
|
||||
is 3 times 4?\nAI: The result of 3 times 4 is 12.\n\nNew summary:"}], "model":
|
||||
"gpt-4", "n": 1, "stream": false, "temperature": 0.7}'
|
||||
headers:
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- gzip, deflate, br
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '900'
|
||||
content-type:
|
||||
- application/json
|
||||
cookie:
|
||||
- __cf_bm=pxRSwqCB1TUHGyPk7A3gNpxP.p.GG4odLvnn1enCzA0-1708396878-1.0-AbvCo+vNuHWyCl3Gm6D//s2A4XFGiLKt9NtnD5752PrzioOdLnmpwWLpDreFWIOxV13kHVhfEi1Y2KMEjaxfDtY=;
|
||||
_cfuvid=ZXYFqXlrgq6ZaXX8prqOY4rU1o6I8IG2Hu4kEeiJ7Rw-1708396878059-0.0-604800000
|
||||
host:
|
||||
- api.openai.com
|
||||
user-agent:
|
||||
- OpenAI/Python 1.12.0
|
||||
x-stainless-arch:
|
||||
- arm64
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- MacOS
|
||||
x-stainless-package-version:
|
||||
- 1.12.0
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.11.7
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: !!binary |
|
||||
IWAIACDGO/f6GHXvdMsf4wYM5VBp4/+7CS73Pt5RYwHtjT+wskCzPFxDNN3mE39EbaJ2m9q/vwJg
|
||||
9U4JvpUvw5t2yl+MKwcHQ3x1OpmvFu3m195scn4xW1Vv8714oCcA2tc6exuYNAgxUCtrIPBy+VL2
|
||||
Tol4Hi3S5WyxSA6/aPueKUqwcIM/8aNZnLIWS1u9ZT0l7gQA/K46APvetygRebsQhSFQAt8A7KzK
|
||||
KMGXvq/64cXoSibEtHVjEjwvMzSIOHzpm77Hx8mVHXyWLwPqjm2UDRuDzZGijlYDkwDnWMjD1n/v
|
||||
kVG4yQhXPeIkoJrsX98jZQvX2deeEmZUyg7IAYee6m0tUIL9YJ0I/V8AD35pj6a+ouusdsPTYJvM
|
||||
9JSIpwuZiFfQQHDJbDR+TKDJ1BYToSXjs/wzT3lliqxzXeUmrxmVEv/CAAM=
|
||||
headers:
|
||||
CF-Cache-Status:
|
||||
- DYNAMIC
|
||||
CF-RAY:
|
||||
- 858363e08e96a4ba-GRU
|
||||
Cache-Control:
|
||||
- no-cache, must-revalidate
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Encoding:
|
||||
- br
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Tue, 20 Feb 2024 02:41:23 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
access-control-allow-origin:
|
||||
- '*'
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
openai-model:
|
||||
- gpt-4-0613
|
||||
openai-organization:
|
||||
- crewai-iuxna1
|
||||
openai-processing-ms:
|
||||
- '1506'
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
strict-transport-security:
|
||||
- max-age=15724800; includeSubDomains
|
||||
x-ratelimit-limit-requests:
|
||||
- '10000'
|
||||
x-ratelimit-limit-tokens:
|
||||
- '300000'
|
||||
x-ratelimit-remaining-requests:
|
||||
- '9999'
|
||||
x-ratelimit-remaining-tokens:
|
||||
- '299790'
|
||||
x-ratelimit-reset-requests:
|
||||
- 6ms
|
||||
x-ratelimit-reset-tokens:
|
||||
- 42ms
|
||||
x-request-id:
|
||||
- req_47af60e4e16d4cf08c4a07328530376e
|
||||
status:
|
||||
code: 200
|
||||
message: OK
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -0,0 +1,983 @@
|
||||
interactions:
|
||||
- request:
|
||||
body: '{"messages": [{"role": "user", "content": "You are Crew Manager.\nYou are
|
||||
a seasoned manager with a knack for getting the best out of your team.\nYou
|
||||
are also known for your ability to delegate work to the right people, and to
|
||||
ask the right questions to get the best out of your team.\nEven though you don''t
|
||||
perform tasks by yourself, you have a lot of experience in the field, which
|
||||
allows you to properly evaluate the work of your team members.\n\nYour personal
|
||||
goal is: Manage the team to complete the task in the best way possible.I have
|
||||
access to ONLY the following tools, I can use only these, use one at time:\n\nDelegate
|
||||
work to co-worker: Delegate work to co-worker(coworker: str, task: str, context:
|
||||
str) - Delegate a specific task to one of the following co-workers: - Researcher\nThe
|
||||
input to this tool should be the coworker, the task you want them to do, and
|
||||
ALL necessary context to exectue the task, they know nothing about the task,
|
||||
so share absolute everything you know, don''t reference things but instead explain
|
||||
them.\nAsk question to co-worker: Ask question to co-worker(coworker: str, question:
|
||||
str, context: str) - Ask a specific question to one of the following co-workers:
|
||||
- Researcher\nThe input to this tool should be the coworker, the question you
|
||||
have for them, and ALL necessary context to ask the question properly, they
|
||||
know nothing about the question, so share absolute everything you know, don''t
|
||||
reference things but instead explain them.\n\nTo use a tool I MUST use the exact
|
||||
following format:\n\n```\nUse Tool: the tool I wanna use, should be one of [Delegate
|
||||
work to co-worker, Ask question to co-worker] and absolute all relevant input
|
||||
and context for using the tool, I must use only one tool at once.\nResult: [result
|
||||
of the tool]\n```\n\nTo give my final answer I''ll use the exact following format:\n\n```\nFinal
|
||||
Answer: [my expected final answer, entire content of my most complete final
|
||||
answer goes here]\n```\nI MUST use these formats, my jobs depends on it!\n\nCurrent
|
||||
Task: say howdy\nYour final answer must be: Howdy!\n\n Begin! This is VERY important
|
||||
to you, your job depends on it!\n\n\n"}], "model": "gpt-4", "n": 1, "stop":
|
||||
["\nResult"], "stream": true, "temperature": 0.0}'
|
||||
headers:
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- gzip, deflate, br
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '2223'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- api.openai.com
|
||||
user-agent:
|
||||
- OpenAI/Python 1.12.0
|
||||
x-stainless-arch:
|
||||
- arm64
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- MacOS
|
||||
x-stainless-package-version:
|
||||
- 1.12.0
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.11.7
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: 'data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Use"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
Tool"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
Delegate"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
work"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
to"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
co"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"-worker"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"\n"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"cow"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"ork"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"er"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
Research"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"er"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"\n"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"task"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
say"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
how"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"dy"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"\n"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"context"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
We"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
need"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
to"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
greet"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
someone"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
in"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
a"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
friendly"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":","},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
informal"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
way"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
The"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
term"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
\""},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"how"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"dy"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"\""},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
is"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
a"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
casual"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
greeting"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
often"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
used"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
in"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
the"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
United"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
States"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":","},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
particularly"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
in"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
the"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
southern"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
and"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
western"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
regions"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
It"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"''s"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
a"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
contraction"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
of"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
\""},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"how"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
do"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
you"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
do"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"\","},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
and"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
it"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"''s"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
equivalent"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
to"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
saying"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
\""},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"hello"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"\"."},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
The"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
task"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
is"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
to"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
simply"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
say"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
\""},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"how"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"dy"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"\"."},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6R80VTnILcsYYhu6Kcu2nH5g7dc","object":"chat.completion.chunk","created":1709096786,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
|
||||
|
||||
|
||||
data: [DONE]
|
||||
|
||||
|
||||
'
|
||||
headers:
|
||||
CF-Cache-Status:
|
||||
- DYNAMIC
|
||||
CF-RAY:
|
||||
- 85c62361cb0300dd-GRU
|
||||
Cache-Control:
|
||||
- no-cache, must-revalidate
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- text/event-stream
|
||||
Date:
|
||||
- Wed, 28 Feb 2024 05:06:26 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Set-Cookie:
|
||||
- __cf_bm=5oZs63dZLAiJVsMY5uQ7ak9HK0tVOst7Zje28KTpk.k-1709096786-1.0-AYuae5CK3ehBFpLq3OlQljPTqek/CaKiflLrbel+Bswjwo8HdfZFsMN5VVlV4hNdqchTvTgAp4x8Ncj1tYT5Qa4=;
|
||||
path=/; expires=Wed, 28-Feb-24 05:36:26 GMT; domain=.api.openai.com; HttpOnly;
|
||||
Secure; SameSite=None
|
||||
- _cfuvid=0kgttv029QX2lRGuZX6J5VUNsz5xgHpcA.CpfE8vI7Q-1709096786875-0.0-604800000;
|
||||
path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
access-control-allow-origin:
|
||||
- '*'
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
openai-model:
|
||||
- gpt-4-0613
|
||||
openai-organization:
|
||||
- crewai-iuxna1
|
||||
openai-processing-ms:
|
||||
- '372'
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
strict-transport-security:
|
||||
- max-age=15724800; includeSubDomains
|
||||
x-ratelimit-limit-requests:
|
||||
- '10000'
|
||||
x-ratelimit-limit-tokens:
|
||||
- '300000'
|
||||
x-ratelimit-remaining-requests:
|
||||
- '9999'
|
||||
x-ratelimit-remaining-tokens:
|
||||
- '299468'
|
||||
x-ratelimit-reset-requests:
|
||||
- 6ms
|
||||
x-ratelimit-reset-tokens:
|
||||
- 106ms
|
||||
x-request-id:
|
||||
- req_f05460238d0247e8c529eddfd65127a2
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: !!binary |
|
||||
CvMiCiQKIgoMc2VydmljZS5uYW1lEhIKEGNyZXdBSS10ZWxlbWV0cnkSyiIKEgoQY3Jld2FpLnRl
|
||||
bGVtZXRyeRKuAQoQYNbXjiQFPD01zFrwu5iogRIIYhFnuzmi6a8qEFRvb2wgVXNhZ2UgRXJyb3Iw
|
||||
ATm4C6S2eO63F0GYOqS2eO63F0pmCgNsbG0SXwpdeyJuYW1lIjogbnVsbCwgIm1vZGVsX25hbWUi
|
||||
OiAiZ3B0LTMuNS10dXJiby0wMTI1IiwgInRlbXBlcmF0dXJlIjogMC43LCAiY2xhc3MiOiAiQ2hh
|
||||
dE9wZW5BSSJ9egIYARKCCAoQmjdDdWLBesNvZKHmPF3B1RIIqxnLgk/uxpUqDENyZXcgQ3JlYXRl
|
||||
ZDABOXB1prp47rcXQWDFp7p47rcXShoKDmNyZXdhaV92ZXJzaW9uEggKBjAuMTQuNEoaCg5weXRo
|
||||
b25fdmVyc2lvbhIICgYzLjExLjdKMQoHY3Jld19pZBImCiRlYjJjYmMzOC1lYzA3LTQ5ZjctODQ5
|
||||
Ny1mODRlMGY2NTYzNzJKHAoMY3Jld19wcm9jZXNzEgwKCnNlcXVlbnRpYWxKFQoNY3Jld19sYW5n
|
||||
dWFnZRIECgJlbkoaChRjcmV3X251bWJlcl9vZl90YXNrcxICGAFKGwoVY3Jld19udW1iZXJfb2Zf
|
||||
YWdlbnRzEgIYAUraAgoLY3Jld19hZ2VudHMSygIKxwJbeyJpZCI6ICIyZDAzNzU3Mi03Mjk3LTQ5
|
||||
NTktODFkZS04NDMxM2ZhOTJjNjYiLCAicm9sZSI6ICJSZXNlYXJjaGVyIiwgIm1lbW9yeV9lbmFi
|
||||
bGVkPyI6IGZhbHNlLCAidmVyYm9zZT8iOiBmYWxzZSwgIm1heF9pdGVyIjogMTUsICJtYXhfcnBt
|
||||
IjogbnVsbCwgImkxOG4iOiAiZW4iLCAibGxtIjogIntcIm5hbWVcIjogbnVsbCwgXCJtb2RlbF9u
|
||||
YW1lXCI6IFwiZ3B0LTRcIiwgXCJ0ZW1wZXJhdHVyZVwiOiAwLjcsIFwiY2xhc3NcIjogXCJDaGF0
|
||||
T3BlbkFJXCJ9IiwgImRlbGVnYXRpb25fZW5hYmxlZD8iOiBmYWxzZSwgInRvb2xzX25hbWVzIjog
|
||||
WyJyZXR1cm5fZGF0YSJdfV1KmQEKCmNyZXdfdGFza3MSigEKhwFbeyJpZCI6ICIwZDIyMDUwNS01
|
||||
MmZlLTQ2YjUtODc3MS01YWNiM2Y2YjY3YjAiLCAiYXN5bmNfZXhlY3V0aW9uPyI6IGZhbHNlLCAi
|
||||
YWdlbnRfcm9sZSI6ICJSZXNlYXJjaGVyIiwgInRvb2xzX25hbWVzIjogWyJyZXR1cm5fZGF0YSJd
|
||||
fV1KKAoIcGxhdGZvcm0SHAoabWFjT1MtMTQuMy1hcm02NC1hcm0tNjRiaXRKHAoQcGxhdGZvcm1f
|
||||
cmVsZWFzZRIICgYyMy4zLjBKGwoPcGxhdGZvcm1fc3lzdGVtEggKBkRhcndpbkp7ChBwbGF0Zm9y
|
||||
bV92ZXJzaW9uEmcKZURhcndpbiBLZXJuZWwgVmVyc2lvbiAyMy4zLjA6IFdlZCBEZWMgMjAgMjE6
|
||||
MzA6NTkgUFNUIDIwMjM7IHJvb3Q6eG51LTEwMDAyLjgxLjV+Ny9SRUxFQVNFX0FSTTY0X1Q2MDMw
|
||||
SgoKBGNwdXMSAhgMegIYARLHAQoQD+8RIH+t/MvIeYMta48slxIInVQ/6xygtMcqClRvb2wgVXNh
|
||||
Z2UwATmAMny9eO63F0EwaXy9eO63F0oaCgl0b29sX25hbWUSDQoLcmV0dXJuX2RhdGFKDgoIYXR0
|
||||
ZW1wdHMSAhgBSlkKA2xsbRJSClB7Im5hbWUiOiBudWxsLCAibW9kZWxfbmFtZSI6ICJncHQtNCIs
|
||||
ICJ0ZW1wZXJhdHVyZSI6IDAuNywgImNsYXNzIjogIkNoYXRPcGVuQUkifXoCGAES5QcKEBg3Ofbq
|
||||
+tFCvKV8Oe5JDssSCIJmxSWDDcLTKgxDcmV3IENyZWF0ZWQwATkoT7TAeO63F0FIl7XAeO63F0oa
|
||||
Cg5jcmV3YWlfdmVyc2lvbhIICgYwLjE0LjRKGgoOcHl0aG9uX3ZlcnNpb24SCAoGMy4xMS43SjEK
|
||||
B2NyZXdfaWQSJgokZGZkZmM2YTItZjNiOC00MWUzLWE4NzYtNGExOWI3NTFkNDAzShwKDGNyZXdf
|
||||
cHJvY2VzcxIMCgpzZXF1ZW50aWFsShUKDWNyZXdfbGFuZ3VhZ2USBAoCZW5KGgoUY3Jld19udW1i
|
||||
ZXJfb2ZfdGFza3MSAhgBShsKFWNyZXdfbnVtYmVyX29mX2FnZW50cxICGAFKzAIKC2NyZXdfYWdl
|
||||
bnRzErwCCrkCW3siaWQiOiAiODE2NTU5ZTktNDZlOC00NjMwLTg2ZTYtNjM5OTE1MzQ0MzlmIiwg
|
||||
InJvbGUiOiAiUmVzZWFyY2hlciIsICJtZW1vcnlfZW5hYmxlZD8iOiBmYWxzZSwgInZlcmJvc2U/
|
||||
IjogZmFsc2UsICJtYXhfaXRlciI6IDE1LCAibWF4X3JwbSI6IG51bGwsICJpMThuIjogImVuIiwg
|
||||
ImxsbSI6ICJ7XCJuYW1lXCI6IG51bGwsIFwibW9kZWxfbmFtZVwiOiBcImdwdC00XCIsIFwidGVt
|
||||
cGVyYXR1cmVcIjogMC43LCBcImNsYXNzXCI6IFwiQ2hhdE9wZW5BSVwifSIsICJkZWxlZ2F0aW9u
|
||||
X2VuYWJsZWQ/IjogdHJ1ZSwgInRvb2xzX25hbWVzIjogW119XUqKAQoKY3Jld190YXNrcxJ8Cnpb
|
||||
eyJpZCI6ICI1OWI2NzZlYi04Y2U3LTQxZTEtYTA5MC1lZGJmNzRjOWY0ZDciLCAiYXN5bmNfZXhl
|
||||
Y3V0aW9uPyI6IGZhbHNlLCAiYWdlbnRfcm9sZSI6ICJSZXNlYXJjaGVyIiwgInRvb2xzX25hbWVz
|
||||
IjogW119XUooCghwbGF0Zm9ybRIcChptYWNPUy0xNC4zLWFybTY0LWFybS02NGJpdEocChBwbGF0
|
||||
Zm9ybV9yZWxlYXNlEggKBjIzLjMuMEobCg9wbGF0Zm9ybV9zeXN0ZW0SCAoGRGFyd2luSnsKEHBs
|
||||
YXRmb3JtX3ZlcnNpb24SZwplRGFyd2luIEtlcm5lbCBWZXJzaW9uIDIzLjMuMDogV2VkIERlYyAy
|
||||
MCAyMTozMDo1OSBQU1QgMjAyMzsgcm9vdDp4bnUtMTAwMDIuODEuNX43L1JFTEVBU0VfQVJNNjRf
|
||||
VDYwMzBKCgoEY3B1cxICGAx6AhgBEuYHChBQG5baojgWTTAUUORJsU23EghdLLRb9gcgqCoMQ3Jl
|
||||
dyBDcmVhdGVkMAE5kAxcwXjutxdBUGRdwXjutxdKGgoOY3Jld2FpX3ZlcnNpb24SCAoGMC4xNC40
|
||||
ShoKDnB5dGhvbl92ZXJzaW9uEggKBjMuMTEuN0oxCgdjcmV3X2lkEiYKJDA0YThiNTNiLWJiZDgt
|
||||
NDZkYy05MDNmLWE0OWI3YzQ4MjU1MkocCgxjcmV3X3Byb2Nlc3MSDAoKc2VxdWVudGlhbEoVCg1j
|
||||
cmV3X2xhbmd1YWdlEgQKAmVuShoKFGNyZXdfbnVtYmVyX29mX3Rhc2tzEgIYAUobChVjcmV3X251
|
||||
bWJlcl9vZl9hZ2VudHMSAhgBSs0CCgtjcmV3X2FnZW50cxK9Agq6Alt7ImlkIjogImZjYzY1ZWJm
|
||||
LTlmOTQtNGZkNi04MzYxLTMzMGJkNzVhMGExZCIsICJyb2xlIjogIlJlc2VhcmNoZXIiLCAibWVt
|
||||
b3J5X2VuYWJsZWQ/IjogZmFsc2UsICJ2ZXJib3NlPyI6IGZhbHNlLCAibWF4X2l0ZXIiOiAxNSwg
|
||||
Im1heF9ycG0iOiBudWxsLCAiaTE4biI6ICJlbiIsICJsbG0iOiAie1wibmFtZVwiOiBudWxsLCBc
|
||||
Im1vZGVsX25hbWVcIjogXCJncHQtNFwiLCBcInRlbXBlcmF0dXJlXCI6IDAuNywgXCJjbGFzc1wi
|
||||
OiBcIkNoYXRPcGVuQUlcIn0iLCAiZGVsZWdhdGlvbl9lbmFibGVkPyI6IGZhbHNlLCAidG9vbHNf
|
||||
bmFtZXMiOiBbXX1dSooBCgpjcmV3X3Rhc2tzEnwKelt7ImlkIjogIjliNTU1NDU5LTVmMjYtNDli
|
||||
OS1hMTI0LTQ2OTE1MzA0YjljMCIsICJhc3luY19leGVjdXRpb24/IjogZmFsc2UsICJhZ2VudF9y
|
||||
b2xlIjogIlJlc2VhcmNoZXIiLCAidG9vbHNfbmFtZXMiOiBbXX1dSigKCHBsYXRmb3JtEhwKGm1h
|
||||
Y09TLTE0LjMtYXJtNjQtYXJtLTY0Yml0ShwKEHBsYXRmb3JtX3JlbGVhc2USCAoGMjMuMy4wShsK
|
||||
D3BsYXRmb3JtX3N5c3RlbRIICgZEYXJ3aW5KewoQcGxhdGZvcm1fdmVyc2lvbhJnCmVEYXJ3aW4g
|
||||
S2VybmVsIFZlcnNpb24gMjMuMy4wOiBXZWQgRGVjIDIwIDIxOjMwOjU5IFBTVCAyMDIzOyByb290
|
||||
OnhudS0xMDAwMi44MS41fjcvUkVMRUFTRV9BUk02NF9UNjAzMEoKCgRjcHVzEgIYDHoCGAES4gcK
|
||||
EOtjiZ481WdO6n81Pn20ytUSCErYNg+QhMMWKgxDcmV3IENyZWF0ZWQwATmg6ufEeO63F0GgYenE
|
||||
eO63F0oaCg5jcmV3YWlfdmVyc2lvbhIICgYwLjE0LjRKGgoOcHl0aG9uX3ZlcnNpb24SCAoGMy4x
|
||||
MS43SjEKB2NyZXdfaWQSJgokNzY5NWRlMGItOWEwOS00MjBmLWFhMjgtYjEyNjBhOGJlZjJmSh4K
|
||||
DGNyZXdfcHJvY2VzcxIOCgxoaWVyYXJjaGljYWxKFQoNY3Jld19sYW5ndWFnZRIECgJlbkoaChRj
|
||||
cmV3X251bWJlcl9vZl90YXNrcxICGAFKGwoVY3Jld19udW1iZXJfb2ZfYWdlbnRzEgIYAUrNAgoL
|
||||
Y3Jld19hZ2VudHMSvQIKugJbeyJpZCI6ICJjM2NjMjgxNC03OTczLTQxZmUtYTUwMi01MTY4MjA4
|
||||
MDkwMTYiLCAicm9sZSI6ICJSZXNlYXJjaGVyIiwgIm1lbW9yeV9lbmFibGVkPyI6IGZhbHNlLCAi
|
||||
dmVyYm9zZT8iOiBmYWxzZSwgIm1heF9pdGVyIjogMTUsICJtYXhfcnBtIjogbnVsbCwgImkxOG4i
|
||||
OiAiZW4iLCAibGxtIjogIntcIm5hbWVcIjogbnVsbCwgXCJtb2RlbF9uYW1lXCI6IFwiZ3B0LTRc
|
||||
IiwgXCJ0ZW1wZXJhdHVyZVwiOiAwLjcsIFwiY2xhc3NcIjogXCJDaGF0T3BlbkFJXCJ9IiwgImRl
|
||||
bGVnYXRpb25fZW5hYmxlZD8iOiBmYWxzZSwgInRvb2xzX25hbWVzIjogW119XUqEAQoKY3Jld190
|
||||
YXNrcxJ2CnRbeyJpZCI6ICJmMzVhNjZjMC02YTg4LTQxNjktYTEwMC0wNjIwNTIyNTZiNTUiLCAi
|
||||
YXN5bmNfZXhlY3V0aW9uPyI6IGZhbHNlLCAiYWdlbnRfcm9sZSI6ICJOb25lIiwgInRvb2xzX25h
|
||||
bWVzIjogW119XUooCghwbGF0Zm9ybRIcChptYWNPUy0xNC4zLWFybTY0LWFybS02NGJpdEocChBw
|
||||
bGF0Zm9ybV9yZWxlYXNlEggKBjIzLjMuMEobCg9wbGF0Zm9ybV9zeXN0ZW0SCAoGRGFyd2luSnsK
|
||||
EHBsYXRmb3JtX3ZlcnNpb24SZwplRGFyd2luIEtlcm5lbCBWZXJzaW9uIDIzLjMuMDogV2VkIERl
|
||||
YyAyMCAyMTozMDo1OSBQU1QgMjAyMzsgcm9vdDp4bnUtMTAwMDIuODEuNX43L1JFTEVBU0VfQVJN
|
||||
NjRfVDYwMzBKCgoEY3B1cxICGAx6AhgB
|
||||
headers:
|
||||
Accept:
|
||||
- '*/*'
|
||||
Accept-Encoding:
|
||||
- gzip, deflate, br
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Length:
|
||||
- '4470'
|
||||
Content-Type:
|
||||
- application/x-protobuf
|
||||
User-Agent:
|
||||
- OTel-OTLP-Exporter-Python/1.23.0
|
||||
method: POST
|
||||
uri: http://telemetry.crewai.com:4318/v1/traces
|
||||
response:
|
||||
body:
|
||||
string: "\n\0"
|
||||
headers:
|
||||
Content-Length:
|
||||
- '2'
|
||||
Content-Type:
|
||||
- application/x-protobuf
|
||||
Date:
|
||||
- Wed, 28 Feb 2024 05:06:31 GMT
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"messages": [{"role": "user", "content": "Only tools available:\n###\nTool
|
||||
Name: delegate work to co-worker\nTool Description: Delegate work to co-worker(coworker:
|
||||
str, task: str, context: str) - Delegate a specific task to one of the following
|
||||
co-workers: - Researcher\nThe input to this tool should be the coworker, the
|
||||
task you want them to do, and ALL necessary context to exectue the task, they
|
||||
know nothing about the task, so share absolute everything you know, don''t reference
|
||||
things but instead explain them.\nTool Arguments: {''coworker'': {''type'':
|
||||
''string''}, ''task'': {''type'': ''string''}, ''context'': {''type'': ''string''}}\n--\nTool
|
||||
Name: ask question to co-worker\nTool Description: Ask question to co-worker(coworker:
|
||||
str, question: str, context: str) - Ask a specific question to one of the following
co-workers: - Researcher\nThe input to this tool should be the coworker, the
question you have for them, and ALL necessary context to ask the question properly,
they know nothing about the question, so share absolute everything you know,
don''t reference things but instead explain them.\nTool Arguments: {''coworker'':
{''type'': ''string''}, ''question'': {''type'': ''string''}, ''context'': {''type'':
''string''}}\n\nReturn a valid schema for the tool, the tool name must be exactly
equal one of the options, use this text to inform the valid ouput schema:\n\nUse
Tool: Delegate work to co-worker\ncoworker: Researcher\ntask: say howdy\ncontext:
We need to greet someone in a friendly, informal way. The term \"howdy\" is
a casual greeting often used in the United States, particularly in the southern
and western regions. It''s a contraction of \"how do you do\", and it''s equivalent
to saying \"hello\". The task is to simply say \"howdy\".```"}, {"role": "system",
"content": "The schema should have the following structure, only two keys:\n-
tool_name: str\n- arguments: dict (with all arguments being passed)\n\nExample:\n{\"tool_name\":
\"tool name\", \"arguments\": {\"arg_name1\": \"value\", \"arg_name2\": 2}}"}],
"model": "gpt-4", "tool_choice": {"type": "function", "function": {"name": "InstructorToolCalling"}},
"tools": [{"type": "function", "function": {"name": "InstructorToolCalling",
"description": "Correctly extracted `InstructorToolCalling` with all the required
parameters with correct types", "parameters": {"properties": {"tool_name": {"description":
"The name of the tool to be called.", "title": "Tool Name", "type": "string"},
"arguments": {"anyOf": [{"type": "object"}, {"type": "null"}], "description":
"A dictinary of arguments to be passed to the tool.", "title": "Arguments"}},
"required": ["arguments", "tool_name"], "type": "object"}}}]}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate, br
connection:
- keep-alive
content-length:
- '2664'
content-type:
- application/json
cookie:
- __cf_bm=5oZs63dZLAiJVsMY5uQ7ak9HK0tVOst7Zje28KTpk.k-1709096786-1.0-AYuae5CK3ehBFpLq3OlQljPTqek/CaKiflLrbel+Bswjwo8HdfZFsMN5VVlV4hNdqchTvTgAp4x8Ncj1tYT5Qa4=;
_cfuvid=0kgttv029QX2lRGuZX6J5VUNsz5xgHpcA.CpfE8vI7Q-1709096786875-0.0-604800000
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.12.0
x-stainless-arch:
- arm64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- MacOS
x-stainless-package-version:
- 1.12.0
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.11.7
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: !!binary |
MTQSACC+b86653L6T7MEBfR1AcgDnkRNvVWsjO20fDadUlUVSv5SnYO15VIdl6oDVYlESGSDMFNS
LEIiPCgkEomtuaXbu4wHxMBNwZ57KADyFRlQuXS5bFp5PNtOvj6T3uRXbERmN69nL2Ty+tn750P/
bviSHhUA6fV/LjOn+mSJQew1QPjO5de4IoP+tDfvzSfTeY94T6MVCxlQ3ebHo8e9SX/IdXWpvuRE
Bv8KADg8bQF07v2IDGSQt3wVhkcG+CWAogqTAbmUfMouZHoEAGnr9hqEToRYlVXlsnQiyukdH9pg
kH7qRC4nH329ir/7g1Kfff0ZXyw3Lzq/6GY6qXhr13ZKrMAoxAjaJqNwDqDgGoO8N2EL9/dp/K4q
z5yID7WwFkAfO0EGdLDB2vZHwTVsycBSxcK1y4yNxhWyotTHG40rjpYe2WClyJYM3FJBKwaWvnLC
QMHUzy6tyOnkdljqptpdZLztbeZ98BcjMFfIijoyZyRtWAPDBzgEXvkR6lGtJdi43RN8XzIyxwbW
WpJfZS3BJzjUKksl7vhQYzbAKHW2IH1G9lt2mdMjtDvgr3TiouxgfIqAx4K8acMpcwyIXHsN6Qne
5LvaRhpy3KO/CV305wNUip12qNRaS1XVZn2+m8A3nU//UM74BwGxDzVjxCIMzLg5l1bwCZj1TSs7
JLfTQK198YklG042nMg58MlVQFs5dyD15GOHkWjdRr1OLq9S3kF0WeNtggwoZW3VzE4FcB6L6114
1aI2atPmy6wrDokMxuOBTERhDxBSvzcFv+kAEAeT8bzopka5II5cLnyoObbRx2ZYnAoDAw==
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 85c6237aee5900dd-GRU
Cache-Control:
- no-cache, must-revalidate
Connection:
- keep-alive
Content-Encoding:
- br
Content-Type:
- application/json
Date:
- Wed, 28 Feb 2024 05:06:35 GMT
Server:
- cloudflare
Transfer-Encoding:
- chunked
access-control-allow-origin:
- '*'
alt-svc:
- h3=":443"; ma=86400
openai-model:
- gpt-4-0613
openai-organization:
- crewai-iuxna1
openai-processing-ms:
- '4513'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=15724800; includeSubDomains
x-ratelimit-limit-requests:
- '10000'
x-ratelimit-limit-tokens:
- '300000'
x-ratelimit-remaining-requests:
- '9999'
x-ratelimit-remaining-tokens:
- '299509'
x-ratelimit-reset-requests:
- 6ms
x-ratelimit-reset-tokens:
- 98ms
x-request-id:
- req_434efa7f0a5c47ce6c03a8b4ab18dfd7
status:
code: 200
message: OK
- request:
body: '{"messages": [{"role": "user", "content": "You are Researcher.\nYou''re
love to sey howdy.\n\nYour personal goal is: Be super empathetic.To give my
final answer use the exact following format:\n\n```\nFinal Answer: [my expected
final answer, entire content of my most complete final answer goes here]\n```\nI
MUST use these formats, my jobs depends on it!\n\nCurrent Task: say howdy\nThis
is the context you''re working with:\nWe need to greet someone in a friendly,
informal way. The term \"howdy\" is a casual greeting often used in the United
States, particularly in the southern and western regions. It''s a contraction
of \"how do you do\", and it''s equivalent to saying \"hello\". The task is
to simply say \"howdy\".\n\n Begin! This is VERY important to you, your job
depends on it!\n\n\n"}], "model": "gpt-4", "n": 1, "stop": ["\nResult"], "stream":
true, "temperature": 0.7}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate, br
connection:
- keep-alive
content-length:
- '880'
content-type:
- application/json
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.12.0
x-stainless-arch:
- arm64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- MacOS
x-stainless-package-version:
- 1.12.0
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.11.7
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: 'data: {"id":"chatcmpl-8x6RHLmZq8uUkVMaJgVLqT5Hq01Uj","object":"chat.completion.chunk","created":1709096795,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8x6RHLmZq8uUkVMaJgVLqT5Hq01Uj","object":"chat.completion.chunk","created":1709096795,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8x6RHLmZq8uUkVMaJgVLqT5Hq01Uj","object":"chat.completion.chunk","created":1709096795,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
Answer"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8x6RHLmZq8uUkVMaJgVLqT5Hq01Uj","object":"chat.completion.chunk","created":1709096795,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8x6RHLmZq8uUkVMaJgVLqT5Hq01Uj","object":"chat.completion.chunk","created":1709096795,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
How"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8x6RHLmZq8uUkVMaJgVLqT5Hq01Uj","object":"chat.completion.chunk","created":1709096795,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"dy"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8x6RHLmZq8uUkVMaJgVLqT5Hq01Uj","object":"chat.completion.chunk","created":1709096795,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"!"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8x6RHLmZq8uUkVMaJgVLqT5Hq01Uj","object":"chat.completion.chunk","created":1709096795,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
data: [DONE]
'
headers:
|
||||
CF-Cache-Status:
|
||||
- DYNAMIC
|
||||
CF-RAY:
|
||||
- 85c6239af966a4c3-GRU
|
||||
Cache-Control:
|
||||
- no-cache, must-revalidate
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- text/event-stream
|
||||
Date:
|
||||
- Wed, 28 Feb 2024 05:06:35 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Set-Cookie:
|
||||
- __cf_bm=rQM_3i5vsgWA7RXqaEYRq1KSxd5JDXRScrpMeiwYZm0-1709096795-1.0-ASrbmefvM3BGg5hzZQYt26xfGYuAKFdDMlTLywyI5oK7E5/BO6bxjYQD+uPFr4MSTydfuvnr7l8LTIz3bxQFE7E=;
|
||||
path=/; expires=Wed, 28-Feb-24 05:36:35 GMT; domain=.api.openai.com; HttpOnly;
|
||||
Secure; SameSite=None
|
||||
- _cfuvid=FF7KcQqaDaMEwpz4eGbgI.lNAMAlEVQAWHqsQtW2lzk-1709096795853-0.0-604800000;
|
||||
path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
access-control-allow-origin:
|
||||
- '*'
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
openai-model:
|
||||
- gpt-4-0613
|
||||
openai-organization:
|
||||
- crewai-iuxna1
|
||||
openai-processing-ms:
|
||||
- '213'
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
strict-transport-security:
|
||||
- max-age=15724800; includeSubDomains
|
||||
x-ratelimit-limit-requests:
|
||||
- '10000'
|
||||
x-ratelimit-limit-tokens:
|
||||
- '300000'
|
||||
x-ratelimit-remaining-requests:
|
||||
- '9999'
|
||||
x-ratelimit-remaining-tokens:
|
||||
- '299801'
|
||||
x-ratelimit-reset-requests:
|
||||
- 6ms
|
||||
x-ratelimit-reset-tokens:
|
||||
- 39ms
|
||||
x-request-id:
|
||||
- req_7d8573a76975cb05491e81b4321361a8
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"messages": [{"role": "user", "content": "You are Crew Manager.\nYou are
|
||||
a seasoned manager with a knack for getting the best out of your team.\nYou
|
||||
are also known for your ability to delegate work to the right people, and to
|
||||
ask the right questions to get the best out of your team.\nEven though you don''t
|
||||
perform tasks by yourself, you have a lot of experience in the field, which
|
||||
allows you to properly evaluate the work of your team members.\n\nYour personal
|
||||
goal is: Manage the team to complete the task in the best way possible.I have
|
||||
access to ONLY the following tools, I can use only these, use one at time:\n\nDelegate
|
||||
work to co-worker: Delegate work to co-worker(coworker: str, task: str, context:
|
||||
str) - Delegate a specific task to one of the following co-workers: - Researcher\nThe
|
||||
input to this tool should be the coworker, the task you want them to do, and
|
||||
ALL necessary context to exectue the task, they know nothing about the task,
|
||||
so share absolute everything you know, don''t reference things but instead explain
|
||||
them.\nAsk question to co-worker: Ask question to co-worker(coworker: str, question:
|
||||
str, context: str) - Ask a specific question to one of the following co-workers:
|
||||
- Researcher\nThe input to this tool should be the coworker, the question you
|
||||
have for them, and ALL necessary context to ask the question properly, they
|
||||
know nothing about the question, so share absolute everything you know, don''t
|
||||
reference things but instead explain them.\n\nTo use a tool I MUST use the exact
|
||||
following format:\n\n```\nUse Tool: the tool I wanna use, should be one of [Delegate
|
||||
work to co-worker, Ask question to co-worker] and absolute all relevant input
|
||||
and context for using the tool, I must use only one tool at once.\nResult: [result
|
||||
of the tool]\n```\n\nTo give my final answer I''ll use the exact following format:\n\n```\nFinal
|
||||
Answer: [my expected final answer, entire content of my most complete final
|
||||
answer goes here]\n```\nI MUST use these formats, my jobs depends on it!\n\nCurrent
|
||||
Task: say howdy\nYour final answer must be: Howdy!\n\n Begin! This is VERY important
|
||||
to you, your job depends on it!\n\n\nUse Tool: Delegate work to co-worker\ncoworker:
|
||||
Researcher\ntask: say howdy\ncontext: We need to greet someone in a friendly,
|
||||
informal way. The term \"howdy\" is a casual greeting often used in the United
|
||||
States, particularly in the southern and western regions. It''s a contraction
|
||||
of \"how do you do\", and it''s equivalent to saying \"hello\". The task is
|
||||
to simply say \"howdy\".\nResult: Howdy!\n\nIf I don''t need to use any more
|
||||
tools, I must make sure use the correct format to give my final answer:\n\n```Final
|
||||
Answer: [my expected final answer, entire content of my most complete final
|
||||
answer goes here]```\n I MUST use these formats, my jobs depends on it!\n"}],
|
||||
"model": "gpt-4", "n": 1, "stop": ["\nResult"], "stream": true, "temperature":
|
||||
0.0}'
|
||||
headers:
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- gzip, deflate, br
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '2889'
|
||||
content-type:
|
||||
- application/json
|
||||
cookie:
|
||||
- __cf_bm=5oZs63dZLAiJVsMY5uQ7ak9HK0tVOst7Zje28KTpk.k-1709096786-1.0-AYuae5CK3ehBFpLq3OlQljPTqek/CaKiflLrbel+Bswjwo8HdfZFsMN5VVlV4hNdqchTvTgAp4x8Ncj1tYT5Qa4=;
|
||||
_cfuvid=0kgttv029QX2lRGuZX6J5VUNsz5xgHpcA.CpfE8vI7Q-1709096786875-0.0-604800000
|
||||
host:
|
||||
- api.openai.com
|
||||
user-agent:
|
||||
- OpenAI/Python 1.12.0
|
||||
x-stainless-arch:
|
||||
- arm64
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- MacOS
|
||||
x-stainless-package-version:
|
||||
- 1.12.0
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.11.7
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: 'data: {"id":"chatcmpl-8x6RIEFlgVwMeEcLBEYUfNyt4TKNo","object":"chat.completion.chunk","created":1709096796,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6RIEFlgVwMeEcLBEYUfNyt4TKNo","object":"chat.completion.chunk","created":1709096796,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6RIEFlgVwMeEcLBEYUfNyt4TKNo","object":"chat.completion.chunk","created":1709096796,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
Answer"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6RIEFlgVwMeEcLBEYUfNyt4TKNo","object":"chat.completion.chunk","created":1709096796,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6RIEFlgVwMeEcLBEYUfNyt4TKNo","object":"chat.completion.chunk","created":1709096796,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
How"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6RIEFlgVwMeEcLBEYUfNyt4TKNo","object":"chat.completion.chunk","created":1709096796,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"dy"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6RIEFlgVwMeEcLBEYUfNyt4TKNo","object":"chat.completion.chunk","created":1709096796,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"!"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6RIEFlgVwMeEcLBEYUfNyt4TKNo","object":"chat.completion.chunk","created":1709096796,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
|
||||
|
||||
|
||||
data: [DONE]
|
||||
|
||||
|
||||
'
|
||||
headers:
|
||||
CF-Cache-Status:
|
||||
- DYNAMIC
|
||||
CF-RAY:
|
||||
- 85c623a1696900dd-GRU
|
||||
Cache-Control:
|
||||
- no-cache, must-revalidate
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- text/event-stream
|
||||
Date:
|
||||
- Wed, 28 Feb 2024 05:06:36 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
access-control-allow-origin:
|
||||
- '*'
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
openai-model:
|
||||
- gpt-4-0613
|
||||
openai-organization:
|
||||
- crewai-iuxna1
|
||||
openai-processing-ms:
|
||||
- '288'
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
strict-transport-security:
|
||||
- max-age=15724800; includeSubDomains
|
||||
x-ratelimit-limit-requests:
|
||||
- '10000'
|
||||
x-ratelimit-limit-tokens:
|
||||
- '300000'
|
||||
x-ratelimit-remaining-requests:
|
||||
- '9999'
|
||||
x-ratelimit-remaining-tokens:
|
||||
- '299305'
|
||||
x-ratelimit-reset-requests:
|
||||
- 6ms
|
||||
x-ratelimit-reset-tokens:
|
||||
- 138ms
|
||||
x-request-id:
|
||||
- req_1000191581c321470f8139624b7cb534
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
version: 1
|
||||
@@ -0,0 +1,128 @@
|
||||
interactions:
|
||||
- request:
|
||||
body: '{"messages": [{"role": "user", "content": "You are Researcher.\nYou''re
|
||||
love to sey howdy.\n\nYour personal goal is: Be super empathetic.To give my
|
||||
final answer use the exact following format:\n\n```\nFinal Answer: [my expected
|
||||
final answer, entire content of my most complete final answer goes here]\n```\nI
|
||||
MUST use these formats, my jobs depends on it!\n\nCurrent Task: say howdy\nYour
|
||||
final answer must be: Howdy!\n\n Begin! This is VERY important to you, your
|
||||
job depends on it!\n\n\n"}], "model": "gpt-4", "n": 1, "stop": ["\nResult"],
|
||||
"stream": true, "temperature": 0.7}'
|
||||
headers:
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- gzip, deflate, br
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '576'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- api.openai.com
|
||||
user-agent:
|
||||
- OpenAI/Python 1.12.0
|
||||
x-stainless-arch:
|
||||
- arm64
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- MacOS
|
||||
x-stainless-package-version:
|
||||
- 1.12.0
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.11.7
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: 'data: {"id":"chatcmpl-8x6MvBpPA2nu9oAtIveDbDV6gr0jF","object":"chat.completion.chunk","created":1709096525,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6MvBpPA2nu9oAtIveDbDV6gr0jF","object":"chat.completion.chunk","created":1709096525,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6MvBpPA2nu9oAtIveDbDV6gr0jF","object":"chat.completion.chunk","created":1709096525,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
Answer"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6MvBpPA2nu9oAtIveDbDV6gr0jF","object":"chat.completion.chunk","created":1709096525,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6MvBpPA2nu9oAtIveDbDV6gr0jF","object":"chat.completion.chunk","created":1709096525,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
How"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6MvBpPA2nu9oAtIveDbDV6gr0jF","object":"chat.completion.chunk","created":1709096525,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"dy"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6MvBpPA2nu9oAtIveDbDV6gr0jF","object":"chat.completion.chunk","created":1709096525,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"!"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6MvBpPA2nu9oAtIveDbDV6gr0jF","object":"chat.completion.chunk","created":1709096525,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
|
||||
|
||||
|
||||
data: [DONE]
|
||||
|
||||
|
||||
'
|
||||
headers:
|
||||
CF-Cache-Status:
|
||||
- DYNAMIC
|
||||
CF-RAY:
|
||||
- 85c61cfe5aa11abf-GRU
|
||||
Cache-Control:
|
||||
- no-cache, must-revalidate
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- text/event-stream
|
||||
Date:
|
||||
- Wed, 28 Feb 2024 05:02:05 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Set-Cookie:
|
||||
- __cf_bm=t1wx1UUHIo40OZhmzvw5Lfl6GU6rBSOqgVYaroZ_oPY-1709096525-1.0-Adu6235ja9DptuJzagyyjisq3WtY+DAJocWHBAx6XJtDZqQ8F9/xqFdCEsEA1j50+7qghMgFIbQ8zjJTiZZR9jE=;
|
||||
path=/; expires=Wed, 28-Feb-24 05:32:05 GMT; domain=.api.openai.com; HttpOnly;
|
||||
Secure; SameSite=None
|
||||
- _cfuvid=mfFvKXqWOwPiPpWZYg93kKab4M0lyRh10j5DBKVWpZs-1709096525319-0.0-604800000;
|
||||
path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
access-control-allow-origin:
|
||||
- '*'
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
openai-model:
|
||||
- gpt-4-0613
|
||||
openai-organization:
|
||||
- crewai-iuxna1
|
||||
openai-processing-ms:
|
||||
- '169'
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
strict-transport-security:
|
||||
- max-age=15724800; includeSubDomains
|
||||
x-ratelimit-limit-requests:
|
||||
- '10000'
|
||||
x-ratelimit-limit-tokens:
|
||||
- '300000'
|
||||
x-ratelimit-remaining-requests:
|
||||
- '9999'
|
||||
x-ratelimit-remaining-tokens:
|
||||
- '299875'
|
||||
x-ratelimit-reset-requests:
|
||||
- 6ms
|
||||
x-ratelimit-reset-tokens:
|
||||
- 25ms
|
||||
x-request-id:
|
||||
- req_bcadd3e1e6d7917b5f4013487e66ce3b
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
version: 1
|
||||
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -0,0 +1,128 @@
|
||||
interactions:
|
||||
- request:
|
||||
body: '{"messages": [{"role": "user", "content": "You are Researcher.\nYou''re
|
||||
love to sey howdy.\n\nYour personal goal is: Be super empathetic.To give my
|
||||
final answer use the exact following format:\n\n```\nFinal Answer: [my expected
|
||||
final answer, entire content of my most complete final answer goes here]\n```\nI
|
||||
MUST use these formats, my jobs depends on it!\n\nCurrent Task: say howdy\nYour
|
||||
final answer must be: Howdy!\n\n Begin! This is VERY important to you, your
|
||||
job depends on it!\n\n\n"}], "model": "gpt-4", "n": 1, "stop": ["\nResult"],
|
||||
"stream": true, "temperature": 0.7}'
|
||||
headers:
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- gzip, deflate, br
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '576'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- api.openai.com
|
||||
user-agent:
|
||||
- OpenAI/Python 1.12.0
|
||||
x-stainless-arch:
|
||||
- arm64
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- MacOS
|
||||
x-stainless-package-version:
|
||||
- 1.12.0
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.11.7
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: 'data: {"id":"chatcmpl-8x6VCJjr1jrfGEqp8rs8S70MMlNF0","object":"chat.completion.chunk","created":1709097038,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6VCJjr1jrfGEqp8rs8S70MMlNF0","object":"chat.completion.chunk","created":1709097038,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6VCJjr1jrfGEqp8rs8S70MMlNF0","object":"chat.completion.chunk","created":1709097038,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
Answer"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6VCJjr1jrfGEqp8rs8S70MMlNF0","object":"chat.completion.chunk","created":1709097038,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6VCJjr1jrfGEqp8rs8S70MMlNF0","object":"chat.completion.chunk","created":1709097038,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
How"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6VCJjr1jrfGEqp8rs8S70MMlNF0","object":"chat.completion.chunk","created":1709097038,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"dy"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6VCJjr1jrfGEqp8rs8S70MMlNF0","object":"chat.completion.chunk","created":1709097038,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"!"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6VCJjr1jrfGEqp8rs8S70MMlNF0","object":"chat.completion.chunk","created":1709097038,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
|
||||
|
||||
|
||||
data: [DONE]
|
||||
|
||||
|
||||
'
|
||||
headers:
|
||||
CF-Cache-Status:
|
||||
- DYNAMIC
|
||||
CF-RAY:
|
||||
- 85c6298c5f1800f2-GRU
|
||||
Cache-Control:
|
||||
- no-cache, must-revalidate
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- text/event-stream
|
||||
Date:
|
||||
- Wed, 28 Feb 2024 05:10:39 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Set-Cookie:
|
||||
- __cf_bm=d2ICjM2qYQhS2M37l0hI5SS.xRz8EjklLB.2h7ZIrfM-1709097039-1.0-AaE39xkM2MDu0fdcAOQjTQKyC+FU657D+YE0JBSEhromKV2iHVf/UfmGInkLBt0CXrhLf3w6il+xyCUhogjNACw=;
|
||||
path=/; expires=Wed, 28-Feb-24 05:40:39 GMT; domain=.api.openai.com; HttpOnly;
|
||||
Secure; SameSite=None
|
||||
- _cfuvid=wtrchvMDCF4qH99ZZbYs3XyIWUSjktLudGDSC4IAnro-1709097039247-0.0-604800000;
|
||||
path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
access-control-allow-origin:
|
||||
- '*'
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
openai-model:
|
||||
- gpt-4-0613
|
||||
openai-organization:
|
||||
- crewai-iuxna1
|
||||
openai-processing-ms:
|
||||
- '163'
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
strict-transport-security:
|
||||
- max-age=15724800; includeSubDomains
|
||||
x-ratelimit-limit-requests:
|
||||
- '10000'
|
||||
x-ratelimit-limit-tokens:
|
||||
- '300000'
|
||||
x-ratelimit-remaining-requests:
|
||||
- '9999'
|
||||
x-ratelimit-remaining-tokens:
|
||||
- '299875'
|
||||
x-ratelimit-reset-requests:
|
||||
- 6ms
|
||||
x-ratelimit-reset-tokens:
|
||||
- 25ms
|
||||
x-request-id:
|
||||
- req_e37a897b189f6ae6544b520ebcdbf005
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
version: 1
|
||||
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,81 +1,21 @@
|
||||
interactions:
|
||||
- request:
|
||||
body: !!binary |
|
||||
CuwKCiQKIgoMc2VydmljZS5uYW1lEhIKEGNyZXdBSS10ZWxlbWV0cnkSwwoKEgoQY3Jld2FpLnRl
|
||||
bGVtZXRyeRLMAQoQwp0SIfudEwsrKbf+lo58IBIIf611tHXnM6YqClRvb2wgVXNhZ2UwATlwwj4a
|
||||
iF2zF0FImT8aiF2zF0ofCgl0b29sX25hbWUSEgoQZ2V0X2ZpbmFsX2Fuc3dlckoOCghhdHRlbXB0
|
||||
cxICGAFKWQoDbGxtElIKUHsibmFtZSI6IG51bGwsICJtb2RlbF9uYW1lIjogImdwdC00IiwgInRl
|
||||
bXBlcmF0dXJlIjogMC43LCAiY2xhc3MiOiAiQ2hhdE9wZW5BSSJ9egIYARLdCAoQOtWPiAfm5nEl
|
||||
WCn75Vb4mRII6VcVnnypR+gqDENyZXcgQ3JlYXRlZDABOQjIxNKIXbMXQbjyxtKIXbMXShoKDmNy
|
||||
ZXdhaV92ZXJzaW9uEggKBjAuMTAuMkoaCg5weXRob25fdmVyc2lvbhIICgYzLjExLjdKMQoHY3Jl
|
||||
d19pZBImCiQ4ZjMyMmYyNS1jYmIyLTRhZmQtOWY1MC03MmRjYWIxOGUzOTlKHAoMY3Jld19wcm9j
|
||||
ZXNzEgwKCnNlcXVlbnRpYWxKFQoNY3Jld19sYW5ndWFnZRIECgJlbkoaChRjcmV3X251bWJlcl9v
|
||||
Zl90YXNrcxICGAJKGwoVY3Jld19udW1iZXJfb2ZfYWdlbnRzEgIYAUrKAgoLY3Jld19hZ2VudHMS
|
||||
ugIKtwJbeyJpZCI6ICI0YzZiNzM0Mi1iZThiLTRiMTItYTQ1Zi0yMDIwNmU0NWQwNTQiLCAicm9s
|
||||
ZSI6ICJ0ZXN0IHJvbGUiLCAibWVtb3J5X2VuYWJsZWQ/IjogdHJ1ZSwgInZlcmJvc2U/IjogdHJ1
|
||||
ZSwgIm1heF9pdGVyIjogMTUsICJtYXhfcnBtIjogbnVsbCwgImkxOG4iOiAiZW4iLCAibGxtIjog
|
||||
IntcIm5hbWVcIjogbnVsbCwgXCJtb2RlbF9uYW1lXCI6IFwiZ3B0LTRcIiwgXCJ0ZW1wZXJhdHVy
|
||||
ZVwiOiAwLjcsIFwiY2xhc3NcIjogXCJDaGF0T3BlbkFJXCJ9IiwgImRlbGVnYXRpb25fZW5hYmxl
|
||||
ZD8iOiBmYWxzZSwgInRvb2xzX25hbWVzIjogW119XUqEAgoKY3Jld190YXNrcxL1AQryAVt7Imlk
|
||||
IjogImJkMGU1OWRhLTc3NDktNDlmMS1iZjEyLWQ2ZjcyMDkyMmZjOSIsICJhc3luY19leGVjdXRp
|
||||
b24/IjogZmFsc2UsICJhZ2VudF9yb2xlIjogInRlc3Qgcm9sZSIsICJ0b29sc19uYW1lcyI6IFtd
|
||||
fSwgeyJpZCI6ICJlNWUxNGIwNS0xZmY5LTQ5OTktOWQ4NS04YjdlMzRiZjA0ZDgiLCAiYXN5bmNf
|
||||
ZXhlY3V0aW9uPyI6IGZhbHNlLCAiYWdlbnRfcm9sZSI6ICJ0ZXN0IHJvbGUiLCAidG9vbHNfbmFt
|
||||
ZXMiOiBbXX1dSigKCHBsYXRmb3JtEhwKGm1hY09TLTE0LjMtYXJtNjQtYXJtLTY0Yml0ShwKEHBs
|
||||
YXRmb3JtX3JlbGVhc2USCAoGMjMuMy4wShsKD3BsYXRmb3JtX3N5c3RlbRIICgZEYXJ3aW5KewoQ
|
||||
cGxhdGZvcm1fdmVyc2lvbhJnCmVEYXJ3aW4gS2VybmVsIFZlcnNpb24gMjMuMy4wOiBXZWQgRGVj
|
||||
IDIwIDIxOjMwOjU5IFBTVCAyMDIzOyByb290OnhudS0xMDAwMi44MS41fjcvUkVMRUFTRV9BUk02
|
||||
NF9UNjAzMEoKCgRjcHVzEgIYDHoCGAE=
|
||||
headers:
|
||||
Accept:
|
||||
- '*/*'
|
||||
Accept-Encoding:
|
||||
- gzip, deflate
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Length:
|
||||
- '1391'
|
||||
Content-Type:
|
||||
- application/x-protobuf
|
||||
User-Agent:
|
||||
- OTel-OTLP-Exporter-Python/1.22.0
|
||||
method: POST
|
||||
uri: http://telemetry.crewai.com:4318/v1/traces
|
||||
response:
|
||||
body:
|
||||
string: "\n\0"
|
||||
headers:
|
||||
Content-Length:
|
||||
- '2'
|
||||
Content-Type:
|
||||
- application/x-protobuf
|
||||
Date:
|
||||
- Tue, 13 Feb 2024 08:05:26 GMT
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"messages": [{"role": "user", "content": "You are test role.\ntest backstory\n\nYour
|
||||
personal goal is: test goalTOOLS:\n------\nYou have access to only the following
|
||||
tools:\n\n('''',)\n\nTo use a tool, please use the exact following format:\n\n```\nThought:
|
||||
Do I need to use a tool? Yes\nAction: the tool you wanna use, should be one
|
||||
of [], just the name.\nAction Input: Any and all relevant information input
|
||||
and context for using the tool\nObservation: the result of using the tool\n```\n\nWhen
|
||||
you have a response for your task, or if you do not need to use a tool, you
|
||||
MUST use the format:\n\n```\nThought: Do I need to use a tool? No\nFinal Answer:
|
||||
[your response here]```This is the summary of your work so far:\nBegin! This
|
||||
is VERY important to you, your job depends on it!\n\nCurrent Task: just say
|
||||
hi!\n"}], "model": "gpt-4", "n": 1, "stop": ["\nObservation"], "stream": true,
|
||||
personal goal is: test goalTo give my final answer use the exact following format:\n\n```\nFinal
|
||||
Answer: [my expected final answer, entire content of my most complete final
|
||||
answer goes here]\n```\nI MUST use these formats, my jobs depends on it!\n\nCurrent
|
||||
Task: just say hi!\n\n Begin! This is VERY important to you, your job depends
|
||||
on it!\n\n\n"}], "model": "gpt-4", "n": 1, "stop": ["\nResult"], "stream": true,
|
||||
"temperature": 0.7}'
|
||||
headers:
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- gzip, deflate
|
||||
- gzip, deflate, br
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '904'
|
||||
- '521'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
@@ -100,71 +40,27 @@ interactions:
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: 'data: {"id":"chatcmpl-8ri580menKdx2UwVSxcCvbrHE69Ui","object":"chat.completion.chunk","created":1707811526,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
|
||||
string: 'data: {"id":"chatcmpl-8x6vNrtf0qKpk299ZRwX2pR8oyOZc","object":"chat.completion.chunk","created":1709098661,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8ri580menKdx2UwVSxcCvbrHE69Ui","object":"chat.completion.chunk","created":1707811526,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Thought"},"logprobs":null,"finish_reason":null}]}
|
||||
data: {"id":"chatcmpl-8x6vNrtf0qKpk299ZRwX2pR8oyOZc","object":"chat.completion.chunk","created":1709098661,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8ri580menKdx2UwVSxcCvbrHE69Ui","object":"chat.completion.chunk","created":1707811526,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8ri580menKdx2UwVSxcCvbrHE69Ui","object":"chat.completion.chunk","created":1707811526,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
Do"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8ri580menKdx2UwVSxcCvbrHE69Ui","object":"chat.completion.chunk","created":1707811526,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
I"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8ri580menKdx2UwVSxcCvbrHE69Ui","object":"chat.completion.chunk","created":1707811526,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
need"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8ri580menKdx2UwVSxcCvbrHE69Ui","object":"chat.completion.chunk","created":1707811526,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
to"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8ri580menKdx2UwVSxcCvbrHE69Ui","object":"chat.completion.chunk","created":1707811526,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
use"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8ri580menKdx2UwVSxcCvbrHE69Ui","object":"chat.completion.chunk","created":1707811526,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
a"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8ri580menKdx2UwVSxcCvbrHE69Ui","object":"chat.completion.chunk","created":1707811526,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
tool"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8ri580menKdx2UwVSxcCvbrHE69Ui","object":"chat.completion.chunk","created":1707811526,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"?"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8ri580menKdx2UwVSxcCvbrHE69Ui","object":"chat.completion.chunk","created":1707811526,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
No"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8ri580menKdx2UwVSxcCvbrHE69Ui","object":"chat.completion.chunk","created":1707811526,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"\n"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8ri580menKdx2UwVSxcCvbrHE69Ui","object":"chat.completion.chunk","created":1707811526,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8ri580menKdx2UwVSxcCvbrHE69Ui","object":"chat.completion.chunk","created":1707811526,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
data: {"id":"chatcmpl-8x6vNrtf0qKpk299ZRwX2pR8oyOZc","object":"chat.completion.chunk","created":1709098661,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
Answer"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8ri580menKdx2UwVSxcCvbrHE69Ui","object":"chat.completion.chunk","created":1707811526,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||
data: {"id":"chatcmpl-8x6vNrtf0qKpk299ZRwX2pR8oyOZc","object":"chat.completion.chunk","created":1709098661,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8ri580menKdx2UwVSxcCvbrHE69Ui","object":"chat.completion.chunk","created":1707811526,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
data: {"id":"chatcmpl-8x6vNrtf0qKpk299ZRwX2pR8oyOZc","object":"chat.completion.chunk","created":1709098661,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
Hi"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8ri580menKdx2UwVSxcCvbrHE69Ui","object":"chat.completion.chunk","created":1707811526,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"!"},"logprobs":null,"finish_reason":null}]}
|
||||
data: {"id":"chatcmpl-8x6vNrtf0qKpk299ZRwX2pR8oyOZc","object":"chat.completion.chunk","created":1709098661,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"!"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8ri580menKdx2UwVSxcCvbrHE69Ui","object":"chat.completion.chunk","created":1707811526,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
|
||||
data: {"id":"chatcmpl-8x6vNrtf0qKpk299ZRwX2pR8oyOZc","object":"chat.completion.chunk","created":1709098661,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
|
||||
|
||||
|
||||
data: [DONE]
|
||||
@@ -175,7 +71,7 @@ interactions:
|
||||
CF-Cache-Status:
|
||||
- DYNAMIC
|
||||
CF-RAY:
|
||||
- 854b90f5ce86fb28-SJC
|
||||
- 85c651289e4901c2-GRU
|
||||
Cache-Control:
|
||||
- no-cache, must-revalidate
|
||||
Connection:
|
||||
@@ -183,14 +79,14 @@ interactions:
|
||||
Content-Type:
|
||||
- text/event-stream
|
||||
Date:
|
||||
- Tue, 13 Feb 2024 08:05:26 GMT
|
||||
- Wed, 28 Feb 2024 05:37:41 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Set-Cookie:
|
||||
- __cf_bm=eZx_Cc28AsZ4sE9XhSDROTXe.zTSX.5NABIk4QNh4rE-1707811526-1-AUSW1VrxOPxZjbDBkaJGjn3RvnxQi2anKBjm3rtF34M+3WVMXKZnsuFT1NyLSbUlKlHLmk+tH0BFBkkjVf1KNAQ=;
|
||||
path=/; expires=Tue, 13-Feb-24 08:35:26 GMT; domain=.api.openai.com; HttpOnly;
|
||||
- __cf_bm=l868Q2j0y1hEh_PyWbaEvyTSK_.xOIvrr6NOCGhTWOs-1709098661-1.0-AcmzcQgeapeNgYWJafurm0jZWKoXZcTElp7cKuollHPUXu89+ZMZ7C+cuD743jI6ZAjoMKnWILI6RpaTc+HijqI=;
|
||||
path=/; expires=Wed, 28-Feb-24 06:07:41 GMT; domain=.api.openai.com; HttpOnly;
|
||||
Secure; SameSite=None
|
||||
- _cfuvid=1hOKQMgKuc9NQV1lVNIkVHpksu9kDExwfGmwkHTeUl4-1707811526659-0-604800000;
|
||||
- _cfuvid=jv_tDbSPpAoMExt5fhPfzf1WpwjAwVWmq6ao6tfaPKg-1709098661891-0.0-604800000;
|
||||
path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
@@ -201,9 +97,9 @@ interactions:
|
||||
openai-model:
|
||||
- gpt-4-0613
|
||||
openai-organization:
|
||||
- user-z7g4wmlazxqvc5wjyaaaocfz
|
||||
- crewai-iuxna1
|
||||
openai-processing-ms:
|
||||
- '400'
|
||||
- '381'
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
strict-transport-security:
|
||||
@@ -215,151 +111,38 @@ interactions:
|
||||
x-ratelimit-remaining-requests:
|
||||
- '9999'
|
||||
x-ratelimit-remaining-tokens:
|
||||
- '299796'
|
||||
- '299889'
|
||||
x-ratelimit-reset-requests:
|
||||
- 6ms
|
||||
x-ratelimit-reset-tokens:
|
||||
- 40ms
|
||||
- 22ms
|
||||
x-request-id:
|
||||
- req_1e2e3f72498b1c3f5bdfb527b6808aa3
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"messages": [{"role": "user", "content": "Progressively summarize the
|
||||
lines of conversation provided, adding onto the previous summary returning a
|
||||
new summary.\n\nEXAMPLE\nCurrent summary:\nThe human asks what the AI thinks
|
||||
of artificial intelligence. The AI thinks artificial intelligence is a force
|
||||
for good.\n\nNew lines of conversation:\nHuman: Why do you think artificial
|
||||
intelligence is a force for good?\nAI: Because artificial intelligence will
|
||||
help humans reach their full potential.\n\nNew summary:\nThe human asks what
|
||||
the AI thinks of artificial intelligence. The AI thinks artificial intelligence
|
||||
is a force for good because it will help humans reach their full potential.\nEND
|
||||
OF EXAMPLE\n\nCurrent summary:\n\n\nNew lines of conversation:\nHuman: just
|
||||
say hi!\nAI: Hi!\n\nNew summary:"}], "model": "gpt-4", "n": 1, "stream": false,
|
||||
"temperature": 0.7}'
|
||||
headers:
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- gzip, deflate
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '867'
|
||||
content-type:
|
||||
- application/json
|
||||
cookie:
|
||||
- __cf_bm=eZx_Cc28AsZ4sE9XhSDROTXe.zTSX.5NABIk4QNh4rE-1707811526-1-AUSW1VrxOPxZjbDBkaJGjn3RvnxQi2anKBjm3rtF34M+3WVMXKZnsuFT1NyLSbUlKlHLmk+tH0BFBkkjVf1KNAQ=;
|
||||
_cfuvid=1hOKQMgKuc9NQV1lVNIkVHpksu9kDExwfGmwkHTeUl4-1707811526659-0-604800000
|
||||
host:
|
||||
- api.openai.com
|
||||
user-agent:
|
||||
- OpenAI/Python 1.12.0
|
||||
x-stainless-arch:
|
||||
- arm64
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- MacOS
|
||||
x-stainless-package-version:
|
||||
- 1.12.0
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.11.7
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: !!binary |
|
||||
H4sIAAAAAAAAA1SQW0sDMRCF3/dXjHlupVu3F/etKHgBhUIFxUpJs9Pd6CaTJrOoSP+7ZLtt9SWQ
|
||||
OTmT75yfBEDoQuQgVCVZGVf3p16PZvf2+uVmsZlvJ09XzzR3j3qLlw+WRS86aP2Oig+uc0XG1cia
|
||||
7F5WHiVj3JpOBpNpmo6G01YwVGAdbaXjftYfjNOLzlGRVhhEDq8JAMBPe0Y2W+CXyGHQO0wMhiBL
|
||||
FPnxEYDwVMeJkCHowLLj7ERFltG2uIsKoWqMtKBtYN8oDsAVwuwOmKD0iBzvpgfSFgfFY3BkiwCf
|
||||
miuQEHSMC0txq8+WQnQf7Y6ENZXO0zqmsU1dH+cbbXWoVh5lIBtpApPb23cJwFvbRPMvnHCejOMV
|
||||
0wfauDDNsv0+cSr9pA67mgQTy/qPa5wlHaEI34HRrDbaluid120xkTPZJb8AAAD//wMADZpmMA8C
|
||||
AAA=
|
||||
headers:
|
||||
CF-Cache-Status:
|
||||
- DYNAMIC
|
||||
CF-RAY:
|
||||
- 854b91035a06fb28-SJC
|
||||
Cache-Control:
|
||||
- no-cache, must-revalidate
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Encoding:
|
||||
- gzip
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Tue, 13 Feb 2024 08:05:30 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
access-control-allow-origin:
|
||||
- '*'
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
openai-model:
|
||||
- gpt-4-0613
|
||||
openai-organization:
|
||||
- user-z7g4wmlazxqvc5wjyaaaocfz
|
||||
openai-processing-ms:
|
||||
- '1880'
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
strict-transport-security:
|
||||
- max-age=15724800; includeSubDomains
|
||||
x-ratelimit-limit-requests:
|
||||
- '10000'
|
||||
x-ratelimit-limit-tokens:
|
||||
- '300000'
|
||||
x-ratelimit-remaining-requests:
|
||||
- '9999'
|
||||
x-ratelimit-remaining-tokens:
|
||||
- '299799'
|
||||
x-ratelimit-reset-requests:
|
||||
- 6ms
|
||||
x-ratelimit-reset-tokens:
|
||||
- 40ms
|
||||
x-request-id:
|
||||
- req_939914d4d3f3e4fc959d143817d71fbc
|
||||
- req_c29a9ff74ee32c982c0657b336101735
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"messages": [{"role": "user", "content": "You are test role.\ntest backstory\n\nYour
|
||||
personal goal is: test goalTOOLS:\n------\nYou have access to only the following
|
||||
tools:\n\n('''',)\n\nTo use a tool, please use the exact following format:\n\n```\nThought:
|
||||
Do I need to use a tool? Yes\nAction: the tool you wanna use, should be one
|
||||
of [], just the name.\nAction Input: Any and all relevant information input
|
||||
and context for using the tool\nObservation: the result of using the tool\n```\n\nWhen
|
||||
you have a response for your task, or if you do not need to use a tool, you
|
||||
MUST use the format:\n\n```\nThought: Do I need to use a tool? No\nFinal Answer:
|
||||
[your response here]```This is the summary of your work so far:\nThe human instructs
|
||||
the AI to greet them, and the AI responds with a simple \"Hi!\"Begin! This is
|
||||
VERY important to you, your job depends on it!\n\nCurrent Task: just say hello!\nThis
|
||||
is the context you''re working with:\nHi!\n"}], "model": "gpt-4", "n": 1, "stop":
|
||||
["\nObservation"], "stream": true, "temperature": 0.7}'
|
||||
personal goal is: test goalTo give my final answer use the exact following format:\n\n```\nFinal
|
||||
Answer: [my expected final answer, entire content of my most complete final
|
||||
answer goes here]\n```\nI MUST use these formats, my jobs depends on it!\n\nCurrent
|
||||
Task: just say hello!\nThis is the context you''re working with:\nHi!\n\n Begin!
|
||||
This is VERY important to you, your job depends on it!\n\n\n"}], "model": "gpt-4",
|
||||
"n": 1, "stop": ["\nResult"], "stream": true, "temperature": 0.7}'
|
||||
headers:
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- gzip, deflate
|
||||
- gzip, deflate, br
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '1037'
|
||||
- '571'
|
||||
content-type:
|
||||
- application/json
|
||||
cookie:
|
||||
- __cf_bm=eZx_Cc28AsZ4sE9XhSDROTXe.zTSX.5NABIk4QNh4rE-1707811526-1-AUSW1VrxOPxZjbDBkaJGjn3RvnxQi2anKBjm3rtF34M+3WVMXKZnsuFT1NyLSbUlKlHLmk+tH0BFBkkjVf1KNAQ=;
|
||||
_cfuvid=1hOKQMgKuc9NQV1lVNIkVHpksu9kDExwfGmwkHTeUl4-1707811526659-0-604800000
|
||||
- __cf_bm=l868Q2j0y1hEh_PyWbaEvyTSK_.xOIvrr6NOCGhTWOs-1709098661-1.0-AcmzcQgeapeNgYWJafurm0jZWKoXZcTElp7cKuollHPUXu89+ZMZ7C+cuD743jI6ZAjoMKnWILI6RpaTc+HijqI=;
|
||||
_cfuvid=jv_tDbSPpAoMExt5fhPfzf1WpwjAwVWmq6ao6tfaPKg-1709098661891-0.0-604800000
|
||||
host:
|
||||
- api.openai.com
|
||||
user-agent:
|
||||
@@ -382,71 +165,27 @@ interactions:
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: 'data: {"id":"chatcmpl-8ri5CPtraPfqxGNtaNwyQfaesdGAb","object":"chat.completion.chunk","created":1707811530,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
|
||||
string: 'data: {"id":"chatcmpl-8x6vOxu9SxkG3t54pHLQT2Pt0iQ63","object":"chat.completion.chunk","created":1709098662,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8ri5CPtraPfqxGNtaNwyQfaesdGAb","object":"chat.completion.chunk","created":1707811530,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Thought"},"logprobs":null,"finish_reason":null}]}
|
||||
data: {"id":"chatcmpl-8x6vOxu9SxkG3t54pHLQT2Pt0iQ63","object":"chat.completion.chunk","created":1709098662,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8ri5CPtraPfqxGNtaNwyQfaesdGAb","object":"chat.completion.chunk","created":1707811530,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8ri5CPtraPfqxGNtaNwyQfaesdGAb","object":"chat.completion.chunk","created":1707811530,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
Do"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8ri5CPtraPfqxGNtaNwyQfaesdGAb","object":"chat.completion.chunk","created":1707811530,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
I"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8ri5CPtraPfqxGNtaNwyQfaesdGAb","object":"chat.completion.chunk","created":1707811530,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
need"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8ri5CPtraPfqxGNtaNwyQfaesdGAb","object":"chat.completion.chunk","created":1707811530,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
to"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8ri5CPtraPfqxGNtaNwyQfaesdGAb","object":"chat.completion.chunk","created":1707811530,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
use"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8ri5CPtraPfqxGNtaNwyQfaesdGAb","object":"chat.completion.chunk","created":1707811530,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
a"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8ri5CPtraPfqxGNtaNwyQfaesdGAb","object":"chat.completion.chunk","created":1707811530,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
tool"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8ri5CPtraPfqxGNtaNwyQfaesdGAb","object":"chat.completion.chunk","created":1707811530,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"?"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8ri5CPtraPfqxGNtaNwyQfaesdGAb","object":"chat.completion.chunk","created":1707811530,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
No"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8ri5CPtraPfqxGNtaNwyQfaesdGAb","object":"chat.completion.chunk","created":1707811530,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"\n"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8ri5CPtraPfqxGNtaNwyQfaesdGAb","object":"chat.completion.chunk","created":1707811530,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8ri5CPtraPfqxGNtaNwyQfaesdGAb","object":"chat.completion.chunk","created":1707811530,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
data: {"id":"chatcmpl-8x6vOxu9SxkG3t54pHLQT2Pt0iQ63","object":"chat.completion.chunk","created":1709098662,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
Answer"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8ri5CPtraPfqxGNtaNwyQfaesdGAb","object":"chat.completion.chunk","created":1707811530,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||
data: {"id":"chatcmpl-8x6vOxu9SxkG3t54pHLQT2Pt0iQ63","object":"chat.completion.chunk","created":1709098662,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8ri5CPtraPfqxGNtaNwyQfaesdGAb","object":"chat.completion.chunk","created":1707811530,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
data: {"id":"chatcmpl-8x6vOxu9SxkG3t54pHLQT2Pt0iQ63","object":"chat.completion.chunk","created":1709098662,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
Hello"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8ri5CPtraPfqxGNtaNwyQfaesdGAb","object":"chat.completion.chunk","created":1707811530,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"!"},"logprobs":null,"finish_reason":null}]}
|
||||
data: {"id":"chatcmpl-8x6vOxu9SxkG3t54pHLQT2Pt0iQ63","object":"chat.completion.chunk","created":1709098662,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"!"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8ri5CPtraPfqxGNtaNwyQfaesdGAb","object":"chat.completion.chunk","created":1707811530,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
|
||||
data: {"id":"chatcmpl-8x6vOxu9SxkG3t54pHLQT2Pt0iQ63","object":"chat.completion.chunk","created":1709098662,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
|
||||
|
||||
|
||||
data: [DONE]
|
||||
@@ -457,7 +196,7 @@ interactions:
|
||||
CF-Cache-Status:
|
||||
- DYNAMIC
|
||||
CF-RAY:
|
||||
- 854b91126e42fb28-SJC
|
||||
- 85c6512e293d01c2-GRU
|
||||
Cache-Control:
|
||||
- no-cache, must-revalidate
|
||||
Connection:
|
||||
@@ -465,7 +204,7 @@ interactions:
|
||||
Content-Type:
|
||||
- text/event-stream
|
||||
Date:
|
||||
- Tue, 13 Feb 2024 08:05:31 GMT
|
||||
- Wed, 28 Feb 2024 05:37:42 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Transfer-Encoding:
|
||||
@@ -477,9 +216,9 @@ interactions:
|
||||
openai-model:
|
||||
- gpt-4-0613
|
||||
openai-organization:
|
||||
- user-z7g4wmlazxqvc5wjyaaaocfz
|
||||
- crewai-iuxna1
|
||||
openai-processing-ms:
|
||||
- '360'
|
||||
- '159'
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
strict-transport-security:
|
||||
@@ -491,121 +230,13 @@ interactions:
|
||||
x-ratelimit-remaining-requests:
|
||||
- '9999'
|
||||
x-ratelimit-remaining-tokens:
|
||||
- '299765'
|
||||
- '299876'
|
||||
x-ratelimit-reset-requests:
|
||||
- 6ms
|
||||
x-ratelimit-reset-tokens:
|
||||
- 47ms
|
||||
- 24ms
|
||||
x-request-id:
|
||||
- req_9177fada9049cfa726cab195d9a942f5
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"messages": [{"role": "user", "content": "Progressively summarize the
|
||||
lines of conversation provided, adding onto the previous summary returning a
|
||||
new summary.\n\nEXAMPLE\nCurrent summary:\nThe human asks what the AI thinks
|
||||
of artificial intelligence. The AI thinks artificial intelligence is a force
|
||||
for good.\n\nNew lines of conversation:\nHuman: Why do you think artificial
|
||||
intelligence is a force for good?\nAI: Because artificial intelligence will
|
||||
help humans reach their full potential.\n\nNew summary:\nThe human asks what
|
||||
the AI thinks of artificial intelligence. The AI thinks artificial intelligence
|
||||
is a force for good because it will help humans reach their full potential.\nEND
|
||||
OF EXAMPLE\n\nCurrent summary:\nThe human instructs the AI to greet them, and
|
||||
the AI responds with a simple \"Hi!\"\n\nNew lines of conversation:\nHuman:
|
||||
just say hello!\nThis is the context you''re working with:\nHi!\nAI: Hello!\n\nNew
|
||||
summary:"}], "model": "gpt-4", "n": 1, "stream": false, "temperature": 0.7}'
|
||||
headers:
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- gzip, deflate
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '1003'
|
||||
content-type:
|
||||
- application/json
|
||||
cookie:
|
||||
- __cf_bm=eZx_Cc28AsZ4sE9XhSDROTXe.zTSX.5NABIk4QNh4rE-1707811526-1-AUSW1VrxOPxZjbDBkaJGjn3RvnxQi2anKBjm3rtF34M+3WVMXKZnsuFT1NyLSbUlKlHLmk+tH0BFBkkjVf1KNAQ=;
|
||||
_cfuvid=1hOKQMgKuc9NQV1lVNIkVHpksu9kDExwfGmwkHTeUl4-1707811526659-0-604800000
|
||||
host:
|
||||
- api.openai.com
|
||||
user-agent:
|
||||
- OpenAI/Python 1.12.0
|
||||
x-stainless-arch:
|
||||
- arm64
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- MacOS
|
||||
x-stainless-package-version:
|
||||
- 1.12.0
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.11.7
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: !!binary |
|
||||
H4sIAAAAAAAAA1xRy24bMQy871cwPK8DO4kf8C0NgqaHokDaS1EXhqKld5VIoiBy0wSB/73Qrh9o
|
||||
LwI0wyGGMx8VALoG14C2M2pD8pNVdvN7r99vv67e7Psjy7ebsLj79PmnfV0o1kXBT89k9ai6tByS
|
||||
J3UcR9pmMkpl62w5Xa5ms/n11UAEbsgXWZt0cjOZLmbXB0XHzpLgGn5VAAAfw1u8xYbecA3T+ogE
|
||||
EjEt4fo0BICZfUHQiDhRE0efB9JyVIqD3R8dQdcHE8FF0dxbFdCO4PYLKEObibT8Qw07l0WPXCZJ
|
||||
HBuBP047MLDBB3exQTCxKSOxhj5xhF2ftaN82u041uD0P/kGH8h7vtjgJR5M7k/XeW5T5qeSROy9
|
||||
P+E7F51020xGOJZLRDmN8n0F8HtIsf8nGEyZQ9Kt8gtFGcqYj/vwXNiZHSsCQGU1/oxfTZfVwSHK
|
||||
uyiF7c7FlnLKbgi1+Kz21V8AAAD//wMAt4zwVksCAAA=
|
||||
headers:
|
||||
CF-Cache-Status:
|
||||
- DYNAMIC
|
||||
CF-RAY:
|
||||
- 854b911cff76fb28-SJC
|
||||
Cache-Control:
|
||||
- no-cache, must-revalidate
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Encoding:
|
||||
- gzip
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Tue, 13 Feb 2024 08:05:35 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
access-control-allow-origin:
|
||||
- '*'
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
openai-model:
|
||||
- gpt-4-0613
|
||||
openai-organization:
|
||||
- user-z7g4wmlazxqvc5wjyaaaocfz
|
||||
openai-processing-ms:
|
||||
- '2599'
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
strict-transport-security:
|
||||
- max-age=15724800; includeSubDomains
|
||||
x-ratelimit-limit-requests:
|
||||
- '10000'
|
||||
x-ratelimit-limit-tokens:
|
||||
- '300000'
|
||||
x-ratelimit-remaining-requests:
|
||||
- '9999'
|
||||
x-ratelimit-remaining-tokens:
|
||||
- '299765'
|
||||
x-ratelimit-reset-requests:
|
||||
- 6ms
|
||||
x-ratelimit-reset-tokens:
|
||||
- 46ms
|
||||
x-request-id:
|
||||
- req_9590d08c9df508c18c924e19e4e0055d
|
||||
- req_06762f5bedd260cb2740a35c45c78e4e
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
File diffs suppressed because they are too large (six files).
@@ -0,0 +1,153 @@
|
||||
interactions:
|
||||
- request:
|
||||
body: '{"messages": [{"role": "user", "content": "You are Manager.\nYou''re great
|
||||
at delegating work about scoring.\n\nYour personal goal is: Coordinate scoring
|
||||
processesTo give my final answer use the exact following format:\n\n```\nFinal
|
||||
Answer: [my expected final answer, entire content of my most complete final
|
||||
answer goes here]\n```\nI MUST use these formats, my jobs depends on it!\n\nCurrent
|
||||
Task: Give me an integer score between 1-5 for the following title: ''The impact
|
||||
of AI in the future of work''\nYour final answer must be: The score of the title.\n\n
|
||||
Begin! This is VERY important to you, your job depends on it!\n\n\n"}], "model":
|
||||
"gpt-4", "n": 1, "stop": ["\nResult"], "stream": true, "temperature": 0.7}'
|
||||
headers:
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- gzip, deflate, br
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '712'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- api.openai.com
|
||||
user-agent:
|
||||
- OpenAI/Python 1.12.0
|
||||
x-stainless-arch:
|
||||
- arm64
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- MacOS
|
||||
x-stainless-package-version:
|
||||
- 1.12.0
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.11.7
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: 'data: {"id":"chatcmpl-8x6hAnJdNQ95CMhSACL5TNL0lG6Ws","object":"chat.completion.chunk","created":1709097780,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6hAnJdNQ95CMhSACL5TNL0lG6Ws","object":"chat.completion.chunk","created":1709097780,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6hAnJdNQ95CMhSACL5TNL0lG6Ws","object":"chat.completion.chunk","created":1709097780,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
Answer"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6hAnJdNQ95CMhSACL5TNL0lG6Ws","object":"chat.completion.chunk","created":1709097780,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6hAnJdNQ95CMhSACL5TNL0lG6Ws","object":"chat.completion.chunk","created":1709097780,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
The"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6hAnJdNQ95CMhSACL5TNL0lG6Ws","object":"chat.completion.chunk","created":1709097780,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
score"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6hAnJdNQ95CMhSACL5TNL0lG6Ws","object":"chat.completion.chunk","created":1709097780,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
of"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6hAnJdNQ95CMhSACL5TNL0lG6Ws","object":"chat.completion.chunk","created":1709097780,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
the"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6hAnJdNQ95CMhSACL5TNL0lG6Ws","object":"chat.completion.chunk","created":1709097780,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
title"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6hAnJdNQ95CMhSACL5TNL0lG6Ws","object":"chat.completion.chunk","created":1709097780,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
is"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6hAnJdNQ95CMhSACL5TNL0lG6Ws","object":"chat.completion.chunk","created":1709097780,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6hAnJdNQ95CMhSACL5TNL0lG6Ws","object":"chat.completion.chunk","created":1709097780,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"4"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6hAnJdNQ95CMhSACL5TNL0lG6Ws","object":"chat.completion.chunk","created":1709097780,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6hAnJdNQ95CMhSACL5TNL0lG6Ws","object":"chat.completion.chunk","created":1709097780,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
|
||||
|
||||
|
||||
data: [DONE]
|
||||
|
||||
|
||||
'
|
||||
headers:
|
||||
CF-Cache-Status:
|
||||
- DYNAMIC
|
||||
CF-RAY:
|
||||
- 85c63ba94ea60110-GRU
|
||||
Cache-Control:
|
||||
- no-cache, must-revalidate
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- text/event-stream
|
||||
Date:
|
||||
- Wed, 28 Feb 2024 05:23:01 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Set-Cookie:
|
||||
- __cf_bm=AaCQFIZM8yehA4h1745UTRRtL0FczZJtdLfNQ6_8NzA-1709097781-1.0-AUIh6/dxRTiveEa2WnhkSYSTau7hn7cRLNnlSfeiJp2fgTieIadq3fkeBHjqHSnQ7k/pE4WZgIZ9SAAmacifrgc=;
|
||||
path=/; expires=Wed, 28-Feb-24 05:53:01 GMT; domain=.api.openai.com; HttpOnly;
|
||||
Secure; SameSite=None
|
||||
- _cfuvid=o.lLAcb8kPLRizp5FDtYBR4rjdIgMyVXhQ_NLWlcuj8-1709097781239-0.0-604800000;
|
||||
path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
access-control-allow-origin:
|
||||
- '*'
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
openai-model:
|
||||
- gpt-4-0613
|
||||
openai-organization:
|
||||
- crewai-iuxna1
|
||||
openai-processing-ms:
|
||||
- '224'
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
strict-transport-security:
|
||||
- max-age=15724800; includeSubDomains
|
||||
x-ratelimit-limit-requests:
|
||||
- '10000'
|
||||
x-ratelimit-limit-tokens:
|
||||
- '300000'
|
||||
x-ratelimit-remaining-requests:
|
||||
- '9999'
|
||||
x-ratelimit-remaining-tokens:
|
||||
- '299840'
|
||||
x-ratelimit-reset-requests:
|
||||
- 6ms
|
||||
x-ratelimit-reset-tokens:
|
||||
- 31ms
|
||||
x-request-id:
|
||||
- req_3129f92f1bc422dba1aa396cc072a30e
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
version: 1
tests/cassettes/test_increment_tool_errors.yaml (3238 lines; diff suppressed because it is too large)
@@ -1,28 +1,27 @@
|
||||
interactions:
|
||||
- request:
|
||||
body: '{"messages": [{"role": "user", "content": "You are test role.\ntest backstory\n\nYour
|
||||
personal goal is: test goalTOOLS:\n------\nYou have access to only the following
|
||||
tools:\n\n(''multiplier: multiplier(first_number: int, second_number: int) ->
|
||||
float - Useful for when you need to multiply two numbers together.'',)\n\nTo
|
||||
use a tool, please use the exact following format:\n\n```\nThought: Do I need
|
||||
to use a tool? Yes\nAction: the tool you wanna use, should be one of [multiplier],
|
||||
just the name.\nAction Input: Any and all relevant information input and context
|
||||
for using the tool\nObservation: the result of using the tool\n```\n\nWhen you
|
||||
have a response for your task, or if you do not need to use a tool, you MUST
|
||||
use the format:\n\n```\nThought: Do I need to use a tool? No\nFinal Answer:
|
||||
[your response here]```This is the summary of your work so far:\nBegin! This
|
||||
is VERY important to you, your job depends on it!\n\nCurrent Task: What is 3
|
||||
times 4?\n"}], "model": "gpt-4", "n": 1, "stop": ["\nObservation"], "stream":
|
||||
true, "temperature": 0.7}'
|
||||
personal goal is: test goalYou have access to ONLY the following tools, use
|
||||
one at time:\n\nmultiplier: multiplier(first_number: int, second_number: int)
|
||||
-> float - Useful for when you need to multiply two numbers together.\n\nTo
|
||||
use a tool you MUST use the exact following format:\n\n```\nUse Tool: the tool
|
||||
you wanna use, should be one of [multiplier] and absolute all relevant input
|
||||
and context for using the tool, you must use only one tool at once.\nResult:
|
||||
[result of the tool]\n```\n\nTo complete the task you MUST follow the format:\n\n```\nFinal
|
||||
Answer: [THE MOST COMPLETE ANSWE WITH ALL CONTEXT, DO NOT LEAVE ANYTHING OUT]\n```
|
||||
You must use these formats, my life depends on it.This is the summary of your
|
||||
work so far:\nBegin! This is VERY important to you, your job depends on it!\n\nCurrent
|
||||
Task: What is 3 times 4?\n"}], "model": "gpt-4", "n": 1, "stop": ["\nResult"],
|
||||
"stream": true, "temperature": 0.7}'
|
||||
headers:
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- gzip, deflate
|
||||
- gzip, deflate, br
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '1051'
|
||||
- '1003'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
@@ -47,123 +46,82 @@ interactions:
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: 'data: {"id":"chatcmpl-8rhqlgY5EyKnQhtTdxk43p6T5CgNN","object":"chat.completion.chunk","created":1707810635,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
|
||||
string: 'data: {"id":"chatcmpl-8uAMOk2FBUg2npxeTiSwTJsB2cUdO","object":"chat.completion.chunk","created":1708396884,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqlgY5EyKnQhtTdxk43p6T5CgNN","object":"chat.completion.chunk","created":1707810635,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Thought"},"logprobs":null,"finish_reason":null}]}
|
||||
data: {"id":"chatcmpl-8uAMOk2FBUg2npxeTiSwTJsB2cUdO","object":"chat.completion.chunk","created":1708396884,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Use"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqlgY5EyKnQhtTdxk43p6T5CgNN","object":"chat.completion.chunk","created":1707810635,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||
data: {"id":"chatcmpl-8uAMOk2FBUg2npxeTiSwTJsB2cUdO","object":"chat.completion.chunk","created":1708396884,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
Tool"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqlgY5EyKnQhtTdxk43p6T5CgNN","object":"chat.completion.chunk","created":1707810635,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
Do"},"logprobs":null,"finish_reason":null}]}
|
||||
data: {"id":"chatcmpl-8uAMOk2FBUg2npxeTiSwTJsB2cUdO","object":"chat.completion.chunk","created":1708396884,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqlgY5EyKnQhtTdxk43p6T5CgNN","object":"chat.completion.chunk","created":1707810635,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
I"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqlgY5EyKnQhtTdxk43p6T5CgNN","object":"chat.completion.chunk","created":1707810635,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
need"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqlgY5EyKnQhtTdxk43p6T5CgNN","object":"chat.completion.chunk","created":1707810635,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
to"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqlgY5EyKnQhtTdxk43p6T5CgNN","object":"chat.completion.chunk","created":1707810635,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
use"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqlgY5EyKnQhtTdxk43p6T5CgNN","object":"chat.completion.chunk","created":1707810635,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
a"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqlgY5EyKnQhtTdxk43p6T5CgNN","object":"chat.completion.chunk","created":1707810635,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
tool"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqlgY5EyKnQhtTdxk43p6T5CgNN","object":"chat.completion.chunk","created":1707810635,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"?"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqlgY5EyKnQhtTdxk43p6T5CgNN","object":"chat.completion.chunk","created":1707810635,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
Yes"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqlgY5EyKnQhtTdxk43p6T5CgNN","object":"chat.completion.chunk","created":1707810635,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"\n"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqlgY5EyKnQhtTdxk43p6T5CgNN","object":"chat.completion.chunk","created":1707810635,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Action"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqlgY5EyKnQhtTdxk43p6T5CgNN","object":"chat.completion.chunk","created":1707810635,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqlgY5EyKnQhtTdxk43p6T5CgNN","object":"chat.completion.chunk","created":1707810635,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
data: {"id":"chatcmpl-8uAMOk2FBUg2npxeTiSwTJsB2cUdO","object":"chat.completion.chunk","created":1708396884,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
multiplier"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqlgY5EyKnQhtTdxk43p6T5CgNN","object":"chat.completion.chunk","created":1707810635,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"\n"},"logprobs":null,"finish_reason":null}]}
|
||||
data: {"id":"chatcmpl-8uAMOk2FBUg2npxeTiSwTJsB2cUdO","object":"chat.completion.chunk","created":1708396884,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":","},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqlgY5EyKnQhtTdxk43p6T5CgNN","object":"chat.completion.chunk","created":1707810635,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Action"},"logprobs":null,"finish_reason":null}]}
|
||||
data: {"id":"chatcmpl-8uAMOk2FBUg2npxeTiSwTJsB2cUdO","object":"chat.completion.chunk","created":1708396884,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
with"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqlgY5EyKnQhtTdxk43p6T5CgNN","object":"chat.completion.chunk","created":1707810635,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
Input"},"logprobs":null,"finish_reason":null}]}
|
||||
data: {"id":"chatcmpl-8uAMOk2FBUg2npxeTiSwTJsB2cUdO","object":"chat.completion.chunk","created":1708396884,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
the"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqlgY5EyKnQhtTdxk43p6T5CgNN","object":"chat.completion.chunk","created":1707810635,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||
data: {"id":"chatcmpl-8uAMOk2FBUg2npxeTiSwTJsB2cUdO","object":"chat.completion.chunk","created":1708396884,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
first"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqlgY5EyKnQhtTdxk43p6T5CgNN","object":"chat.completion.chunk","created":1707810635,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
{''"},"logprobs":null,"finish_reason":null}]}
|
||||
data: {"id":"chatcmpl-8uAMOk2FBUg2npxeTiSwTJsB2cUdO","object":"chat.completion.chunk","created":1708396884,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"_number"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqlgY5EyKnQhtTdxk43p6T5CgNN","object":"chat.completion.chunk","created":1707810635,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"first"},"logprobs":null,"finish_reason":null}]}
|
||||
data: {"id":"chatcmpl-8uAMOk2FBUg2npxeTiSwTJsB2cUdO","object":"chat.completion.chunk","created":1708396884,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
being"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqlgY5EyKnQhtTdxk43p6T5CgNN","object":"chat.completion.chunk","created":1707810635,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"_number"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqlgY5EyKnQhtTdxk43p6T5CgNN","object":"chat.completion.chunk","created":1707810635,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"'':"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqlgY5EyKnQhtTdxk43p6T5CgNN","object":"chat.completion.chunk","created":1707810635,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
data: {"id":"chatcmpl-8uAMOk2FBUg2npxeTiSwTJsB2cUdO","object":"chat.completion.chunk","created":1708396884,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqlgY5EyKnQhtTdxk43p6T5CgNN","object":"chat.completion.chunk","created":1707810635,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"3"},"logprobs":null,"finish_reason":null}]}
|
||||
data: {"id":"chatcmpl-8uAMOk2FBUg2npxeTiSwTJsB2cUdO","object":"chat.completion.chunk","created":1708396884,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"3"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqlgY5EyKnQhtTdxk43p6T5CgNN","object":"chat.completion.chunk","created":1707810635,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":","},"logprobs":null,"finish_reason":null}]}
|
||||
data: {"id":"chatcmpl-8uAMOk2FBUg2npxeTiSwTJsB2cUdO","object":"chat.completion.chunk","created":1708396884,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
and"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqlgY5EyKnQhtTdxk43p6T5CgNN","object":"chat.completion.chunk","created":1707810635,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
''"},"logprobs":null,"finish_reason":null}]}
|
||||
data: {"id":"chatcmpl-8uAMOk2FBUg2npxeTiSwTJsB2cUdO","object":"chat.completion.chunk","created":1708396884,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
the"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqlgY5EyKnQhtTdxk43p6T5CgNN","object":"chat.completion.chunk","created":1707810635,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"second"},"logprobs":null,"finish_reason":null}]}
|
||||
data: {"id":"chatcmpl-8uAMOk2FBUg2npxeTiSwTJsB2cUdO","object":"chat.completion.chunk","created":1708396884,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
second"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqlgY5EyKnQhtTdxk43p6T5CgNN","object":"chat.completion.chunk","created":1707810635,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"_number"},"logprobs":null,"finish_reason":null}]}
|
||||
data: {"id":"chatcmpl-8uAMOk2FBUg2npxeTiSwTJsB2cUdO","object":"chat.completion.chunk","created":1708396884,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"_number"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqlgY5EyKnQhtTdxk43p6T5CgNN","object":"chat.completion.chunk","created":1707810635,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"'':"},"logprobs":null,"finish_reason":null}]}
|
||||
data: {"id":"chatcmpl-8uAMOk2FBUg2npxeTiSwTJsB2cUdO","object":"chat.completion.chunk","created":1708396884,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
being"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqlgY5EyKnQhtTdxk43p6T5CgNN","object":"chat.completion.chunk","created":1707810635,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
data: {"id":"chatcmpl-8uAMOk2FBUg2npxeTiSwTJsB2cUdO","object":"chat.completion.chunk","created":1708396884,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqlgY5EyKnQhtTdxk43p6T5CgNN","object":"chat.completion.chunk","created":1707810635,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"4"},"logprobs":null,"finish_reason":null}]}
|
||||
data: {"id":"chatcmpl-8uAMOk2FBUg2npxeTiSwTJsB2cUdO","object":"chat.completion.chunk","created":1708396884,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"4"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqlgY5EyKnQhtTdxk43p6T5CgNN","object":"chat.completion.chunk","created":1707810635,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"}"},"logprobs":null,"finish_reason":null}]}
|
||||
data: {"id":"chatcmpl-8uAMOk2FBUg2npxeTiSwTJsB2cUdO","object":"chat.completion.chunk","created":1708396884,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqlgY5EyKnQhtTdxk43p6T5CgNN","object":"chat.completion.chunk","created":1707810635,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
|
||||
data: {"id":"chatcmpl-8uAMOk2FBUg2npxeTiSwTJsB2cUdO","object":"chat.completion.chunk","created":1708396884,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
|
||||
|
||||
|
||||
data: [DONE]
|
||||
@@ -174,7 +132,7 @@ interactions:
|
||||
CF-Cache-Status:
|
||||
- DYNAMIC
|
||||
CF-RAY:
|
||||
- 854b7b380f95fb50-SJC
|
||||
- 858363edbe5c1ac1-GRU
|
||||
Cache-Control:
|
||||
- no-cache, must-revalidate
|
||||
Connection:
|
||||
@@ -182,14 +140,14 @@ interactions:
|
||||
Content-Type:
|
||||
- text/event-stream
|
||||
Date:
|
||||
- Tue, 13 Feb 2024 07:50:36 GMT
|
||||
- Tue, 20 Feb 2024 02:41:24 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Set-Cookie:
|
||||
- __cf_bm=WtYsZIolwSGU.Cw4hxGTqIv0MRGYu4Anw8lzJON1fgk-1707810636-1-Adr1AXb4JT1VMT15njZVe64VjGKYws2k3wQOAEcin03j8Lk4b6WDtV0AG8+mydIMxiPJ++Wwk5ZpUN+jjSGK1Pg=;
|
||||
path=/; expires=Tue, 13-Feb-24 08:20:36 GMT; domain=.api.openai.com; HttpOnly;
|
||||
- __cf_bm=YTdmTzN24Ou6kgt2z_xTK4x5guIXbV0_UVAC3Zzsi5M-1708396884-1.0-Ae8kFLY38Uv+hx7e6bbAap3N2fHI7/wqMZEmRN9LsgLZEhHwzhLdwv1t3XblbV+Bsal2wYnhQ2KQTeowkVdcp18=;
|
||||
path=/; expires=Tue, 20-Feb-24 03:11:24 GMT; domain=.api.openai.com; HttpOnly;
|
||||
Secure; SameSite=None
|
||||
- _cfuvid=kNhuSk4gM0Tj8M010_XI7rHzNLZx1vXRlB.DYvetRy8-1707810636228-0-604800000;
|
||||
- _cfuvid=KkLrQOr.1VeOh89kGukdRj_iO5I7oPresrBk0t3uJuY-1708396884742-0.0-604800000;
|
||||
path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
@@ -200,122 +158,9 @@ interactions:
|
||||
openai-model:
|
||||
- gpt-4-0613
|
||||
openai-organization:
|
||||
- user-z7g4wmlazxqvc5wjyaaaocfz
|
||||
- crewai-iuxna1
|
||||
openai-processing-ms:
|
||||
- '460'
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
strict-transport-security:
|
||||
- max-age=15724800; includeSubDomains
|
||||
x-ratelimit-limit-requests:
|
||||
- '10000'
|
||||
x-ratelimit-limit-tokens:
|
||||
- '300000'
|
||||
x-ratelimit-remaining-requests:
|
||||
- '9999'
|
||||
x-ratelimit-remaining-tokens:
|
||||
- '299760'
|
||||
x-ratelimit-reset-requests:
|
||||
- 6ms
|
||||
x-ratelimit-reset-tokens:
|
||||
- 48ms
|
||||
x-request-id:
|
||||
- req_9f3693cba92d7c2c99f8c49878a51c17
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"messages": [{"role": "system", "content": "\n The
|
||||
schema should have the following structure, only two key:\n -
|
||||
tool_name: str\n - arguments: dict (with all
|
||||
arguments being passed)\n\n Example:\n {\"tool_name\":
|
||||
\"tool_name\", \"arguments\": {\"arg_name1\": \"value\", \"arg_name2\": 2}}\n "},
|
||||
{"role": "user", "content": "Tools available:\n\nTool Name: multiplier\nTool
|
||||
Description: multiplier(first_number: int, second_number: int) -> float - Useful
|
||||
for when you need to multiply two numbers together.\nTool Arguments: {''first_number'':
|
||||
{''type'': ''integer''}, ''second_number'': {''type'': ''integer''}}\n\nReturn
|
||||
a valid schema for the tool, use this text to inform a valid ouput schema:\n\nTool
|
||||
Name: multiplier\nTool Arguments: {''first_number'': 3, ''second_number'': 4}```"}],
|
||||
"model": "gpt-4", "function_call": {"name": "InstructorToolCalling"}, "functions":
|
||||
[{"name": "InstructorToolCalling", "description": "Correctly extracted `InstructorToolCalling`
|
||||
with all the required parameters with correct types", "parameters": {"properties":
|
||||
{"tool_name": {"description": "The name of the tool to be called.", "title":
|
||||
"Tool Name", "type": "string"}, "arguments": {"description": "A dictinary of
|
||||
arguments to be passed to the tool.", "title": "Arguments", "type": "object"}},
|
||||
"required": ["arguments", "tool_name"], "type": "object"}}]}'
|
||||
headers:
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- gzip, deflate
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '1510'
|
||||
content-type:
|
||||
- application/json
|
||||
cookie:
|
||||
- __cf_bm=WtYsZIolwSGU.Cw4hxGTqIv0MRGYu4Anw8lzJON1fgk-1707810636-1-Adr1AXb4JT1VMT15njZVe64VjGKYws2k3wQOAEcin03j8Lk4b6WDtV0AG8+mydIMxiPJ++Wwk5ZpUN+jjSGK1Pg=;
|
||||
_cfuvid=kNhuSk4gM0Tj8M010_XI7rHzNLZx1vXRlB.DYvetRy8-1707810636228-0-604800000
|
||||
host:
|
||||
- api.openai.com
|
||||
user-agent:
|
||||
- OpenAI/Python 1.12.0
|
||||
x-stainless-arch:
|
||||
- arm64
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- MacOS
|
||||
x-stainless-package-version:
|
||||
- 1.12.0
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.11.7
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: !!binary |
|
||||
H4sIAAAAAAAAA1yRQW/bMAyF7/4VBM/OEDdZEvg2bMXWyzAUO2yYC0ORFUedRCoSDWwI/N8HJanr
|
||||
7iIIfHyfnshzAYC2wxpQH5VoH9xiF48njl+l0v7nl8fq84/7hxN9Oj1+23QfGMvs4P2z0fLieqfZ
|
||||
B2fEMl1lHY0Sk6nVdrndVcvNancRPHfGZVsfZLFeLDfV6uY4stUmYQ2/CgCA8+XM2agzf7CGZflS
|
||||
8SYl1RuspyYAjOxyBVVKNokiwfJV1ExiKMelwbmZcBhI59StVs69AQIgKX9BPlCSOGjh+J3ZfVTO
|
||||
WepneABUsR+8Icn58dxQg8Ls2kxosIYG/eDEBmdNbLDM+uTI+rnBg41JWhr8PrfUsCqhwWQ0Uzer
|
||||
rseGRpxeHm+3cZqN4z5E3qf/vooHSzYd22hUYsohk3C4gjLk6bKD4c1YMUT2QVrh34Yy8G79/srD
|
||||
13XP1O1NFBbl5vW74pYQ098kxrcHS72JIdppJcVY/AMAAP//AwBvuYvviQIAAA==
|
||||
headers:
|
||||
CF-Cache-Status:
|
||||
- DYNAMIC
|
||||
CF-RAY:
|
||||
- 854b7b4c8925fb50-SJC
|
||||
Cache-Control:
|
||||
- no-cache, must-revalidate
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Encoding:
|
||||
- gzip
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Tue, 13 Feb 2024 07:50:40 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
access-control-allow-origin:
|
||||
- '*'
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
openai-model:
|
||||
- gpt-4-0613
|
||||
openai-organization:
|
||||
- user-z7g4wmlazxqvc5wjyaaaocfz
|
||||
openai-processing-ms:
|
||||
- '1333'
|
||||
- '348'
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
strict-transport-security:
|
||||
@@ -331,229 +176,46 @@ interactions:
|
||||
x-ratelimit-reset-requests:
|
||||
- 6ms
|
||||
x-ratelimit-reset-tokens:
|
||||
- 45ms
|
||||
- 46ms
|
||||
x-request-id:
|
||||
- req_d9bce0afd53265ec62076e265f8be7ce
|
||||
- req_4b3f5d2f5ff292a15bfdce1e8d121fbd
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"messages": [{"role": "user", "content": "You are test role.\ntest backstory\n\nYour
|
||||
personal goal is: test goalTOOLS:\n------\nYou have access to only the following
|
||||
tools:\n\n(''multiplier: multiplier(first_number: int, second_number: int) ->
|
||||
float - Useful for when you need to multiply two numbers together.'',)\n\nTo
|
||||
use a tool, please use the exact following format:\n\n```\nThought: Do I need
|
||||
to use a tool? Yes\nAction: the tool you wanna use, should be one of [multiplier],
|
||||
just the name.\nAction Input: Any and all relevant information input and context
|
||||
for using the tool\nObservation: the result of using the tool\n```\n\nWhen you
|
||||
have a response for your task, or if you do not need to use a tool, you MUST
|
||||
use the format:\n\n```\nThought: Do I need to use a tool? No\nFinal Answer:
|
||||
[your response here]```This is the summary of your work so far:\nBegin! This
|
||||
is VERY important to you, your job depends on it!\n\nCurrent Task: What is 3
|
||||
times 4?\nThought: Do I need to use a tool? Yes\nAction: multiplier\nAction
|
||||
Input: {''first_number'': 3, ''second_number'': 4}\nObservation: 12\nThought:
|
||||
"}], "model": "gpt-4", "n": 1, "stop": ["\nObservation"], "stream": true, "temperature":
|
||||
0.7}'
|
||||
body: '{"messages": [{"role": "user", "content": "Tools available:\n\nTool Name:
|
||||
multiplier\nTool Description: multiplier(first_number: int, second_number: int)
|
||||
-> float - Useful for when you need to multiply two numbers together.\nTool
|
||||
Arguments: {''first_number'': {''type'': ''integer''}, ''second_number'': {''type'':
|
||||
''integer''}}\n\nReturn a valid schema for the tool, the tool name must be equal
|
||||
one of the options, use this text to inform a valid ouput schema:\nUse Tool:
|
||||
multiplier, with the first_number being 3 and the second_number being 4.```"},
|
||||
{"role": "system", "content": "The schema should have the following structure,
|
||||
only two keys:\n- tool_name: str\n- arguments: dict (with all arguments being
|
||||
passed)\n\nExample:\n{\"tool_name\": \"tool_name\", \"arguments\": {\"arg_name1\":
|
||||
\"value\", \"arg_name2\": 2}}\n"}], "model": "gpt-4", "tool_choice": {"type":
|
||||
"function", "function": {"name": "InstructorToolCalling"}}, "tools": [{"type":
|
||||
"function", "function": {"name": "InstructorToolCalling", "description": "Correctly
|
||||
extracted `InstructorToolCalling` with all the required parameters with correct
|
||||
types", "parameters": {"properties": {"tool_name": {"description": "The name
|
||||
of the tool to be called.", "title": "Tool Name", "type": "string"}, "arguments":
|
||||
{"description": "A dictinary of arguments to be passed to the tool.", "title":
|
||||
"Arguments", "type": "object"}}, "required": ["arguments", "tool_name"], "type":
|
||||
"object"}}}]}'
|
||||
headers:
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- gzip, deflate
|
||||
- gzip, deflate, br
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '1191'
|
||||
- '1433'
|
||||
content-type:
|
||||
- application/json
|
||||
cookie:
|
||||
- __cf_bm=WtYsZIolwSGU.Cw4hxGTqIv0MRGYu4Anw8lzJON1fgk-1707810636-1-Adr1AXb4JT1VMT15njZVe64VjGKYws2k3wQOAEcin03j8Lk4b6WDtV0AG8+mydIMxiPJ++Wwk5ZpUN+jjSGK1Pg=;
|
||||
_cfuvid=kNhuSk4gM0Tj8M010_XI7rHzNLZx1vXRlB.DYvetRy8-1707810636228-0-604800000
|
||||
host:
|
||||
- api.openai.com
|
||||
user-agent:
|
||||
- OpenAI/Python 1.12.0
|
||||
x-stainless-arch:
|
||||
- arm64
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- MacOS
|
||||
x-stainless-package-version:
|
||||
- 1.12.0
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.11.7
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: 'data: {"id":"chatcmpl-8rhqqdEmcBPxCmMRLTZFyzdYtelgh","object":"chat.completion.chunk","created":1707810640,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqqdEmcBPxCmMRLTZFyzdYtelgh","object":"chat.completion.chunk","created":1707810640,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Do"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqqdEmcBPxCmMRLTZFyzdYtelgh","object":"chat.completion.chunk","created":1707810640,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
I"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqqdEmcBPxCmMRLTZFyzdYtelgh","object":"chat.completion.chunk","created":1707810640,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
need"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqqdEmcBPxCmMRLTZFyzdYtelgh","object":"chat.completion.chunk","created":1707810640,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
to"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqqdEmcBPxCmMRLTZFyzdYtelgh","object":"chat.completion.chunk","created":1707810640,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
use"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqqdEmcBPxCmMRLTZFyzdYtelgh","object":"chat.completion.chunk","created":1707810640,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
a"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqqdEmcBPxCmMRLTZFyzdYtelgh","object":"chat.completion.chunk","created":1707810640,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
tool"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqqdEmcBPxCmMRLTZFyzdYtelgh","object":"chat.completion.chunk","created":1707810640,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"?"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqqdEmcBPxCmMRLTZFyzdYtelgh","object":"chat.completion.chunk","created":1707810640,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
No"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqqdEmcBPxCmMRLTZFyzdYtelgh","object":"chat.completion.chunk","created":1707810640,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"\n"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqqdEmcBPxCmMRLTZFyzdYtelgh","object":"chat.completion.chunk","created":1707810640,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqqdEmcBPxCmMRLTZFyzdYtelgh","object":"chat.completion.chunk","created":1707810640,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
Answer"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqqdEmcBPxCmMRLTZFyzdYtelgh","object":"chat.completion.chunk","created":1707810640,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqqdEmcBPxCmMRLTZFyzdYtelgh","object":"chat.completion.chunk","created":1707810640,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqqdEmcBPxCmMRLTZFyzdYtelgh","object":"chat.completion.chunk","created":1707810640,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"3"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqqdEmcBPxCmMRLTZFyzdYtelgh","object":"chat.completion.chunk","created":1707810640,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
times"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqqdEmcBPxCmMRLTZFyzdYtelgh","object":"chat.completion.chunk","created":1707810640,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqqdEmcBPxCmMRLTZFyzdYtelgh","object":"chat.completion.chunk","created":1707810640,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"4"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqqdEmcBPxCmMRLTZFyzdYtelgh","object":"chat.completion.chunk","created":1707810640,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
equals"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqqdEmcBPxCmMRLTZFyzdYtelgh","object":"chat.completion.chunk","created":1707810640,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqqdEmcBPxCmMRLTZFyzdYtelgh","object":"chat.completion.chunk","created":1707810640,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"12"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqqdEmcBPxCmMRLTZFyzdYtelgh","object":"chat.completion.chunk","created":1707810640,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8rhqqdEmcBPxCmMRLTZFyzdYtelgh","object":"chat.completion.chunk","created":1707810640,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
|
||||
|
||||
|
||||
data: [DONE]
|
||||
|
||||
|
||||
'
|
||||
headers:
|
||||
CF-Cache-Status:
|
||||
- DYNAMIC
|
||||
CF-RAY:
|
||||
- 854b7b5718b2fb50-SJC
|
||||
Cache-Control:
|
||||
- no-cache, must-revalidate
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- text/event-stream
|
||||
Date:
|
||||
- Tue, 13 Feb 2024 07:50:41 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
access-control-allow-origin:
|
||||
- '*'
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
openai-model:
|
||||
- gpt-4-0613
|
||||
openai-organization:
|
||||
- user-z7g4wmlazxqvc5wjyaaaocfz
|
||||
openai-processing-ms:
|
||||
- '452'
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
strict-transport-security:
|
||||
- max-age=15724800; includeSubDomains
|
||||
x-ratelimit-limit-requests:
|
||||
- '10000'
|
||||
x-ratelimit-limit-tokens:
|
||||
- '300000'
|
||||
x-ratelimit-remaining-requests:
|
||||
- '9999'
|
||||
x-ratelimit-remaining-tokens:
|
||||
- '299725'
|
||||
x-ratelimit-reset-requests:
|
||||
- 6ms
|
||||
x-ratelimit-reset-tokens:
|
||||
- 54ms
|
||||
x-request-id:
|
||||
- req_54f5eae0b42e9964627e87c7f23ccb83
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"messages": [{"role": "user", "content": "Progressively summarize the
|
||||
lines of conversation provided, adding onto the previous summary returning a
|
||||
new summary.\n\nEXAMPLE\nCurrent summary:\nThe human asks what the AI thinks
|
||||
of artificial intelligence. The AI thinks artificial intelligence is a force
|
||||
for good.\n\nNew lines of conversation:\nHuman: Why do you think artificial
|
||||
intelligence is a force for good?\nAI: Because artificial intelligence will
|
||||
help humans reach their full potential.\n\nNew summary:\nThe human asks what
|
||||
the AI thinks of artificial intelligence. The AI thinks artificial intelligence
|
||||
is a force for good because it will help humans reach their full potential.\nEND
|
||||
OF EXAMPLE\n\nCurrent summary:\n\n\nNew lines of conversation:\nHuman: What
|
||||
is 3 times 4?\nAI: 3 times 4 equals 12.\n\nNew summary:"}], "model": "gpt-4",
|
||||
"n": 1, "stream": false, "temperature": 0.7}'
|
||||
headers:
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- gzip, deflate
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '890'
|
||||
content-type:
|
||||
- application/json
|
||||
cookie:
|
||||
- __cf_bm=WtYsZIolwSGU.Cw4hxGTqIv0MRGYu4Anw8lzJON1fgk-1707810636-1-Adr1AXb4JT1VMT15njZVe64VjGKYws2k3wQOAEcin03j8Lk4b6WDtV0AG8+mydIMxiPJ++Wwk5ZpUN+jjSGK1Pg=;
|
||||
_cfuvid=kNhuSk4gM0Tj8M010_XI7rHzNLZx1vXRlB.DYvetRy8-1707810636228-0-604800000
|
||||
- __cf_bm=YTdmTzN24Ou6kgt2z_xTK4x5guIXbV0_UVAC3Zzsi5M-1708396884-1.0-Ae8kFLY38Uv+hx7e6bbAap3N2fHI7/wqMZEmRN9LsgLZEhHwzhLdwv1t3XblbV+Bsal2wYnhQ2KQTeowkVdcp18=;
|
||||
_cfuvid=KkLrQOr.1VeOh89kGukdRj_iO5I7oPresrBk0t3uJuY-1708396884742-0.0-604800000
|
||||
host:
|
||||
- api.openai.com
|
||||
user-agent:
|
||||
@@ -577,28 +239,29 @@ interactions:
|
||||
response:
|
||||
body:
|
||||
string: !!binary |
|
||||
H4sIAAAAAAAAA1SQzWrDMBCE736KRWcn2PnHt+TSFgqloS2FUoIib2w1sqRo19AS8u5FjpO0Fx12
|
||||
5hvN7jEBELoUBQhVS1aNN4NFqA/hdf22ft4+3rXZ/e59/rRaL1f63R9IpJFw2y9UfKGGyjXeIGtn
|
||||
z7IKKBljaj7P5os8m03yTmhciSZilefBZJDN8nFP1E4rJFHARwIAcOze2M2W+C0KyNLLpEEiWaEo
|
||||
riYAEZyJEyGJNLG0LNKbqJxltF3dlxqhbhtpQdKegGuE5QOwAyWNao1khDGwbpBgkoK05cUSkLyz
|
||||
ZUQk3zyAh1Yagnw0FP2Hp2tT4yof3DZuZVtjrvOdtprqTUBJzsZWxM6f8VMC8NldpP23pPDBNZ43
|
||||
7PZoY2A+nZ7zxO34N3U070V2LM0fajFK+oaCfoix2ey0rTD4oLsDxZ7JKfkFAAD//wMA43iM4hcC
|
||||
AAA=
|
||||
IcwMACBG6nT3Mv39HEJtBWDlfKQmSkWmsv/+VIMA6aI9KLQWrXehnwBFMJgLjy6OaIHTAhVrc21u
|
||||
1ruVHCLKF9De+xcB1DkFmG2SOrPBdBfNzsXNUCcbnx7cPoyuPtzCbQdP18966m7ZiQD6dKuympPp
|
||||
uRhY2jsIGbn8lcopMJwPFuPlbLGYEc9YnytDAa5D3Z10B7PhmOvlxutMVRR4iwDg720C8Nz7EQUG
|
||||
nVbSC0OlAP4IYOmNogCTqtJVnbiaHQBIWzcr4BpjiEu19ybOEmOE40/07xtM0l8TY+LyTofH4XJb
|
||||
qkQNr2dHo+ff23z7O5WJdy/+hEFRHhi9GIo2JgTmALrEKqSeuBDu3/PlvfdmLzFGu3VnLsDF3qAA
|
||||
/6QDpHYXXGKVpIDkMfVrwWhVSnZQFCcmKXABkHzXZVXHW2GtSAqMO1Qqpjy8PpxIB7TStdQBbVvA
|
||||
Ta9wTDsV62j8OpQ+rVaGYXo8JN6KbI0CrGofxPhtBKxMZthYwRmG0ttQx7X/UK6iwGhaEFonQMjj
|
||||
SQU/kwDQvNFiEo2SZcaCG/G7dmtVhlJbkBG1kQED
|
||||
headers:
|
||||
CF-Cache-Status:
|
||||
- DYNAMIC
|
||||
CF-RAY:
|
||||
- 854b7b602f72fb50-SJC
|
||||
- 858363fafa751ac1-GRU
|
||||
Cache-Control:
|
||||
- no-cache, must-revalidate
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Encoding:
|
||||
- gzip
|
||||
- br
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Tue, 13 Feb 2024 07:50:43 GMT
|
||||
- Tue, 20 Feb 2024 02:41:27 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Transfer-Encoding:
|
||||
@@ -610,9 +273,9 @@ interactions:
|
||||
openai-model:
|
||||
- gpt-4-0613
|
||||
openai-organization:
|
||||
- user-z7g4wmlazxqvc5wjyaaaocfz
|
||||
- crewai-iuxna1
|
||||
openai-processing-ms:
|
||||
- '1563'
|
||||
- '1356'
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
strict-transport-security:
|
||||
@@ -624,13 +287,288 @@ interactions:
|
||||
x-ratelimit-remaining-requests:
|
||||
- '9999'
|
||||
x-ratelimit-remaining-tokens:
|
||||
- '299793'
|
||||
- '299805'
|
||||
x-ratelimit-reset-requests:
|
||||
- 6ms
|
||||
x-ratelimit-reset-tokens:
|
||||
- 41ms
|
||||
- 39ms
|
||||
x-request-id:
|
||||
- req_9b32c164d87a9cdf3f87162980820d52
|
||||
- req_7cefd32633dd126cd0ab7ef07392d33a
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"messages": [{"role": "user", "content": "You are test role.\ntest backstory\n\nYour
|
||||
personal goal is: test goalYou have access to ONLY the following tools, use
|
||||
one at time:\n\nmultiplier: multiplier(first_number: int, second_number: int)
|
||||
-> float - Useful for when you need to multiply two numbers together.\n\nTo
|
||||
use a tool you MUST use the exact following format:\n\n```\nUse Tool: the tool
|
||||
you wanna use, should be one of [multiplier] and absolute all relevant input
|
||||
and context for using the tool, you must use only one tool at once.\nResult:
|
||||
[result of the tool]\n```\n\nTo complete the task you MUST follow the format:\n\n```\nFinal
|
||||
Answer: [THE MOST COMPLETE ANSWE WITH ALL CONTEXT, DO NOT LEAVE ANYTHING OUT]\n```
|
||||
You must use these formats, my life depends on it.This is the summary of your
|
||||
work so far:\nBegin! This is VERY important to you, your job depends on it!\n\nCurrent
|
||||
Task: What is 3 times 4?\nUse Tool: multiplier, with the first_number being
|
||||
3 and the second_number being 4.\nResult: 12\nIf you don''t need to use any
|
||||
more tools, use the correct format for your final answer:\n\n```Final Answer:
|
||||
[your most complete final answer goes here]```\nThought: "}], "model": "gpt-4",
|
||||
"n": 1, "stop": ["\nResult"], "stream": true, "temperature": 0.7}'
|
||||
headers:
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- gzip, deflate, br
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '1263'
|
||||
content-type:
|
||||
- application/json
|
||||
cookie:
|
||||
- __cf_bm=YTdmTzN24Ou6kgt2z_xTK4x5guIXbV0_UVAC3Zzsi5M-1708396884-1.0-Ae8kFLY38Uv+hx7e6bbAap3N2fHI7/wqMZEmRN9LsgLZEhHwzhLdwv1t3XblbV+Bsal2wYnhQ2KQTeowkVdcp18=;
|
||||
_cfuvid=KkLrQOr.1VeOh89kGukdRj_iO5I7oPresrBk0t3uJuY-1708396884742-0.0-604800000
|
||||
host:
|
||||
- api.openai.com
|
||||
user-agent:
|
||||
- OpenAI/Python 1.12.0
|
||||
x-stainless-arch:
|
||||
- arm64
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- MacOS
|
||||
x-stainless-package-version:
|
||||
- 1.12.0
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.11.7
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: 'data: {"id":"chatcmpl-8uAMSREWkhZerctDqtYFlGURZWJW7","object":"chat.completion.chunk","created":1708396888,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8uAMSREWkhZerctDqtYFlGURZWJW7","object":"chat.completion.chunk","created":1708396888,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8uAMSREWkhZerctDqtYFlGURZWJW7","object":"chat.completion.chunk","created":1708396888,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
Answer"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8uAMSREWkhZerctDqtYFlGURZWJW7","object":"chat.completion.chunk","created":1708396888,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8uAMSREWkhZerctDqtYFlGURZWJW7","object":"chat.completion.chunk","created":1708396888,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
The"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8uAMSREWkhZerctDqtYFlGURZWJW7","object":"chat.completion.chunk","created":1708396888,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
result"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8uAMSREWkhZerctDqtYFlGURZWJW7","object":"chat.completion.chunk","created":1708396888,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
of"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8uAMSREWkhZerctDqtYFlGURZWJW7","object":"chat.completion.chunk","created":1708396888,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
multiplying"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8uAMSREWkhZerctDqtYFlGURZWJW7","object":"chat.completion.chunk","created":1708396888,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8uAMSREWkhZerctDqtYFlGURZWJW7","object":"chat.completion.chunk","created":1708396888,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"3"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8uAMSREWkhZerctDqtYFlGURZWJW7","object":"chat.completion.chunk","created":1708396888,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
by"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8uAMSREWkhZerctDqtYFlGURZWJW7","object":"chat.completion.chunk","created":1708396888,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8uAMSREWkhZerctDqtYFlGURZWJW7","object":"chat.completion.chunk","created":1708396888,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"4"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8uAMSREWkhZerctDqtYFlGURZWJW7","object":"chat.completion.chunk","created":1708396888,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
is"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8uAMSREWkhZerctDqtYFlGURZWJW7","object":"chat.completion.chunk","created":1708396888,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8uAMSREWkhZerctDqtYFlGURZWJW7","object":"chat.completion.chunk","created":1708396888,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"12"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8uAMSREWkhZerctDqtYFlGURZWJW7","object":"chat.completion.chunk","created":1708396888,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8uAMSREWkhZerctDqtYFlGURZWJW7","object":"chat.completion.chunk","created":1708396888,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
|
||||
|
||||
|
||||
data: [DONE]
|
||||
|
||||
|
||||
'
|
||||
headers:
|
||||
CF-Cache-Status:
|
||||
- DYNAMIC
|
||||
CF-RAY:
|
||||
- 858364053bd91ac1-GRU
|
||||
Cache-Control:
|
||||
- no-cache, must-revalidate
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- text/event-stream
|
||||
Date:
|
||||
- Tue, 20 Feb 2024 02:41:28 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
access-control-allow-origin:
|
||||
- '*'
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
openai-model:
|
||||
- gpt-4-0613
|
||||
openai-organization:
|
||||
- crewai-iuxna1
|
||||
openai-processing-ms:
|
||||
- '262'
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
strict-transport-security:
|
||||
- max-age=15724800; includeSubDomains
|
||||
x-ratelimit-limit-requests:
|
||||
- '10000'
|
||||
x-ratelimit-limit-tokens:
|
||||
- '300000'
|
||||
x-ratelimit-remaining-requests:
|
||||
- '9999'
|
||||
x-ratelimit-remaining-tokens:
|
||||
- '299705'
|
||||
x-ratelimit-reset-requests:
|
||||
- 6ms
|
||||
x-ratelimit-reset-tokens:
|
||||
- 58ms
|
||||
x-request-id:
|
||||
- req_08bcc000c2685d350fbf0c4602113500
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"messages": [{"role": "user", "content": "Progressively summarize the
|
||||
lines of conversation provided, adding onto the previous summary returning a
|
||||
new summary.\n\nEXAMPLE\nCurrent summary:\nThe human asks what the AI thinks
|
||||
of artificial intelligence. The AI thinks artificial intelligence is a force
|
||||
for good.\n\nNew lines of conversation:\nHuman: Why do you think artificial
|
||||
intelligence is a force for good?\nAI: Because artificial intelligence will
|
||||
help humans reach their full potential.\n\nNew summary:\nThe human asks what
|
||||
the AI thinks of artificial intelligence. The AI thinks artificial intelligence
|
||||
is a force for good because it will help humans reach their full potential.\nEND
|
||||
OF EXAMPLE\n\nCurrent summary:\n\n\nNew lines of conversation:\nHuman: What
|
||||
is 3 times 4?\nAI: The result of multiplying 3 by 4 is 12.\n\nNew summary:"}],
|
||||
"model": "gpt-4", "n": 1, "stream": false, "temperature": 0.7}'
|
||||
headers:
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- gzip, deflate, br
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '909'
|
||||
content-type:
|
||||
- application/json
|
||||
cookie:
|
||||
- __cf_bm=YTdmTzN24Ou6kgt2z_xTK4x5guIXbV0_UVAC3Zzsi5M-1708396884-1.0-Ae8kFLY38Uv+hx7e6bbAap3N2fHI7/wqMZEmRN9LsgLZEhHwzhLdwv1t3XblbV+Bsal2wYnhQ2KQTeowkVdcp18=;
|
||||
_cfuvid=KkLrQOr.1VeOh89kGukdRj_iO5I7oPresrBk0t3uJuY-1708396884742-0.0-604800000
|
||||
host:
|
||||
- api.openai.com
|
||||
user-agent:
|
||||
- OpenAI/Python 1.12.0
|
||||
x-stainless-arch:
|
||||
- arm64
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- MacOS
|
||||
x-stainless-package-version:
|
||||
- 1.12.0
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.11.7
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: !!binary |
|
||||
IUwIACBG6nTXMv39HEJtzlYAtBUUYMv3fxPcDv+n2aRoLJ54WVgWYAH3ojVEI063+XRckVNU6NRl
|
||||
3/4KgJuGEqzXb7k2XrdnpTq6vKnrm+rL3o+/74Yn96O9/avzt8X4/J0tAdC9b1WdNUmnxMDYOAuB
|
||||
zuVzqqFEf9qbDeeT2WwufGNcozQluPK5PWr3Jv2h1uzabWqVKPEgAOB31wA4935AiV7rFKowBCXw
|
||||
J4DRaUUJvqW0SfnNZrYAJdq6IQlerhUGROy+pV2a8bGn2kN2KPiwMcTzWWsY3Rmr2gMFW7tJIBPG
|
||||
eRrFyVR0xiahP+jQQ/rv6p52Kx/de6KELVqHAPRv6OWprSlKMGXnTbj/AnhKSbVE+Yg+OuPzS3Y7
|
||||
ZRMl+uO5TcTslxBsMFqlzxIoyZz+bCi8pOzIP/Ky3NiVij5uMpS0RWvxLwwD
|
||||
headers:
|
||||
CF-Cache-Status:
|
||||
- DYNAMIC
|
||||
CF-RAY:
|
||||
- 8583640c6a281ac1-GRU
|
||||
Cache-Control:
|
||||
- no-cache, must-revalidate
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Encoding:
|
||||
- br
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Tue, 20 Feb 2024 02:41:30 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
access-control-allow-origin:
|
||||
- '*'
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
openai-model:
|
||||
- gpt-4-0613
|
||||
openai-organization:
|
||||
- crewai-iuxna1
|
||||
openai-processing-ms:
|
||||
- '1325'
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
strict-transport-security:
|
||||
- max-age=15724800; includeSubDomains
|
||||
x-ratelimit-limit-requests:
|
||||
- '10000'
|
||||
x-ratelimit-limit-tokens:
|
||||
- '300000'
|
||||
x-ratelimit-remaining-requests:
|
||||
- '9999'
|
||||
x-ratelimit-remaining-tokens:
|
||||
- '299788'
|
||||
x-ratelimit-reset-requests:
|
||||
- 6ms
|
||||
x-ratelimit-reset-tokens:
|
||||
- 42ms
|
||||
x-request-id:
|
||||
- req_ea6386ef1de0b5946d30e8726a561130
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
|
||||
385
tests/cassettes/test_output_json.yaml
Normal file
@@ -0,0 +1,385 @@
interactions: [Four recorded exchanges: (1) a streamed gpt-4 completion in which the Scorer agent replies "Final Answer: 4" to "Give me an integer score between 1-5 for the following title: 'The impact of AI in the future of work'"; (2) a gpt-4 completion that progressively summarizes that conversation; (3) a protobuf crewAI-telemetry trace posted to http://telemetry.crewai.com:4318/v1/traces; (4) a gpt-4 tool call that extracts ScoreOutput (an integer "score") from the answer "4". All responses returned 200 OK with standard OpenAI/Cloudflare headers; non-streamed completion bodies are gzip-compressed JSON.]
version: 1
726
tests/cassettes/test_output_json_to_another_task.yaml
Normal file
@@ -0,0 +1,726 @@
interactions: [Seven recorded exchanges covering two chained tasks: (1) a streamed gpt-4 completion in which the Scorer replies "Final Answer: 4" for the title 'The impact of AI in the future of work'; (2) a gpt-4 completion that progressively summarizes that conversation; (3) a gpt-4 tool call extracting ScoreOutput with score 4; (4) a second streamed completion that, given the context {"score": 4}, replies "Final Answer: 5" for the title 'Return of the Jedi'; (5) a protobuf crewAI-telemetry trace posted to http://telemetry.crewai.com:4318/v1/traces; (6) another conversation-summary completion; (7) a gpt-4 tool call extracting ScoreOutput with score 5. All responses returned 200 OK with standard OpenAI/Cloudflare headers; non-streamed completion bodies are gzip-compressed JSON.]
version: 1
525
tests/cassettes/test_output_pydantic.yaml
Normal file
@@ -0,0 +1,525 @@
interactions:
- request:
    body: !!binary |
      [base64-encoded crewAI-telemetry protobuf payload: "Crew Created", "Tool Usage", and "Tool Usage Error" spans carrying crewai_version, crew, agent, task, and platform metadata]
|
||||
bTY0LWFybS02NGJpdEocChBwbGF0Zm9ybV9yZWxlYXNlEggKBjIzLjMuMEobCg9wbGF0Zm9ybV9z
|
||||
eXN0ZW0SCAoGRGFyd2luSnsKEHBsYXRmb3JtX3ZlcnNpb24SZwplRGFyd2luIEtlcm5lbCBWZXJz
|
||||
aW9uIDIzLjMuMDogV2VkIERlYyAyMCAyMTozMDo1OSBQU1QgMjAyMzsgcm9vdDp4bnUtMTAwMDIu
|
||||
ODEuNX43L1JFTEVBU0VfQVJNNjRfVDYwMzBKCgoEY3B1cxICGAx6AhgBEq4BChA336nwdhAGiCcD
|
||||
5r4S8p4cEgj4FIDf/XjQoCoQVG9vbCBVc2FnZSBFcnJvcjABORjDx78t8LcXQWgJyL8t8LcXSmYK
|
||||
A2xsbRJfCl17Im5hbWUiOiBudWxsLCAibW9kZWxfbmFtZSI6ICJncHQtMy41LXR1cmJvLTAxMjUi
|
||||
LCAidGVtcGVyYXR1cmUiOiAwLjcsICJjbGFzcyI6ICJDaGF0T3BlbkFJIn16AhgBEq4BChCwFwqo
|
||||
hBc9Ko/tcr5iU/vSEghSywDYYEDOASoQVG9vbCBVc2FnZSBFcnJvcjABOfDuk8It8LcXQXAtlMIt
|
||||
8LcXSmYKA2xsbRJfCl17Im5hbWUiOiBudWxsLCAibW9kZWxfbmFtZSI6ICJncHQtMy41LXR1cmJv
|
||||
LTAxMjUiLCAidGVtcGVyYXR1cmUiOiAwLjcsICJjbGFzcyI6ICJDaGF0T3BlbkFJIn16AhgBEoII
|
||||
ChD2Im5nOxM/PbkkWapiVXZDEghKDVMkqgJ/kCoMQ3JldyBDcmVhdGVkMAE5MAEQxy3wtxdBoI8R
|
||||
xy3wtxdKGgoOY3Jld2FpX3ZlcnNpb24SCAoGMC4xNC40ShoKDnB5dGhvbl92ZXJzaW9uEggKBjMu
|
||||
MTEuN0oxCgdjcmV3X2lkEiYKJDFiYzIzNDFiLTdlNjEtNDFkMy04NmNkLTcyZDVkZDVlNGE2OUoc
|
||||
CgxjcmV3X3Byb2Nlc3MSDAoKc2VxdWVudGlhbEoVCg1jcmV3X2xhbmd1YWdlEgQKAmVuShoKFGNy
|
||||
ZXdfbnVtYmVyX29mX3Rhc2tzEgIYAUobChVjcmV3X251bWJlcl9vZl9hZ2VudHMSAhgBStoCCgtj
|
||||
cmV3X2FnZW50cxLKAgrHAlt7ImlkIjogIjdhNmM1MTMzLTM2Y2UtNDlmZC05NjBiLWRjYTkwODQ1
|
||||
NTFiMCIsICJyb2xlIjogIlJlc2VhcmNoZXIiLCAibWVtb3J5X2VuYWJsZWQ/IjogZmFsc2UsICJ2
|
||||
ZXJib3NlPyI6IGZhbHNlLCAibWF4X2l0ZXIiOiAxNSwgIm1heF9ycG0iOiBudWxsLCAiaTE4biI6
|
||||
ICJlbiIsICJsbG0iOiAie1wibmFtZVwiOiBudWxsLCBcIm1vZGVsX25hbWVcIjogXCJncHQtNFwi
|
||||
LCBcInRlbXBlcmF0dXJlXCI6IDAuNywgXCJjbGFzc1wiOiBcIkNoYXRPcGVuQUlcIn0iLCAiZGVs
|
||||
ZWdhdGlvbl9lbmFibGVkPyI6IGZhbHNlLCAidG9vbHNfbmFtZXMiOiBbInJldHVybl9kYXRhIl19
|
||||
XUqZAQoKY3Jld190YXNrcxKKAQqHAVt7ImlkIjogImRkOWYyNmZmLWZiOWItNDgwMy1hNzI3LWRm
|
||||
YjI4ZTJkZWNhNyIsICJhc3luY19leGVjdXRpb24/IjogZmFsc2UsICJhZ2VudF9yb2xlIjogIlJl
|
||||
c2VhcmNoZXIiLCAidG9vbHNfbmFtZXMiOiBbInJldHVybl9kYXRhIl19XUooCghwbGF0Zm9ybRIc
|
||||
ChptYWNPUy0xNC4zLWFybTY0LWFybS02NGJpdEocChBwbGF0Zm9ybV9yZWxlYXNlEggKBjIzLjMu
|
||||
MEobCg9wbGF0Zm9ybV9zeXN0ZW0SCAoGRGFyd2luSnsKEHBsYXRmb3JtX3ZlcnNpb24SZwplRGFy
|
||||
d2luIEtlcm5lbCBWZXJzaW9uIDIzLjMuMDogV2VkIERlYyAyMCAyMTozMDo1OSBQU1QgMjAyMzsg
|
||||
cm9vdDp4bnUtMTAwMDIuODEuNX43L1JFTEVBU0VfQVJNNjRfVDYwMzBKCgoEY3B1cxICGAx6AhgB
|
||||
EscBChBtFb7dkmfkKuii1fdHvi3REggrx+F2Oq7sDCoKVG9vbCBVc2FnZTABOUApZsot8LcXQdCO
|
||||
Zsot8LcXShoKCXRvb2xfbmFtZRINCgtyZXR1cm5fZGF0YUoOCghhdHRlbXB0cxICGAFKWQoDbGxt
|
||||
ElIKUHsibmFtZSI6IG51bGwsICJtb2RlbF9uYW1lIjogImdwdC00IiwgInRlbXBlcmF0dXJlIjog
|
||||
MC43LCAiY2xhc3MiOiAiQ2hhdE9wZW5BSSJ9egIYARLlBwoQJkOw3Q6f/N8fwhiQSxI+pxIIwB52
|
||||
LJ7U5LsqDENyZXcgQ3JlYXRlZDABOeAH7s0t8LcXQaDc780t8LcXShoKDmNyZXdhaV92ZXJzaW9u
|
||||
EggKBjAuMTQuNEoaCg5weXRob25fdmVyc2lvbhIICgYzLjExLjdKMQoHY3Jld19pZBImCiQyYTVk
|
||||
ODk0MS03ZWMyLTQ5ODEtYTc1Yi0wM2FiMGYyNTM3MzJKHAoMY3Jld19wcm9jZXNzEgwKCnNlcXVl
|
||||
bnRpYWxKFQoNY3Jld19sYW5ndWFnZRIECgJlbkoaChRjcmV3X251bWJlcl9vZl90YXNrcxICGAFK
|
||||
GwoVY3Jld19udW1iZXJfb2ZfYWdlbnRzEgIYAUrMAgoLY3Jld19hZ2VudHMSvAIKuQJbeyJpZCI6
|
||||
ICJkMjJlYjliOS0xYWE5LTQzNTYtOWZiNi0xYTQ2YjI2Nzc0MGQiLCAicm9sZSI6ICJSZXNlYXJj
|
||||
aGVyIiwgIm1lbW9yeV9lbmFibGVkPyI6IGZhbHNlLCAidmVyYm9zZT8iOiBmYWxzZSwgIm1heF9p
|
||||
dGVyIjogMTUsICJtYXhfcnBtIjogbnVsbCwgImkxOG4iOiAiZW4iLCAibGxtIjogIntcIm5hbWVc
|
||||
IjogbnVsbCwgXCJtb2RlbF9uYW1lXCI6IFwiZ3B0LTRcIiwgXCJ0ZW1wZXJhdHVyZVwiOiAwLjcs
|
||||
IFwiY2xhc3NcIjogXCJDaGF0T3BlbkFJXCJ9IiwgImRlbGVnYXRpb25fZW5hYmxlZD8iOiB0cnVl
|
||||
LCAidG9vbHNfbmFtZXMiOiBbXX1dSooBCgpjcmV3X3Rhc2tzEnwKelt7ImlkIjogIjM2NGZlODg2
|
||||
LTYzMjMtNDYwNC1iMDVhLWMzOGY3MTg3N2Y0MiIsICJhc3luY19leGVjdXRpb24/IjogZmFsc2Us
|
||||
ICJhZ2VudF9yb2xlIjogIlJlc2VhcmNoZXIiLCAidG9vbHNfbmFtZXMiOiBbXX1dSigKCHBsYXRm
|
||||
b3JtEhwKGm1hY09TLTE0LjMtYXJtNjQtYXJtLTY0Yml0ShwKEHBsYXRmb3JtX3JlbGVhc2USCAoG
|
||||
MjMuMy4wShsKD3BsYXRmb3JtX3N5c3RlbRIICgZEYXJ3aW5KewoQcGxhdGZvcm1fdmVyc2lvbhJn
|
||||
CmVEYXJ3aW4gS2VybmVsIFZlcnNpb24gMjMuMy4wOiBXZWQgRGVjIDIwIDIxOjMwOjU5IFBTVCAy
|
||||
MDIzOyByb290OnhudS0xMDAwMi44MS41fjcvUkVMRUFTRV9BUk02NF9UNjAzMEoKCgRjcHVzEgIY
|
||||
DHoCGAES5gcKEHSq/vk2rj36HCEpWqoCs+4SCJe4bAUYCZg/KgxDcmV3IENyZWF0ZWQwATkgy6PO
|
||||
LfC3F0FIZaXOLfC3F0oaCg5jcmV3YWlfdmVyc2lvbhIICgYwLjE0LjRKGgoOcHl0aG9uX3ZlcnNp
|
||||
b24SCAoGMy4xMS43SjEKB2NyZXdfaWQSJgokZjA1ZTU0NTUtMzUxNi00Mzk2LTlkMWEtNjQ5Nzcy
|
||||
NmI3ZDRkShwKDGNyZXdfcHJvY2VzcxIMCgpzZXF1ZW50aWFsShUKDWNyZXdfbGFuZ3VhZ2USBAoC
|
||||
ZW5KGgoUY3Jld19udW1iZXJfb2ZfdGFza3MSAhgBShsKFWNyZXdfbnVtYmVyX29mX2FnZW50cxIC
|
||||
GAFKzQIKC2NyZXdfYWdlbnRzEr0CCroCW3siaWQiOiAiNTAwN2U1YWUtMGRhOC00Mzg0LWIxMGMt
|
||||
NmFkNmZhYzY1YmQ1IiwgInJvbGUiOiAiUmVzZWFyY2hlciIsICJtZW1vcnlfZW5hYmxlZD8iOiBm
|
||||
YWxzZSwgInZlcmJvc2U/IjogZmFsc2UsICJtYXhfaXRlciI6IDE1LCAibWF4X3JwbSI6IG51bGws
|
||||
ICJpMThuIjogImVuIiwgImxsbSI6ICJ7XCJuYW1lXCI6IG51bGwsIFwibW9kZWxfbmFtZVwiOiBc
|
||||
ImdwdC00XCIsIFwidGVtcGVyYXR1cmVcIjogMC43LCBcImNsYXNzXCI6IFwiQ2hhdE9wZW5BSVwi
|
||||
fSIsICJkZWxlZ2F0aW9uX2VuYWJsZWQ/IjogZmFsc2UsICJ0b29sc19uYW1lcyI6IFtdfV1KigEK
|
||||
CmNyZXdfdGFza3MSfAp6W3siaWQiOiAiZDQ2MTIzNGItMzhkOS00MjQzLWFlNTItZmQ1NWU0OWFh
|
||||
YWVkIiwgImFzeW5jX2V4ZWN1dGlvbj8iOiBmYWxzZSwgImFnZW50X3JvbGUiOiAiUmVzZWFyY2hl
|
||||
ciIsICJ0b29sc19uYW1lcyI6IFtdfV1KKAoIcGxhdGZvcm0SHAoabWFjT1MtMTQuMy1hcm02NC1h
|
||||
cm0tNjRiaXRKHAoQcGxhdGZvcm1fcmVsZWFzZRIICgYyMy4zLjBKGwoPcGxhdGZvcm1fc3lzdGVt
|
||||
EggKBkRhcndpbkp7ChBwbGF0Zm9ybV92ZXJzaW9uEmcKZURhcndpbiBLZXJuZWwgVmVyc2lvbiAy
|
||||
My4zLjA6IFdlZCBEZWMgMjAgMjE6MzA6NTkgUFNUIDIwMjM7IHJvb3Q6eG51LTEwMDAyLjgxLjV+
|
||||
Ny9SRUxFQVNFX0FSTTY0X1Q2MDMwSgoKBGNwdXMSAhgMegIYARLmBwoQWIC/smCAKXQftjZ3LMBw
|
||||
ZxIIMF3pvdlzK94qDENyZXcgQ3JlYXRlZDABOWCvA9It8LcXQXgiBdIt8LcXShoKDmNyZXdhaV92
|
||||
ZXJzaW9uEggKBjAuMTQuNEoaCg5weXRob25fdmVyc2lvbhIICgYzLjExLjdKMQoHY3Jld19pZBIm
|
||||
CiRmNzZjM2NjZC1iZTI4LTQxOGUtYmNmNS1lMjMwNDIyMDEzOGVKHAoMY3Jld19wcm9jZXNzEgwK
|
||||
CnNlcXVlbnRpYWxKFQoNY3Jld19sYW5ndWFnZRIECgJlbkoaChRjcmV3X251bWJlcl9vZl90YXNr
|
||||
cxICGAFKGwoVY3Jld19udW1iZXJfb2ZfYWdlbnRzEgIYAUrNAgoLY3Jld19hZ2VudHMSvQIKugJb
|
||||
eyJpZCI6ICJjNjNmOTBiZC0wYzVlLTRmYzQtODA0Yi05ZmNhYzJhNzAxNTUiLCAicm9sZSI6ICJS
|
||||
ZXNlYXJjaGVyIiwgIm1lbW9yeV9lbmFibGVkPyI6IGZhbHNlLCAidmVyYm9zZT8iOiBmYWxzZSwg
|
||||
Im1heF9pdGVyIjogMTUsICJtYXhfcnBtIjogbnVsbCwgImkxOG4iOiAiZW4iLCAibGxtIjogIntc
|
||||
Im5hbWVcIjogbnVsbCwgXCJtb2RlbF9uYW1lXCI6IFwiZ3B0LTRcIiwgXCJ0ZW1wZXJhdHVyZVwi
|
||||
OiAwLjcsIFwiY2xhc3NcIjogXCJDaGF0T3BlbkFJXCJ9IiwgImRlbGVnYXRpb25fZW5hYmxlZD8i
|
||||
OiBmYWxzZSwgInRvb2xzX25hbWVzIjogW119XUqKAQoKY3Jld190YXNrcxJ8CnpbeyJpZCI6ICJj
|
||||
NzFmZGM0ZC1iY2Q5LTQ4MTAtODU4Ni0xNjZhM2U0ZWYyNzMiLCAiYXN5bmNfZXhlY3V0aW9uPyI6
|
||||
IGZhbHNlLCAiYWdlbnRfcm9sZSI6ICJSZXNlYXJjaGVyIiwgInRvb2xzX25hbWVzIjogW119XUoo
|
||||
CghwbGF0Zm9ybRIcChptYWNPUy0xNC4zLWFybTY0LWFybS02NGJpdEocChBwbGF0Zm9ybV9yZWxl
|
||||
YXNlEggKBjIzLjMuMEobCg9wbGF0Zm9ybV9zeXN0ZW0SCAoGRGFyd2luSnsKEHBsYXRmb3JtX3Zl
|
||||
cnNpb24SZwplRGFyd2luIEtlcm5lbCBWZXJzaW9uIDIzLjMuMDogV2VkIERlYyAyMCAyMTozMDo1
|
||||
OSBQU1QgMjAyMzsgcm9vdDp4bnUtMTAwMDIuODEuNX43L1JFTEVBU0VfQVJNNjRfVDYwMzBKCgoE
|
||||
Y3B1cxICGAx6AhgBEuIHChDAICs8Tq+1TMIXgv4snrK8EghRPrgi8fajZioMQ3JldyBDcmVhdGVk
|
||||
MAE5yP3p1S3wtxdBaITr1S3wtxdKGgoOY3Jld2FpX3ZlcnNpb24SCAoGMC4xNC40ShoKDnB5dGhv
|
||||
bl92ZXJzaW9uEggKBjMuMTEuN0oxCgdjcmV3X2lkEiYKJGIwMmE0OThiLTE1NzYtNDczYi04MjIy
|
||||
LTVhYmY3NDdjODIyZEoeCgxjcmV3X3Byb2Nlc3MSDgoMaGllcmFyY2hpY2FsShUKDWNyZXdfbGFu
|
||||
Z3VhZ2USBAoCZW5KGgoUY3Jld19udW1iZXJfb2ZfdGFza3MSAhgBShsKFWNyZXdfbnVtYmVyX29m
|
||||
X2FnZW50cxICGAFKzQIKC2NyZXdfYWdlbnRzEr0CCroCW3siaWQiOiAiNDFhZWIwMGEtYjNlZS00
|
||||
YzM0LTg4ZjgtNDIyMmJiNDgxYmNkIiwgInJvbGUiOiAiUmVzZWFyY2hlciIsICJtZW1vcnlfZW5h
|
||||
YmxlZD8iOiBmYWxzZSwgInZlcmJvc2U/IjogZmFsc2UsICJtYXhfaXRlciI6IDE1LCAibWF4X3Jw
|
||||
bSI6IG51bGwsICJpMThuIjogImVuIiwgImxsbSI6ICJ7XCJuYW1lXCI6IG51bGwsIFwibW9kZWxf
|
||||
bmFtZVwiOiBcImdwdC00XCIsIFwidGVtcGVyYXR1cmVcIjogMC43LCBcImNsYXNzXCI6IFwiQ2hh
|
||||
dE9wZW5BSVwifSIsICJkZWxlZ2F0aW9uX2VuYWJsZWQ/IjogZmFsc2UsICJ0b29sc19uYW1lcyI6
|
||||
IFtdfV1KhAEKCmNyZXdfdGFza3MSdgp0W3siaWQiOiAiODZkNGQwZDEtY2JiNC00YjlhLThlMTEt
|
||||
ZTE3ODI1MjYxYWNhIiwgImFzeW5jX2V4ZWN1dGlvbj8iOiBmYWxzZSwgImFnZW50X3JvbGUiOiAi
|
||||
Tm9uZSIsICJ0b29sc19uYW1lcyI6IFtdfV1KKAoIcGxhdGZvcm0SHAoabWFjT1MtMTQuMy1hcm02
|
||||
NC1hcm0tNjRiaXRKHAoQcGxhdGZvcm1fcmVsZWFzZRIICgYyMy4zLjBKGwoPcGxhdGZvcm1fc3lz
|
||||
dGVtEggKBkRhcndpbkp7ChBwbGF0Zm9ybV92ZXJzaW9uEmcKZURhcndpbiBLZXJuZWwgVmVyc2lv
|
||||
biAyMy4zLjA6IFdlZCBEZWMgMjAgMjE6MzA6NTkgUFNUIDIwMjM7IHJvb3Q6eG51LTEwMDAyLjgx
|
||||
LjV+Ny9SRUxFQVNFX0FSTTY0X1Q2MDMwSgoKBGNwdXMSAhgMegIYARLWAQoQkyWjrn/TLDtsCJzm
|
||||
IkycPxIICz81JziWnS8qClRvb2wgVXNhZ2UwATkgW6neLfC3F0HguKneLfC3F0opCgl0b29sX25h
|
||||
bWUSHAoaRGVsZWdhdGUgd29yayB0byBjby13b3JrZXJKDgoIYXR0ZW1wdHMSAhgBSlkKA2xsbRJS
|
||||
ClB7Im5hbWUiOiBudWxsLCAibW9kZWxfbmFtZSI6ICJncHQtNCIsICJ0ZW1wZXJhdHVyZSI6IDAu
|
||||
MCwgImNsYXNzIjogIkNoYXRPcGVuQUkifXoCGAES3gcKEO+8FUdBgGjPyb3nijwXUngSCCYWis3T
|
||||
OHpKKgxDcmV3IENyZWF0ZWQwATmI9n/mLfC3F0Ewz4HmLfC3F0oaCg5jcmV3YWlfdmVyc2lvbhII
|
||||
CgYwLjE0LjRKGgoOcHl0aG9uX3ZlcnNpb24SCAoGMy4xMS43SjEKB2NyZXdfaWQSJgokZmYyMDQ4
|
||||
NWItOTQ5Mi00Nzg0LWJhZDYtNjY3NTVmYWI5YmNlShwKDGNyZXdfcHJvY2VzcxIMCgpzZXF1ZW50
|
||||
aWFsShUKDWNyZXdfbGFuZ3VhZ2USBAoCZW5KGgoUY3Jld19udW1iZXJfb2ZfdGFza3MSAhgBShsK
|
||||
FWNyZXdfbnVtYmVyX29mX2FnZW50cxICGAFKyQIKC2NyZXdfYWdlbnRzErkCCrYCW3siaWQiOiAi
|
||||
MTllODFiMjQtNmZkOC00YTUxLWJiOGItNWJjZmE5OTNhZDI0IiwgInJvbGUiOiAiU2NvcmVyIiwg
|
||||
Im1lbW9yeV9lbmFibGVkPyI6IGZhbHNlLCAidmVyYm9zZT8iOiBmYWxzZSwgIm1heF9pdGVyIjog
|
||||
MTUsICJtYXhfcnBtIjogbnVsbCwgImkxOG4iOiAiZW4iLCAibGxtIjogIntcIm5hbWVcIjogbnVs
|
||||
bCwgXCJtb2RlbF9uYW1lXCI6IFwiZ3B0LTRcIiwgXCJ0ZW1wZXJhdHVyZVwiOiAwLjcsIFwiY2xh
|
||||
c3NcIjogXCJDaGF0T3BlbkFJXCJ9IiwgImRlbGVnYXRpb25fZW5hYmxlZD8iOiBmYWxzZSwgInRv
|
||||
b2xzX25hbWVzIjogW119XUqGAQoKY3Jld190YXNrcxJ4CnZbeyJpZCI6ICJlMDg1NjI4Ny0wZmZl
|
||||
LTQ0MjYtYjVmMC1jNzE2YjMxMTIzZTkiLCAiYXN5bmNfZXhlY3V0aW9uPyI6IGZhbHNlLCAiYWdl
|
||||
bnRfcm9sZSI6ICJTY29yZXIiLCAidG9vbHNfbmFtZXMiOiBbXX1dSigKCHBsYXRmb3JtEhwKGm1h
|
||||
Y09TLTE0LjMtYXJtNjQtYXJtLTY0Yml0ShwKEHBsYXRmb3JtX3JlbGVhc2USCAoGMjMuMy4wShsK
|
||||
D3BsYXRmb3JtX3N5c3RlbRIICgZEYXJ3aW5KewoQcGxhdGZvcm1fdmVyc2lvbhJnCmVEYXJ3aW4g
|
||||
S2VybmVsIFZlcnNpb24gMjMuMy4wOiBXZWQgRGVjIDIwIDIxOjMwOjU5IFBTVCAyMDIzOyByb290
|
||||
OnhudS0xMDAwMi44MS41fjcvUkVMRUFTRV9BUk02NF9UNjAzMEoKCgRjcHVzEgIYDHoCGAE=
|
||||
headers:
Accept:
- '*/*'
Accept-Encoding:
- gzip, deflate, br
Connection:
- keep-alive
Content-Length:
- '15215'
Content-Type:
- application/x-protobuf
User-Agent:
- OTel-OTLP-Exporter-Python/1.23.0
method: POST
uri: http://telemetry.crewai.com:4318/v1/traces
response:
body:
string: "\n\0"
headers:
Content-Length:
- '2'
Content-Type:
- application/x-protobuf
Date:
- Wed, 28 Feb 2024 05:37:44 GMT
status:
code: 200
message: OK
- request:
body: '{"messages": [{"role": "user", "content": "You are Scorer.\nYou''re an
expert scorer, specialized in scoring titles.\n\nYour personal goal is: Score
the titleTo give my final answer use the exact following format:\n\n```\nFinal
Answer: [my expected final answer, entire content of my most complete final
answer goes here]\n```\nI MUST use these formats, my jobs depends on it!\n\nCurrent
Task: Give me an integer score between 1-5 for the following title: ''The impact
of AI in the future of work''\nYour final answer must be: The score of the title.\n\n
Begin! This is VERY important to you, your job depends on it!\n\n\n"}], "model":
"gpt-4", "n": 1, "stop": ["\nResult"], "stream": true, "temperature": 0.7}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate, br
connection:
- keep-alive
content-length:
- '707'
content-type:
- application/json
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.12.0
x-stainless-arch:
- arm64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- MacOS
x-stainless-package-version:
- 1.12.0
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.11.7
method: POST
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: 'data: {"id":"chatcmpl-8x6vP1tSz8Kkwkq4zYP9KIp6qeIzH","object":"chat.completion.chunk","created":1709098663,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6vP1tSz8Kkwkq4zYP9KIp6qeIzH","object":"chat.completion.chunk","created":1709098663,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6vP1tSz8Kkwkq4zYP9KIp6qeIzH","object":"chat.completion.chunk","created":1709098663,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
Answer"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6vP1tSz8Kkwkq4zYP9KIp6qeIzH","object":"chat.completion.chunk","created":1709098663,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6vP1tSz8Kkwkq4zYP9KIp6qeIzH","object":"chat.completion.chunk","created":1709098663,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6vP1tSz8Kkwkq4zYP9KIp6qeIzH","object":"chat.completion.chunk","created":1709098663,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"4"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6vP1tSz8Kkwkq4zYP9KIp6qeIzH","object":"chat.completion.chunk","created":1709098663,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
|
||||
|
||||
|
||||
data: [DONE]
|
||||
|
||||
|
||||
'
|
||||
headers:
|
||||
CF-Cache-Status:
|
||||
- DYNAMIC
|
||||
CF-RAY:
|
||||
- 85c65137eace0183-GRU
|
||||
Cache-Control:
|
||||
- no-cache, must-revalidate
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- text/event-stream
|
||||
Date:
|
||||
- Wed, 28 Feb 2024 05:37:44 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Set-Cookie:
|
||||
- __cf_bm=VYlpouNqj.i5eS.llxXNP2Dh8Cj16YhDWm9tmUfkN7Y-1709098664-1.0-AYI2wf2jY5Tjd7yn5TyfVJpQL/j0lzXGXlWBerMiFakTanaUcOc9YmeNQLOuieEee/alMHNEaLSVYmsM8ff7Yeo=;
|
||||
path=/; expires=Wed, 28-Feb-24 06:07:44 GMT; domain=.api.openai.com; HttpOnly;
|
||||
Secure; SameSite=None
|
||||
- _cfuvid=83gHU7okXl7urv.GU0uf0fC.waIxA3g6z0CIpf6Gqzs-1709098664354-0.0-604800000;
|
||||
path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
access-control-allow-origin:
|
||||
- '*'
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
openai-model:
|
||||
- gpt-4-0613
|
||||
openai-organization:
|
||||
- crewai-iuxna1
|
||||
openai-processing-ms:
|
||||
- '385'
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
strict-transport-security:
|
||||
- max-age=15724800; includeSubDomains
|
||||
x-ratelimit-limit-requests:
|
||||
- '10000'
|
||||
x-ratelimit-limit-tokens:
|
||||
- '300000'
|
||||
x-ratelimit-remaining-requests:
|
||||
- '9999'
|
||||
x-ratelimit-remaining-tokens:
|
||||
- '299842'
|
||||
x-ratelimit-reset-requests:
|
||||
- 6ms
|
||||
x-ratelimit-reset-tokens:
|
||||
- 31ms
|
||||
x-request-id:
|
||||
- req_0df1662dd81b7a90918f1d4fdf5f604f
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
body: '{"messages": [{"role": "user", "content": "4"}, {"role": "system", "content":
"I''m gonna convert this raw text into valid JSON."}], "model": "gpt-4", "tool_choice":
{"type": "function", "function": {"name": "ScoreOutput"}}, "tools": [{"type":
"function", "function": {"name": "ScoreOutput", "description": "Correctly extracted
`ScoreOutput` with all the required parameters with correct types", "parameters":
{"properties": {"score": {"title": "Score", "type": "integer"}}, "required":
["score"], "type": "object"}}}]}'
|
||||
headers:
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- gzip, deflate, br
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '518'
|
||||
content-type:
|
||||
- application/json
|
||||
cookie:
|
||||
- __cf_bm=VYlpouNqj.i5eS.llxXNP2Dh8Cj16YhDWm9tmUfkN7Y-1709098664-1.0-AYI2wf2jY5Tjd7yn5TyfVJpQL/j0lzXGXlWBerMiFakTanaUcOc9YmeNQLOuieEee/alMHNEaLSVYmsM8ff7Yeo=;
|
||||
_cfuvid=83gHU7okXl7urv.GU0uf0fC.waIxA3g6z0CIpf6Gqzs-1709098664354-0.0-604800000
|
||||
host:
|
||||
- api.openai.com
|
||||
user-agent:
|
||||
- OpenAI/Python 1.12.0
|
||||
x-stainless-arch:
|
||||
- arm64
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- MacOS
|
||||
x-stainless-package-version:
|
||||
- 1.12.0
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.11.7
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: !!binary |
|
||||
IRwLACD+v3Z9n6a659ULDtJ9EKNlNNLUTXBp0B7nY2qbvnQLhl0gU0s0gYDKsnUubnfo9ciacG/L
|
||||
ANqGxEHBxssCacTQqazi4e5iPbeOgvrq6mx2sBTb+u1ifffolTRgAGl/FwVZT3zkYqButYIgJ5cf
|
||||
RSFxTO2JO3Edy1oIr0gdRoI4aG2y4WI4sabzXocbvQ2ilDi+GAC0pQFA7953iGMyaEW8MATiwJcA
|
||||
SrSIiIO8NN2mmacyGgAyYetGOVQuhHAq01r8BZ4Q5FAOtvXMIv3qCfHXTOsijN6a0zzI99ZlHR08
|
||||
X93e7O85IfVMbQaFemAUYlBZFScMAaQ8OaGnngKdRHd5ZvJMhQGQib8nDmq/FfBNaaCT6Js4Ft+q
|
||||
J9W1PatjP/R4TzGPhF6bRPvpguEUtQ74uyFshDgozbShMXsG/FiynBvnDJlES5P9ZXofqZQ4bFsl
|
||||
QHsmA8DclPnhgYxIcyw2RIKiCCz8rbZqHSUm2VpVZj0zAAM=
|
||||
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 85c6513dde0b0183-GRU
Cache-Control:
- no-cache, must-revalidate
Connection:
- keep-alive
Content-Encoding:
- br
Content-Type:
- application/json
Date:
- Wed, 28 Feb 2024 05:37:45 GMT
Server:
- cloudflare
Transfer-Encoding:
- chunked
access-control-allow-origin:
- '*'
alt-svc:
- h3=":443"; ma=86400
openai-model:
- gpt-4-0613
openai-organization:
- crewai-iuxna1
openai-processing-ms:
- '595'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=15724800; includeSubDomains
x-ratelimit-limit-requests:
- '10000'
x-ratelimit-limit-tokens:
- '300000'
x-ratelimit-remaining-requests:
- '9999'
x-ratelimit-remaining-tokens:
- '299969'
x-ratelimit-reset-requests:
- 6ms
x-ratelimit-reset-tokens:
- 6ms
x-request-id:
- req_d5d5ea0f7a78c31da11e7b20220530da
status:
code: 200
message: OK
version: 1
485 tests/cassettes/test_output_pydantic_to_another_task.yaml Normal file
@@ -0,0 +1,485 @@
interactions:
- request:
body: '{"messages": [{"role": "user", "content": "You are Scorer.\nYou''re an
expert scorer, specialized in scoring titles.\n\nYour personal goal is: Score
the titleTo give my final answer use the exact following format:\n\n```\nFinal
Answer: [my expected final answer, entire content of my most complete final
answer goes here]\n```\nI MUST use these formats, my jobs depends on it!\n\nCurrent
Task: Give me an integer score between 1-5 for the following title: ''The impact
of AI in the future of work''\nYour final answer must be: The score of the title.\n\n
Begin! This is VERY important to you, your job depends on it!\n\n\n"}], "model":
"gpt-4-0125-preview", "n": 1, "stop": ["\nResult"], "stream": true, "temperature":
0.7}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate, br
connection:
- keep-alive
content-length:
- '720'
content-type:
- application/json
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.12.0
x-stainless-arch:
- arm64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- MacOS
x-stainless-package-version:
- 1.12.0
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.11.7
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
|
||||
string: 'data: {"id":"chatcmpl-8x6vSHnKpeRUkWUr64TaKxKwYr55l","object":"chat.completion.chunk","created":1709098666,"model":"gpt-4-0125-preview","system_fingerprint":"fp_89b1a570e1","choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6vSHnKpeRUkWUr64TaKxKwYr55l","object":"chat.completion.chunk","created":1709098666,"model":"gpt-4-0125-preview","system_fingerprint":"fp_89b1a570e1","choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6vSHnKpeRUkWUr64TaKxKwYr55l","object":"chat.completion.chunk","created":1709098666,"model":"gpt-4-0125-preview","system_fingerprint":"fp_89b1a570e1","choices":[{"index":0,"delta":{"content":"
|
||||
Answer"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6vSHnKpeRUkWUr64TaKxKwYr55l","object":"chat.completion.chunk","created":1709098666,"model":"gpt-4-0125-preview","system_fingerprint":"fp_89b1a570e1","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6vSHnKpeRUkWUr64TaKxKwYr55l","object":"chat.completion.chunk","created":1709098666,"model":"gpt-4-0125-preview","system_fingerprint":"fp_89b1a570e1","choices":[{"index":0,"delta":{"content":"
|
||||
The"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6vSHnKpeRUkWUr64TaKxKwYr55l","object":"chat.completion.chunk","created":1709098666,"model":"gpt-4-0125-preview","system_fingerprint":"fp_89b1a570e1","choices":[{"index":0,"delta":{"content":"
|
||||
score"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6vSHnKpeRUkWUr64TaKxKwYr55l","object":"chat.completion.chunk","created":1709098666,"model":"gpt-4-0125-preview","system_fingerprint":"fp_89b1a570e1","choices":[{"index":0,"delta":{"content":"
|
||||
of"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6vSHnKpeRUkWUr64TaKxKwYr55l","object":"chat.completion.chunk","created":1709098666,"model":"gpt-4-0125-preview","system_fingerprint":"fp_89b1a570e1","choices":[{"index":0,"delta":{"content":"
|
||||
the"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6vSHnKpeRUkWUr64TaKxKwYr55l","object":"chat.completion.chunk","created":1709098666,"model":"gpt-4-0125-preview","system_fingerprint":"fp_89b1a570e1","choices":[{"index":0,"delta":{"content":"
|
||||
title"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6vSHnKpeRUkWUr64TaKxKwYr55l","object":"chat.completion.chunk","created":1709098666,"model":"gpt-4-0125-preview","system_fingerprint":"fp_89b1a570e1","choices":[{"index":0,"delta":{"content":"
|
||||
is"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6vSHnKpeRUkWUr64TaKxKwYr55l","object":"chat.completion.chunk","created":1709098666,"model":"gpt-4-0125-preview","system_fingerprint":"fp_89b1a570e1","choices":[{"index":0,"delta":{"content":"
|
||||
"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6vSHnKpeRUkWUr64TaKxKwYr55l","object":"chat.completion.chunk","created":1709098666,"model":"gpt-4-0125-preview","system_fingerprint":"fp_89b1a570e1","choices":[{"index":0,"delta":{"content":"4"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6vSHnKpeRUkWUr64TaKxKwYr55l","object":"chat.completion.chunk","created":1709098666,"model":"gpt-4-0125-preview","system_fingerprint":"fp_89b1a570e1","choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6vSHnKpeRUkWUr64TaKxKwYr55l","object":"chat.completion.chunk","created":1709098666,"model":"gpt-4-0125-preview","system_fingerprint":"fp_89b1a570e1","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
|
||||
|
||||
|
||||
data: [DONE]
|
||||
|
||||
|
||||
'
|
||||
headers:
|
||||
CF-Cache-Status:
|
||||
- DYNAMIC
|
||||
CF-RAY:
|
||||
- 85c6514609f10183-GRU
|
||||
Cache-Control:
|
||||
- no-cache, must-revalidate
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- text/event-stream
|
||||
Date:
|
||||
- Wed, 28 Feb 2024 05:37:46 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Set-Cookie:
|
||||
- __cf_bm=QwfZbmYv1tgZDvQYAsGPH5rNXRpLrDZsiQUY79E.GXw-1709098666-1.0-ASl39i/eqx5as/swlkaMYPeezDfFSCSwNTgXtHIPIMMmBIIbmfny4qsfHBKov+s148VW7mN8YedP0fShByF29tU=;
|
||||
path=/; expires=Wed, 28-Feb-24 06:07:46 GMT; domain=.api.openai.com; HttpOnly;
|
||||
Secure; SameSite=None
|
||||
- _cfuvid=JUMf.2ZCm2AD6GZnKI2VzV.IBkMxyroIz775HW_M3y0-1709098666563-0.0-604800000;
|
||||
path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
access-control-allow-origin:
|
||||
- '*'
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
openai-model:
|
||||
- gpt-4-0125-preview
|
||||
openai-organization:
|
||||
- crewai-iuxna1
|
||||
openai-processing-ms:
|
||||
- '359'
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
strict-transport-security:
|
||||
- max-age=15724800; includeSubDomains
|
||||
x-ratelimit-limit-requests:
|
||||
- '10000'
|
||||
x-ratelimit-limit-tokens:
|
||||
- '800000'
|
||||
x-ratelimit-remaining-requests:
|
||||
- '9999'
|
||||
x-ratelimit-remaining-tokens:
|
||||
- '799843'
|
||||
x-ratelimit-reset-requests:
|
||||
- 6ms
|
||||
x-ratelimit-reset-tokens:
|
||||
- 11ms
|
||||
x-request-id:
|
||||
- req_6e433280da9185055821a30ee24f4971
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
body: '{"messages": [{"role": "user", "content": "The score of the title is 4."},
{"role": "system", "content": "I''m gonna convert this raw text into valid JSON."}],
"model": "gpt-3.5-turbo-0125", "tool_choice": {"type": "function", "function":
{"name": "ScoreOutput"}}, "tools": [{"type": "function", "function": {"name":
"ScoreOutput", "description": "Correctly extracted `ScoreOutput` with all the
required parameters with correct types", "parameters": {"properties": {"score":
{"title": "Score", "type": "integer"}}, "required": ["score"], "type": "object"}}}]}'
|
||||
headers:
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- gzip, deflate, br
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '558'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- api.openai.com
|
||||
user-agent:
|
||||
- OpenAI/Python 1.12.0
|
||||
x-stainless-arch:
|
||||
- arm64
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- MacOS
|
||||
x-stainless-package-version:
|
||||
- 1.12.0
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.11.7
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: !!binary |
|
||||
IUwLACD+v27+zjK9c2dMM34xD3ibkmD1//c3weUPau7zMXW9i34BJRh2afitSwNNIKCybJ2LxebQ
|
||||
J8macO9OI0KTgxGymo/ZQgkjXofLp6ej94OH27fs5Wl2d9c9nftVFb68bi6ga0SQ6azIRk7RdDHw
|
||||
G9lCyMnlvCIHIyeyEzuJwzAiXlnIvBBghEqNhmcGxjj1qTRsxw24ubVssmIAo0+NiGhXmxHh/fsZ
|
||||
GNl6L/HGsMCALxGhl6IAI/BhaIaRtyN0AAhft8ionYQgro1Sit+MCyEcleyuDRbrVy7E7/Tw6pz7
|
||||
/MN/ftzcOteOCOOTh2jqZKLhtY2aFPXEGMRQ2hQTWCNCyxf8Tz1msi9up1FNY46IYOoPwQi7LwyZ
|
||||
7IsvMP8fCrD/6oC28K2k+L9U2yBkpXqZDklE2Drm94qwDTDCMEolJv7XiL5NOJ6scgaqlws1/o5y
|
||||
XrQDGCXOkAw2DQAIQQJfPABIQRJqU5QQRmDnt2zaquhV35jzAEr1G4dOEPLE5za0f80AAw==
|
||||
headers:
|
||||
CF-Cache-Status:
|
||||
- DYNAMIC
|
||||
CF-RAY:
|
||||
- 85c6514e2fb24ceb-GRU
|
||||
Cache-Control:
|
||||
- no-cache, must-revalidate
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Encoding:
|
||||
- br
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Wed, 28 Feb 2024 05:37:47 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Set-Cookie:
|
||||
- __cf_bm=f4rbOFlQDkHTO1CJ5zGphShVudt1Ux6oHL6ovYldFMA-1709098667-1.0-AQKGavWMfHbjQskiKVkW9ni7vtF5CaXWc1lhO55lk0VjiBNtgnRcySGXxDMoZ5bKBjcS5Aa/Hz7cOJbBq/MjmqE=;
|
||||
path=/; expires=Wed, 28-Feb-24 06:07:47 GMT; domain=.api.openai.com; HttpOnly;
|
||||
Secure; SameSite=None
|
||||
- _cfuvid=qcKHSeH3GbeSGerERe_0w3VELIZ59QNXUWDBOUPTCT4-1709098667592-0.0-604800000;
|
||||
path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
access-control-allow-origin:
|
||||
- '*'
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
openai-model:
|
||||
- gpt-3.5-turbo-0125
|
||||
openai-organization:
|
||||
- crewai-iuxna1
|
||||
openai-processing-ms:
|
||||
- '137'
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
strict-transport-security:
|
||||
- max-age=15724800; includeSubDomains
|
||||
x-ratelimit-limit-requests:
|
||||
- '10000'
|
||||
x-ratelimit-limit-tokens:
|
||||
- '1000000'
|
||||
x-ratelimit-remaining-requests:
|
||||
- '9999'
|
||||
x-ratelimit-remaining-tokens:
|
||||
- '999963'
|
||||
x-ratelimit-reset-requests:
|
||||
- 6ms
|
||||
x-ratelimit-reset-tokens:
|
||||
- 2ms
|
||||
x-request-id:
|
||||
- req_2779c5f91f3f47d098295e66694d6943
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
body: '{"messages": [{"role": "user", "content": "You are Scorer.\nYou''re an
expert scorer, specialized in scoring titles.\n\nYour personal goal is: Score
the titleTo give my final answer use the exact following format:\n\n```\nFinal
Answer: [my expected final answer, entire content of my most complete final
answer goes here]\n```\nI MUST use these formats, my jobs depends on it!\n\nCurrent
Task: Given the score the title ''The impact of AI in the future of work'' got,
give me an integer score between 1-5 for the following title: ''Return of the
Jedi'', you MUST give it a score, use your best judgment\nYour final answer
must be: The score of the title.\nThis is the context you''re working with:\nscore=4\n\n
Begin! This is VERY important to you, your job depends on it!\n\n\n"}], "model":
"gpt-4-0125-preview", "n": 1, "stop": ["\nResult"], "stream": true, "temperature":
0.7}'
|
||||
headers:
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- gzip, deflate, br
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '873'
|
||||
content-type:
|
||||
- application/json
|
||||
cookie:
|
||||
- __cf_bm=QwfZbmYv1tgZDvQYAsGPH5rNXRpLrDZsiQUY79E.GXw-1709098666-1.0-ASl39i/eqx5as/swlkaMYPeezDfFSCSwNTgXtHIPIMMmBIIbmfny4qsfHBKov+s148VW7mN8YedP0fShByF29tU=;
|
||||
_cfuvid=JUMf.2ZCm2AD6GZnKI2VzV.IBkMxyroIz775HW_M3y0-1709098666563-0.0-604800000
|
||||
host:
|
||||
- api.openai.com
|
||||
user-agent:
|
||||
- OpenAI/Python 1.12.0
|
||||
x-stainless-arch:
|
||||
- arm64
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- MacOS
|
||||
x-stainless-package-version:
|
||||
- 1.12.0
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.11.7
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: 'data: {"id":"chatcmpl-8x6vTZxKNq2PevrhKykpkCfhSSAxD","object":"chat.completion.chunk","created":1709098667,"model":"gpt-4-0125-preview","system_fingerprint":"fp_c8aa5a06d6","choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6vTZxKNq2PevrhKykpkCfhSSAxD","object":"chat.completion.chunk","created":1709098667,"model":"gpt-4-0125-preview","system_fingerprint":"fp_c8aa5a06d6","choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6vTZxKNq2PevrhKykpkCfhSSAxD","object":"chat.completion.chunk","created":1709098667,"model":"gpt-4-0125-preview","system_fingerprint":"fp_c8aa5a06d6","choices":[{"index":0,"delta":{"content":"
|
||||
Answer"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6vTZxKNq2PevrhKykpkCfhSSAxD","object":"chat.completion.chunk","created":1709098667,"model":"gpt-4-0125-preview","system_fingerprint":"fp_c8aa5a06d6","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6vTZxKNq2PevrhKykpkCfhSSAxD","object":"chat.completion.chunk","created":1709098667,"model":"gpt-4-0125-preview","system_fingerprint":"fp_c8aa5a06d6","choices":[{"index":0,"delta":{"content":"
|
||||
"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6vTZxKNq2PevrhKykpkCfhSSAxD","object":"chat.completion.chunk","created":1709098667,"model":"gpt-4-0125-preview","system_fingerprint":"fp_c8aa5a06d6","choices":[{"index":0,"delta":{"content":"4"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8x6vTZxKNq2PevrhKykpkCfhSSAxD","object":"chat.completion.chunk","created":1709098667,"model":"gpt-4-0125-preview","system_fingerprint":"fp_c8aa5a06d6","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
|
||||
|
||||
|
||||
data: [DONE]
|
||||
|
||||
|
||||
'
|
||||
headers:
|
||||
CF-Cache-Status:
|
||||
- DYNAMIC
|
||||
CF-RAY:
|
||||
- 85c651510f1a0183-GRU
|
||||
Cache-Control:
|
||||
- no-cache, must-revalidate
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- text/event-stream
|
||||
Date:
|
||||
- Wed, 28 Feb 2024 05:37:48 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
access-control-allow-origin:
|
||||
- '*'
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
openai-model:
|
||||
- gpt-4-0125-preview
|
||||
openai-organization:
|
||||
- crewai-iuxna1
|
||||
openai-processing-ms:
|
||||
- '345'
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
strict-transport-security:
|
||||
- max-age=15724800; includeSubDomains
|
||||
x-ratelimit-limit-requests:
|
||||
- '10000'
|
||||
x-ratelimit-limit-tokens:
|
||||
- '800000'
|
||||
x-ratelimit-remaining-requests:
|
||||
- '9999'
|
||||
x-ratelimit-remaining-tokens:
|
||||
- '799805'
|
||||
x-ratelimit-reset-requests:
|
||||
- 6ms
|
||||
x-ratelimit-reset-tokens:
|
||||
- 14ms
|
||||
x-request-id:
|
||||
- req_f461e8642e0724844f72a1d6697f491a
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
body: '{"messages": [{"role": "user", "content": "4"}, {"role": "system", "content":
"I''m gonna convert this raw text into valid JSON."}], "model": "gpt-3.5-turbo-0125",
"tool_choice": {"type": "function", "function": {"name": "ScoreOutput"}}, "tools":
[{"type": "function", "function": {"name": "ScoreOutput", "description": "Correctly
extracted `ScoreOutput` with all the required parameters with correct types",
"parameters": {"properties": {"score": {"title": "Score", "type": "integer"}},
"required": ["score"], "type": "object"}}}]}'
|
||||
headers:
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- gzip, deflate, br
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '531'
|
||||
content-type:
|
||||
- application/json
|
||||
cookie:
|
||||
- __cf_bm=f4rbOFlQDkHTO1CJ5zGphShVudt1Ux6oHL6ovYldFMA-1709098667-1.0-AQKGavWMfHbjQskiKVkW9ni7vtF5CaXWc1lhO55lk0VjiBNtgnRcySGXxDMoZ5bKBjcS5Aa/Hz7cOJbBq/MjmqE=;
|
||||
_cfuvid=qcKHSeH3GbeSGerERe_0w3VELIZ59QNXUWDBOUPTCT4-1709098667592-0.0-604800000
|
||||
host:
|
||||
- api.openai.com
|
||||
user-agent:
|
||||
- OpenAI/Python 1.12.0
|
||||
x-stainless-arch:
|
||||
- arm64
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- MacOS
|
||||
x-stainless-package-version:
|
||||
- 1.12.0
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.11.7
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: !!binary |
|
||||
IUwLACD25eb3NNWbrcGj130xReNnIyT+9zfB5Q9q7vMxdb2LfgElGHZp+K1LA00goLJsnYvF5tAn
|
||||
yZpw784jwqwBI9TT0tVC82Cw7i3fTrbDpP86dXxTp8PpevM6O+u+TI84fI8IqpqPascphi4G/kxJ
|
||||
CDm5nDdqwCjpx8N4OOj1BsQrQjUjDkaYaBdkYTdwralUECdpl5s7VbN6ZMHoxyMi2tVmRHj/fgZG
|
||||
sd9LvDEsMOBLRDCKj8AIpbUz60rp4ANA+LpFRrLlnLjmlOJ5XXIuHJXsrg0W69eS87yVffNpt/OT
|
||||
5PJNjFaDp0q/vIrbTCYaXtvoSVFPjEEMpU0xgTUiyFLwP/VSKzN6aJ1uXY6IYOoPwQi7X9hamdEv
|
||||
WOcABdiDOqAt/CkpHpZqG7iaaKMqm0SErWPyK8I2wAjWKS0mPnhEfyYct1Y5A22U0C53ajGSFowG
|
||||
2ZAMNg0ACN0EvngAkILBwJuihDACO/l4Jicjo83MnAcw1vmgl3R75bBTxvAOngED
|
||||
headers:
|
||||
CF-Cache-Status:
|
||||
- DYNAMIC
|
||||
CF-RAY:
|
||||
- 85c65155ca314ceb-GRU
|
||||
Cache-Control:
|
||||
- no-cache, must-revalidate
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Encoding:
|
||||
- br
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Wed, 28 Feb 2024 05:37:49 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
access-control-allow-origin:
|
||||
- '*'
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
openai-model:
|
||||
- gpt-3.5-turbo-0125
|
||||
openai-organization:
|
||||
- crewai-iuxna1
|
||||
openai-processing-ms:
|
||||
- '165'
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
strict-transport-security:
|
||||
- max-age=15724800; includeSubDomains
|
||||
x-ratelimit-limit-requests:
|
||||
- '10000'
|
||||
x-ratelimit-limit-tokens:
|
||||
- '1000000'
|
||||
x-ratelimit-remaining-requests:
|
||||
- '9999'
|
||||
x-ratelimit-remaining-tokens:
|
||||
- '999968'
|
||||
x-ratelimit-reset-requests:
|
||||
- 6ms
|
||||
x-ratelimit-reset-tokens:
|
||||
- 1ms
|
||||
x-request-id:
|
||||
- req_5ac6ab3793a274557791551eb014de64
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
version: 1
966 tests/cassettes/test_save_task_json_output.yaml Normal file
@@ -0,0 +1,966 @@
interactions:
- request:
body: '{"messages": [{"role": "user", "content": "You are Scorer.\nYou''re an
expert scorer, specialized in scoring titles.\n\nYour personal goal is: Score
the titleTo complete the task you MUST follow the format:\n\n```\nFinal Answer:
[your most complete final answer goes here]\n``` You must use these formats,
my life depends on it.This is the summary of your work so far:\nBegin! This
is VERY important to you, your job depends on it!\n\nCurrent Task: Give me an
integer score between 1-5 for the following title: ''The impact of AI in the
future of work''\nYour final answer must be: The score of the title.\n"}], "model":
"gpt-4", "n": 1, "stop": ["\nResult"], "stream": true, "temperature": 0.7}'
|
||||
headers:
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- gzip, deflate
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '692'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- api.openai.com
|
||||
user-agent:
|
||||
- OpenAI/Python 1.12.0
|
||||
x-stainless-arch:
|
||||
- arm64
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- MacOS
|
||||
x-stainless-package-version:
|
||||
- 1.12.0
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.11.7
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: 'data: {"id":"chatcmpl-8u7oJI7FhfdrBN2WPeKTWx0zf0ux0","object":"chat.completion.chunk","created":1708387083,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8u7oJI7FhfdrBN2WPeKTWx0zf0ux0","object":"chat.completion.chunk","created":1708387083,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8u7oJI7FhfdrBN2WPeKTWx0zf0ux0","object":"chat.completion.chunk","created":1708387083,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
Answer"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8u7oJI7FhfdrBN2WPeKTWx0zf0ux0","object":"chat.completion.chunk","created":1708387083,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8u7oJI7FhfdrBN2WPeKTWx0zf0ux0","object":"chat.completion.chunk","created":1708387083,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8u7oJI7FhfdrBN2WPeKTWx0zf0ux0","object":"chat.completion.chunk","created":1708387083,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"4"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8u7oJI7FhfdrBN2WPeKTWx0zf0ux0","object":"chat.completion.chunk","created":1708387083,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
|
||||
|
||||
|
||||
data: [DONE]
|
||||
|
||||
|
||||
'
|
||||
headers:
|
||||
CF-Cache-Status:
|
||||
- DYNAMIC
|
||||
CF-RAY:
|
||||
- 858274a70d60a4bd-GRU
|
||||
Cache-Control:
|
||||
- no-cache, must-revalidate
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- text/event-stream
|
||||
Date:
|
||||
- Mon, 19 Feb 2024 23:58:04 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Set-Cookie:
|
||||
- __cf_bm=Fzt.3y3dhP8ruN1TWiH6jLm0CcZkNtLDNsdfw_mz02I-1708387084-1.0-Ad3cAs7mDylg8sxtK6Ttgud1zpXW+twV1R26Y6LmMVz79On3i0JoMvjsIx7HWsHeZoJYHqaoXrWBV6c7wESpq3A=;
|
||||
path=/; expires=Tue, 20-Feb-24 00:28:04 GMT; domain=.api.openai.com; HttpOnly;
|
||||
Secure; SameSite=None
|
||||
- _cfuvid=j_ZBAzbEz7fMKPo29aitwE0ivRAZjvpZR17gpXyL1o4-1708387084116-0.0-604800000;
|
||||
path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
access-control-allow-origin:
|
||||
- '*'
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
openai-model:
|
||||
- gpt-4-0613
|
||||
openai-organization:
|
||||
- crewai-iuxna1
|
||||
openai-processing-ms:
|
||||
- '448'
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
strict-transport-security:
|
||||
- max-age=15724800; includeSubDomains
|
||||
x-ratelimit-limit-requests:
|
||||
- '10000'
|
||||
x-ratelimit-limit-tokens:
|
||||
- '300000'
|
||||
x-ratelimit-remaining-requests:
|
||||
- '9999'
|
||||
x-ratelimit-remaining-tokens:
|
||||
- '299845'
|
||||
x-ratelimit-reset-requests:
|
||||
- 6ms
|
||||
x-ratelimit-reset-tokens:
|
||||
- 31ms
|
||||
x-request-id:
|
||||
- req_209df8a711cf6bf7c367149b0b6f8242
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: !!binary |
|
||||
CveKAgokCiIKDHNlcnZpY2UubmFtZRISChBjcmV3QUktdGVsZW1ldHJ5Es2KAgoSChBjcmV3YWku
|
||||
dGVsZW1ldHJ5EvIHChAJNYRcEeEiqHTciFFAshfLEgiUxTCQyyT4kioMQ3JldyBDcmVhdGVkMAE5
|
||||
6ICmVv9otRdBEAOsVv9otRdKGgoOY3Jld2FpX3ZlcnNpb24SCAoGMC4xMS4xShoKDnB5dGhvbl92
|
||||
ZXJzaW9uEggKBjMuMTEuN0oxCgdjcmV3X2lkEiYKJDYxYmZkNDk4LTQxOTUtNDJjMC1iOGU5LTky
|
||||
NGMwZTdjZGEyMUocCgxjcmV3X3Byb2Nlc3MSDAoKc2VxdWVudGlhbEoVCg1jcmV3X2xhbmd1YWdl
|
||||
EgQKAmVuShoKFGNyZXdfbnVtYmVyX29mX3Rhc2tzEgIYAUobChVjcmV3X251bWJlcl9vZl9hZ2Vu
|
||||
dHMSAhgBSsYCCgtjcmV3X2FnZW50cxK2AgqzAlt7ImlkIjogIjkwNzFkMjk4LWVhODYtNDRjMS1h
|
||||
YjRhLWUzM2ZmMjY2OTVhNSIsICJyb2xlIjogInRlc3Qgcm9sZSIsICJtZW1vcnlfZW5hYmxlZD8i
|
||||
OiB0cnVlLCAidmVyYm9zZT8iOiB0cnVlLCAibWF4X2l0ZXIiOiA0LCAibWF4X3JwbSI6IDEwLCAi
|
||||
aTE4biI6ICJlbiIsICJsbG0iOiAie1wibmFtZVwiOiBudWxsLCBcIm1vZGVsX25hbWVcIjogXCJn
|
||||
cHQtNFwiLCBcInRlbXBlcmF0dXJlXCI6IDAuNywgXCJjbGFzc1wiOiBcIkNoYXRPcGVuQUlcIn0i
|
||||
LCAiZGVsZWdhdGlvbl9lbmFibGVkPyI6IHRydWUsICJ0b29sc19uYW1lcyI6IFtdfV1KnQEKCmNy
|
||||
ZXdfdGFza3MSjgEKiwFbeyJpZCI6ICJmYTYxZTMyMC02NjJiLTRhZmEtYTM2Mi0xYjFjOTlmMGEw
|
||||
OTQiLCAiYXN5bmNfZXhlY3V0aW9uPyI6IGZhbHNlLCAiYWdlbnRfcm9sZSI6ICJ0ZXN0IHJvbGUi
|
||||
LCAidG9vbHNfbmFtZXMiOiBbImdldF9maW5hbF9hbnN3ZXIiXX1dSigKCHBsYXRmb3JtEhwKGm1h
|
||||
Y09TLTE0LjMtYXJtNjQtYXJtLTY0Yml0ShwKEHBsYXRmb3JtX3JlbGVhc2USCAoGMjMuMy4wShsK
|
||||
D3BsYXRmb3JtX3N5c3RlbRIICgZEYXJ3aW5KewoQcGxhdGZvcm1fdmVyc2lvbhJnCmVEYXJ3aW4g
|
||||
S2VybmVsIFZlcnNpb24gMjMuMy4wOiBXZWQgRGVjIDIwIDIxOjMwOjU5IFBTVCAyMDIzOyByb290
|
||||
OnhudS0xMDAwMi44MS41fjcvUkVMRUFTRV9BUk02NF9UNjAzMEoKCgRjcHVzEgIYDHoCGAESowsK
|
||||
EKI61/O3gArNWKXyLquTVFwSCHxA8ssjIlgBKgxDcmV3IENyZWF0ZWQwATngz2Rg/2i1F0GgpGZg
|
||||
/2i1F0oaCg5jcmV3YWlfdmVyc2lvbhIICgYwLjExLjFKGgoOcHl0aG9uX3ZlcnNpb24SCAoGMy4x
|
||||
MS43SjEKB2NyZXdfaWQSJgokMDg1NjI1OTMtMjA3Yi00YzliLWI0YzItZDI1YjA2OTA4NjA5ShwK
|
||||
DGNyZXdfcHJvY2VzcxIMCgpzZXF1ZW50aWFsShUKDWNyZXdfbGFuZ3VhZ2USBAoCZW5KGgoUY3Jl
|
||||
d19udW1iZXJfb2ZfdGFza3MSAhgCShsKFWNyZXdfbnVtYmVyX29mX2FnZW50cxICGAJK/QQKC2Ny
|
||||
ZXdfYWdlbnRzEu0ECuoEW3siaWQiOiAiNzNlNDZmZDgtMThlOC00Yzc3LWIzMzEtYWI5N2FkMWNh
|
||||
MDcxIiwgInJvbGUiOiAidGVzdCByb2xlIiwgIm1lbW9yeV9lbmFibGVkPyI6IHRydWUsICJ2ZXJi
|
||||
b3NlPyI6IHRydWUsICJtYXhfaXRlciI6IDE1LCAibWF4X3JwbSI6IDEwLCAiaTE4biI6ICJlbiIs
|
||||
ICJsbG0iOiAie1wibmFtZVwiOiBudWxsLCBcIm1vZGVsX25hbWVcIjogXCJncHQtNFwiLCBcInRl
|
||||
bXBlcmF0dXJlXCI6IDAuNywgXCJjbGFzc1wiOiBcIkNoYXRPcGVuQUlcIn0iLCAiZGVsZWdhdGlv
|
||||
bl9lbmFibGVkPyI6IHRydWUsICJ0b29sc19uYW1lcyI6IFtdfSwgeyJpZCI6ICI3Y2FiNDdlZS00
|
||||
NjQ3LTQxNTQtYTBhNC1kNDM1NzY5Nzk2ZmMiLCAicm9sZSI6ICJ0ZXN0IHJvbGUyIiwgIm1lbW9y
|
||||
eV9lbmFibGVkPyI6IHRydWUsICJ2ZXJib3NlPyI6IHRydWUsICJtYXhfaXRlciI6IDIsICJtYXhf
|
||||
cnBtIjogbnVsbCwgImkxOG4iOiAiZW4iLCAibGxtIjogIntcIm5hbWVcIjogbnVsbCwgXCJtb2Rl
|
||||
bF9uYW1lXCI6IFwiZ3B0LTRcIiwgXCJ0ZW1wZXJhdHVyZVwiOiAwLjcsIFwiY2xhc3NcIjogXCJD
|
||||
aGF0T3BlbkFJXCJ9IiwgImRlbGVnYXRpb25fZW5hYmxlZD8iOiB0cnVlLCAidG9vbHNfbmFtZXMi
|
||||
OiBbXX1dSpcCCgpjcmV3X3Rhc2tzEogCCoUCW3siaWQiOiAiMDY5MDYxYzctMDk5NS00ODg1LWFk
|
||||
YzYtMjNhOWRmMTAzZGNhIiwgImFzeW5jX2V4ZWN1dGlvbj8iOiBmYWxzZSwgImFnZW50X3JvbGUi
|
||||
OiAidGVzdCByb2xlIiwgInRvb2xzX25hbWVzIjogW119LCB7ImlkIjogIjIzM2YxYWUyLTA5ZjIt
|
||||
NGFhNi05ZWU5LTNjNzAxMDU2ODdjZCIsICJhc3luY19leGVjdXRpb24/IjogZmFsc2UsICJhZ2Vu
|
||||
dF9yb2xlIjogInRlc3Qgcm9sZTIiLCAidG9vbHNfbmFtZXMiOiBbImdldF9maW5hbF9hbnN3ZXIi
|
||||
XX1dSigKCHBsYXRmb3JtEhwKGm1hY09TLTE0LjMtYXJtNjQtYXJtLTY0Yml0ShwKEHBsYXRmb3Jt
|
||||
X3JlbGVhc2USCAoGMjMuMy4wShsKD3BsYXRmb3JtX3N5c3RlbRIICgZEYXJ3aW5KewoQcGxhdGZv
|
||||
cm1fdmVyc2lvbhJnCmVEYXJ3aW4gS2VybmVsIFZlcnNpb24gMjMuMy4wOiBXZWQgRGVjIDIwIDIx
|
||||
OjMwOjU5IFBTVCAyMDIzOyByb290OnhudS0xMDAwMi44MS41fjcvUkVMRUFTRV9BUk02NF9UNjAz
|
||||
MEoKCgRjcHVzEgIYDHoCGAES9QcKEKK+FPWEZI+7agvaUnDOuzQSCM3TO+MjYzmGKgxDcmV3IENy
|
||||
ZWF0ZWQwATmInCxq/2i1F0GoYS5q/2i1F0oaCg5jcmV3YWlfdmVyc2lvbhIICgYwLjExLjFKGgoO
|
||||
cHl0aG9uX3ZlcnNpb24SCAoGMy4xMS43SjEKB2NyZXdfaWQSJgokYzBlYjFjNzItOTJhNy00MjIz
|
||||
LWIwOWMtMzhhNTFkMzdlZDhlShwKDGNyZXdfcHJvY2VzcxIMCgpzZXF1ZW50aWFsShUKDWNyZXdf
|
||||
bGFuZ3VhZ2USBAoCZW5KGgoUY3Jld19udW1iZXJfb2ZfdGFza3MSAhgBShsKFWNyZXdfbnVtYmVy
|
||||
X29mX2FnZW50cxICGAFKyQIKC2NyZXdfYWdlbnRzErkCCrYCW3siaWQiOiAiZDZjYWE1OTItMDBk
|
||||
My00YjkyLWE4YWItYjcwMDgzZWIxZmI4IiwgInJvbGUiOiAidGVzdCByb2xlIiwgIm1lbW9yeV9l
|
||||
bmFibGVkPyI6IHRydWUsICJ2ZXJib3NlPyI6IHRydWUsICJtYXhfaXRlciI6IDE1LCAibWF4X3Jw
|
||||
bSI6IG51bGwsICJpMThuIjogImVuIiwgImxsbSI6ICJ7XCJuYW1lXCI6IG51bGwsIFwibW9kZWxf
|
||||
bmFtZVwiOiBcImdwdC00XCIsIFwidGVtcGVyYXR1cmVcIjogMC43LCBcImNsYXNzXCI6IFwiQ2hh
|
||||
dE9wZW5BSVwifSIsICJkZWxlZ2F0aW9uX2VuYWJsZWQ/IjogdHJ1ZSwgInRvb2xzX25hbWVzIjog
|
||||
W119XUqdAQoKY3Jld190YXNrcxKOAQqLAVt7ImlkIjogImFlNTQ5ZGNkLWFmODYtNDU5NC1iZmYz
|
||||
LTI1OGU1ODJiNTM1YSIsICJhc3luY19leGVjdXRpb24/IjogZmFsc2UsICJhZ2VudF9yb2xlIjog
|
||||
InRlc3Qgcm9sZSIsICJ0b29sc19uYW1lcyI6IFsiZ2V0X2ZpbmFsX2Fuc3dlciJdfV1KKAoIcGxh
|
||||
dGZvcm0SHAoabWFjT1MtMTQuMy1hcm02NC1hcm0tNjRiaXRKHAoQcGxhdGZvcm1fcmVsZWFzZRII
|
||||
CgYyMy4zLjBKGwoPcGxhdGZvcm1fc3lzdGVtEggKBkRhcndpbkp7ChBwbGF0Zm9ybV92ZXJzaW9u
|
||||
EmcKZURhcndpbiBLZXJuZWwgVmVyc2lvbiAyMy4zLjA6IFdlZCBEZWMgMjAgMjE6MzA6NTkgUFNU
|
||||
IDIwMjM7IHJvb3Q6eG51LTEwMDAyLjgxLjV+Ny9SRUxFQVNFX0FSTTY0X1Q2MDMwSgoKBGNwdXMS
|
||||
AhgMegIYARL0BwoQOm2IgM0Nl5jB0IWeMN44TBIIPiJkrp4aQNEqDENyZXcgQ3JlYXRlZDABOfDw
|
||||
cHP/aLUXQeC9cnP/aLUXShoKDmNyZXdhaV92ZXJzaW9uEggKBjAuMTEuMUoaCg5weXRob25fdmVy
|
||||
c2lvbhIICgYzLjExLjdKMQoHY3Jld19pZBImCiQyNWJhMDliYi02NjFjLTQzMmUtYjFkOS1mZDBi
|
||||
MWJiZWY5ZDJKHAoMY3Jld19wcm9jZXNzEgwKCnNlcXVlbnRpYWxKFQoNY3Jld19sYW5ndWFnZRIE
|
||||
CgJlbkoaChRjcmV3X251bWJlcl9vZl90YXNrcxICGAFKGwoVY3Jld19udW1iZXJfb2ZfYWdlbnRz
|
||||
EgIYAUrIAgoLY3Jld19hZ2VudHMSuAIKtQJbeyJpZCI6ICJmMzgzNmUxZi0xODkxLTRjOTUtYjky
|
||||
NS05NTY4OGNhMGU1ODMiLCAicm9sZSI6ICJ0ZXN0IHJvbGUiLCAibWVtb3J5X2VuYWJsZWQ/Ijog
|
||||
dHJ1ZSwgInZlcmJvc2U/IjogdHJ1ZSwgIm1heF9pdGVyIjogNCwgIm1heF9ycG0iOiBudWxsLCAi
|
||||
aTE4biI6ICJlbiIsICJsbG0iOiAie1wibmFtZVwiOiBudWxsLCBcIm1vZGVsX25hbWVcIjogXCJn
|
||||
cHQtNFwiLCBcInRlbXBlcmF0dXJlXCI6IDAuNywgXCJjbGFzc1wiOiBcIkNoYXRPcGVuQUlcIn0i
|
||||
LCAiZGVsZWdhdGlvbl9lbmFibGVkPyI6IHRydWUsICJ0b29sc19uYW1lcyI6IFtdfV1KnQEKCmNy
|
||||
ZXdfdGFza3MSjgEKiwFbeyJpZCI6ICJmZDRlZmRhMi0wZGUzLTQwNjUtYWFjMS02ZTk2YWUxOWI2
|
||||
NjIiLCAiYXN5bmNfZXhlY3V0aW9uPyI6IGZhbHNlLCAiYWdlbnRfcm9sZSI6ICJ0ZXN0IHJvbGUi
|
||||
LCAidG9vbHNfbmFtZXMiOiBbImdldF9maW5hbF9hbnN3ZXIiXX1dSigKCHBsYXRmb3JtEhwKGm1h
|
||||
Y09TLTE0LjMtYXJtNjQtYXJtLTY0Yml0ShwKEHBsYXRmb3JtX3JlbGVhc2USCAoGMjMuMy4wShsK
|
||||
D3BsYXRmb3JtX3N5c3RlbRIICgZEYXJ3aW5KewoQcGxhdGZvcm1fdmVyc2lvbhJnCmVEYXJ3aW4g
|
||||
S2VybmVsIFZlcnNpb24gMjMuMy4wOiBXZWQgRGVjIDIwIDIxOjMwOjU5IFBTVCAyMDIzOyByb290
|
||||
OnhudS0xMDAwMi44MS41fjcvUkVMRUFTRV9BUk02NF9UNjAzMEoKCgRjcHVzEgIYDHoCGAESjwwK
|
||||
ELSwYmxhFBMIZKbUCnkT+eMSCAFtGCSOchWaKgxDcmV3IENyZWF0ZWQwATnwR+h8/2i1F0GYIOp8
|
||||
/2i1F0oaCg5jcmV3YWlfdmVyc2lvbhIICgYwLjExLjFKGgoOcHl0aG9uX3ZlcnNpb24SCAoGMy4x
|
||||
MS43SjEKB2NyZXdfaWQSJgokYzkyNDkxZmQtZDAzZS00ODhiLWEwOWQtMjg5Y2M3ZGRkMDM3ShwK
|
||||
DGNyZXdfcHJvY2VzcxIMCgpzZXF1ZW50aWFsShUKDWNyZXdfbGFuZ3VhZ2USBAoCZW5KGgoUY3Jl
|
||||
d19udW1iZXJfb2ZfdGFza3MSAhgDShsKFWNyZXdfbnVtYmVyX29mX2FnZW50cxICGAJKggUKC2Ny
|
||||
ZXdfYWdlbnRzEvIECu8EW3siaWQiOiAiN2U5YmZiOTUtMTcyNy00M2JhLWI1Y2YtYzgzNjMxNzhh
|
||||
YjFiIiwgInJvbGUiOiAidGVzdCByb2xlIiwgIm1lbW9yeV9lbmFibGVkPyI6IHRydWUsICJ2ZXJi
|
||||
b3NlPyI6IGZhbHNlLCAibWF4X2l0ZXIiOiAxNSwgIm1heF9ycG0iOiBudWxsLCAiaTE4biI6ICJl
|
||||
biIsICJsbG0iOiAie1wibmFtZVwiOiBudWxsLCBcIm1vZGVsX25hbWVcIjogXCJncHQtNFwiLCBc
|
||||
InRlbXBlcmF0dXJlXCI6IDAuNywgXCJjbGFzc1wiOiBcIkNoYXRPcGVuQUlcIn0iLCAiZGVsZWdh
|
||||
dGlvbl9lbmFibGVkPyI6IHRydWUsICJ0b29sc19uYW1lcyI6IFtdfSwgeyJpZCI6ICI2MzkzYTNk
|
||||
Mi03ZjhmLTRkZjctOGM3Mi1jMWJjYjc0OGU0NmMiLCAicm9sZSI6ICJ0ZXN0IHJvbGUyIiwgIm1l
|
||||
bW9yeV9lbmFibGVkPyI6IHRydWUsICJ2ZXJib3NlPyI6IGZhbHNlLCAibWF4X2l0ZXIiOiAxNSwg
|
||||
Im1heF9ycG0iOiBudWxsLCAiaTE4biI6ICJlbiIsICJsbG0iOiAie1wibmFtZVwiOiBudWxsLCBc
|
||||
Im1vZGVsX25hbWVcIjogXCJncHQtNFwiLCBcInRlbXBlcmF0dXJlXCI6IDAuNywgXCJjbGFzc1wi
|
||||
OiBcIkNoYXRPcGVuQUlcIn0iLCAiZGVsZWdhdGlvbl9lbmFibGVkPyI6IHRydWUsICJ0b29sc19u
|
||||
YW1lcyI6IFtdfV1K/gIKCmNyZXdfdGFza3MS7wIK7AJbeyJpZCI6ICIyNWVkYjBhMi1mNjEwLTRi
|
||||
MmQtOGQ5ZC1jYzY4Yzg4ZjEwMzkiLCAiYXN5bmNfZXhlY3V0aW9uPyI6IGZhbHNlLCAiYWdlbnRf
|
||||
cm9sZSI6ICJ0ZXN0IHJvbGUiLCAidG9vbHNfbmFtZXMiOiBbXX0sIHsiaWQiOiAiMDY0MWU0ZTgt
|
||||
M2IwOC00ZTdkLWEwODQtZjc3ZGVkZWQzNGQ0IiwgImFzeW5jX2V4ZWN1dGlvbj8iOiBmYWxzZSwg
|
||||
ImFnZW50X3JvbGUiOiAidGVzdCByb2xlIiwgInRvb2xzX25hbWVzIjogW119LCB7ImlkIjogIjVl
|
||||
MmI0MWY1LTY5MDUtNDk1Yi04YjJiLWY2NGQ1MmE4YTczMCIsICJhc3luY19leGVjdXRpb24/Ijog
|
||||
ZmFsc2UsICJhZ2VudF9yb2xlIjogInRlc3Qgcm9sZTIiLCAidG9vbHNfbmFtZXMiOiBbXX1dSigK
|
||||
CHBsYXRmb3JtEhwKGm1hY09TLTE0LjMtYXJtNjQtYXJtLTY0Yml0ShwKEHBsYXRmb3JtX3JlbGVh
|
||||
c2USCAoGMjMuMy4wShsKD3BsYXRmb3JtX3N5c3RlbRIICgZEYXJ3aW5KewoQcGxhdGZvcm1fdmVy
|
||||
c2lvbhJnCmVEYXJ3aW4gS2VybmVsIFZlcnNpb24gMjMuMy4wOiBXZWQgRGVjIDIwIDIxOjMwOjU5
|
||||
IFBTVCAyMDIzOyByb290OnhudS0xMDAwMi44MS41fjcvUkVMRUFTRV9BUk02NF9UNjAzMEoKCgRj
|
||||
cHVzEgIYDHoCGAEShAgKEBAXqmYdbMjB+KnMIGD6fNYSCGF7u8bAau6oKgxDcmV3IENyZWF0ZWQw
|
||||
ATloOoeM/2i1F0HwxIiM/2i1F0oaCg5jcmV3YWlfdmVyc2lvbhIICgYwLjExLjFKGgoOcHl0aG9u
|
||||
X3ZlcnNpb24SCAoGMy4xMS43SjEKB2NyZXdfaWQSJgokZDY3MzFmODMtMWEwMS00ZTQ1LWE5YjMt
|
||||
NmMzNDRjOTQxMWZlShwKDGNyZXdfcHJvY2VzcxIMCgpzZXF1ZW50aWFsShUKDWNyZXdfbGFuZ3Vh
|
||||
Z2USBAoCZW5KGgoUY3Jld19udW1iZXJfb2ZfdGFza3MSAhgBShsKFWNyZXdfbnVtYmVyX29mX2Fn
|
||||
ZW50cxICGAFK2gIKC2NyZXdfYWdlbnRzEsoCCscCW3siaWQiOiAiMTIzMWQ3YTYtNjI1Yy00ODlm
|
||||
LWIyM2ItZjI0OTM1M2ZlYjgwIiwgInJvbGUiOiAidGVzdCByb2xlIiwgIm1lbW9yeV9lbmFibGVk
|
||||
PyI6IHRydWUsICJ2ZXJib3NlPyI6IGZhbHNlLCAibWF4X2l0ZXIiOiAxNSwgIm1heF9ycG0iOiBu
|
||||
dWxsLCAiaTE4biI6ICJlbiIsICJsbG0iOiAie1wibmFtZVwiOiBudWxsLCBcIm1vZGVsX25hbWVc
|
||||
IjogXCJncHQtNFwiLCBcInRlbXBlcmF0dXJlXCI6IDAuNywgXCJjbGFzc1wiOiBcIkNoYXRPcGVu
|
||||
QUlcIn0iLCAiZGVsZWdhdGlvbl9lbmFibGVkPyI6IHRydWUsICJ0b29sc19uYW1lcyI6IFsibGVh
|
||||
cm5fYWJvdXRfQUkiXX1dSpsBCgpjcmV3X3Rhc2tzEowBCokBW3siaWQiOiAiNmNjNzg5OTYtODNi
|
||||
Zi00MTY2LWFlMWYtZWFlYTY1ZjliMDQzIiwgImFzeW5jX2V4ZWN1dGlvbj8iOiBmYWxzZSwgImFn
|
||||
ZW50X3JvbGUiOiAidGVzdCByb2xlIiwgInRvb2xzX25hbWVzIjogWyJsZWFybl9hYm91dF9BSSJd
|
||||
fV1KKAoIcGxhdGZvcm0SHAoabWFjT1MtMTQuMy1hcm02NC1hcm0tNjRiaXRKHAoQcGxhdGZvcm1f
|
||||
cmVsZWFzZRIICgYyMy4zLjBKGwoPcGxhdGZvcm1fc3lzdGVtEggKBkRhcndpbkp7ChBwbGF0Zm9y
|
||||
bV92ZXJzaW9uEmcKZURhcndpbiBLZXJuZWwgVmVyc2lvbiAyMy4zLjA6IFdlZCBEZWMgMjAgMjE6
|
||||
MzA6NTkgUFNUIDIwMjM7IHJvb3Q6eG51LTEwMDAyLjgxLjV+Ny9SRUxFQVNFX0FSTTY0X1Q2MDMw
|
||||
SgoKBGNwdXMSAhgMegIYARKECAoQ08ZgnzWwn+DKMkWBVkUKHhIIaS0IvhJt/NwqDENyZXcgQ3Jl
|
||||
YXRlZDABOQDEKZb/aLUXQZh1K5b/aLUXShoKDmNyZXdhaV92ZXJzaW9uEggKBjAuMTEuMUoaCg5w
|
||||
eXRob25fdmVyc2lvbhIICgYzLjExLjdKMQoHY3Jld19pZBImCiRlY2RiMDI1MS02MDk2LTQ1OWQt
|
||||
YmIyMS01NGNlMzczMzM0YjBKHAoMY3Jld19wcm9jZXNzEgwKCnNlcXVlbnRpYWxKFQoNY3Jld19s
|
||||
YW5ndWFnZRIECgJlbkoaChRjcmV3X251bWJlcl9vZl90YXNrcxICGAFKGwoVY3Jld19udW1iZXJf
|
||||
b2ZfYWdlbnRzEgIYAUraAgoLY3Jld19hZ2VudHMSygIKxwJbeyJpZCI6ICJiMzllY2JiYi1hOGQy
|
||||
LTRkMDItODMxMy1iNzUzMTMwNzk4NDgiLCAicm9sZSI6ICJ0ZXN0IHJvbGUiLCAibWVtb3J5X2Vu
|
||||
YWJsZWQ/IjogdHJ1ZSwgInZlcmJvc2U/IjogZmFsc2UsICJtYXhfaXRlciI6IDE1LCAibWF4X3Jw
|
||||
bSI6IG51bGwsICJpMThuIjogImVuIiwgImxsbSI6ICJ7XCJuYW1lXCI6IG51bGwsIFwibW9kZWxf
|
||||
bmFtZVwiOiBcImdwdC00XCIsIFwidGVtcGVyYXR1cmVcIjogMC43LCBcImNsYXNzXCI6IFwiQ2hh
|
||||
dE9wZW5BSVwifSIsICJkZWxlZ2F0aW9uX2VuYWJsZWQ/IjogdHJ1ZSwgInRvb2xzX25hbWVzIjog
|
||||
WyJsZWFybl9hYm91dF9BSSJdfV1KmwEKCmNyZXdfdGFza3MSjAEKiQFbeyJpZCI6ICI5ODgwMzYw
|
||||
MS1mZGQ3LTRlYzItYjdkNi1lNmRiNDg1NWZlOTEiLCAiYXN5bmNfZXhlY3V0aW9uPyI6IGZhbHNl
|
||||
LCAiYWdlbnRfcm9sZSI6ICJ0ZXN0IHJvbGUiLCAidG9vbHNfbmFtZXMiOiBbImxlYXJuX2Fib3V0
|
||||
X0FJIl19XUooCghwbGF0Zm9ybRIcChptYWNPUy0xNC4zLWFybTY0LWFybS02NGJpdEocChBwbGF0
|
||||
Zm9ybV9yZWxlYXNlEggKBjIzLjMuMEobCg9wbGF0Zm9ybV9zeXN0ZW0SCAoGRGFyd2luSnsKEHBs
|
||||
YXRmb3JtX3ZlcnNpb24SZwplRGFyd2luIEtlcm5lbCBWZXJzaW9uIDIzLjMuMDogV2VkIERlYyAy
|
||||
MCAyMTozMDo1OSBQU1QgMjAyMzsgcm9vdDp4bnUtMTAwMDIuODEuNX43L1JFTEVBU0VfQVJNNjRf
|
||||
VDYwMzBKCgoEY3B1cxICGAx6AhgBErIEChBrivKwczy6M0XBe5RUKSOpEggllfJ7MFxZ6CoMQ3Jl
|
||||
dyBDcmVhdGVkMAE5iN3aov9otRdBEGjcov9otRdKGgoOY3Jld2FpX3ZlcnNpb24SCAoGMC4xMS4x
|
||||
ShoKDnB5dGhvbl92ZXJzaW9uEggKBjMuMTEuN0oxCgdjcmV3X2lkEiYKJDM2NTU2MTI2LTEwMDQt
|
||||
NGUzNS05N2MzLWMyZTMyNDA1Nzc4N0ocCgxjcmV3X3Byb2Nlc3MSDAoKc2VxdWVudGlhbEoVCg1j
|
||||
cmV3X2xhbmd1YWdlEgQKAmVuShoKFGNyZXdfbnVtYmVyX29mX3Rhc2tzEgIYAEobChVjcmV3X251
|
||||
bWJlcl9vZl9hZ2VudHMSAhgAShMKC2NyZXdfYWdlbnRzEgQKAltdShIKCmNyZXdfdGFza3MSBAoC
|
||||
W11KKAoIcGxhdGZvcm0SHAoabWFjT1MtMTQuMy1hcm02NC1hcm0tNjRiaXRKHAoQcGxhdGZvcm1f
|
||||
cmVsZWFzZRIICgYyMy4zLjBKGwoPcGxhdGZvcm1fc3lzdGVtEggKBkRhcndpbkp7ChBwbGF0Zm9y
|
||||
bV92ZXJzaW9uEmcKZURhcndpbiBLZXJuZWwgVmVyc2lvbiAyMy4zLjA6IFdlZCBEZWMgMjAgMjE6
|
||||
MzA6NTkgUFNUIDIwMjM7IHJvb3Q6eG51LTEwMDAyLjgxLjV+Ny9SRUxFQVNFX0FSTTY0X1Q2MDMw
|
||||
SgoKBGNwdXMSAhgMegIYARKyBAoQIuiOy8KC/HXbljhuxQah8BII+8qTPncJ3isqDENyZXcgQ3Jl
|
||||
YXRlZDABOYg84KL/aLUXQTAb4aL/aLUXShoKDmNyZXdhaV92ZXJzaW9uEggKBjAuMTEuMUoaCg5w
|
||||
eXRob25fdmVyc2lvbhIICgYzLjExLjdKMQoHY3Jld19pZBImCiQwNTJhYmYyNS0wMjNiLTQ0MmYt
|
||||
YjBkMy1mMmE2ZTZhYWE3NTlKHAoMY3Jld19wcm9jZXNzEgwKCnNlcXVlbnRpYWxKFQoNY3Jld19s
|
||||
YW5ndWFnZRIECgJlbkoaChRjcmV3X251bWJlcl9vZl90YXNrcxICGABKGwoVY3Jld19udW1iZXJf
|
||||
b2ZfYWdlbnRzEgIYAEoTCgtjcmV3X2FnZW50cxIECgJbXUoSCgpjcmV3X3Rhc2tzEgQKAltdSigK
|
||||
CHBsYXRmb3JtEhwKGm1hY09TLTE0LjMtYXJtNjQtYXJtLTY0Yml0ShwKEHBsYXRmb3JtX3JlbGVh
|
||||
c2USCAoGMjMuMy4wShsKD3BsYXRmb3JtX3N5c3RlbRIICgZEYXJ3aW5KewoQcGxhdGZvcm1fdmVy
|
||||
c2lvbhJnCmVEYXJ3aW4gS2VybmVsIFZlcnNpb24gMjMuMy4wOiBXZWQgRGVjIDIwIDIxOjMwOjU5
|
||||
IFBTVCAyMDIzOyByb290OnhudS0xMDAwMi44MS41fjcvUkVMRUFTRV9BUk02NF9UNjAzMEoKCgRj
|
||||
cHVzEgIYDHoCGAESsgQKEPzk2jmJI2EkoYkrrPvvqSASCO2WOm6AjhlVKgxDcmV3IENyZWF0ZWQw
|
||||
ATlYMt2j/2i1F0GYS96j/2i1F0oaCg5jcmV3YWlfdmVyc2lvbhIICgYwLjExLjFKGgoOcHl0aG9u
|
||||
X3ZlcnNpb24SCAoGMy4xMS43SjEKB2NyZXdfaWQSJgokNmQwZTBiNjktMWE4Yi00MWQ3LTk2OGUt
|
||||
ZTJkN2NkYjFjODAzShwKDGNyZXdfcHJvY2VzcxIMCgpzZXF1ZW50aWFsShUKDWNyZXdfbGFuZ3Vh
|
||||
Z2USBAoCZW5KGgoUY3Jld19udW1iZXJfb2ZfdGFza3MSAhgAShsKFWNyZXdfbnVtYmVyX29mX2Fn
|
||||
ZW50cxICGABKEwoLY3Jld19hZ2VudHMSBAoCW11KEgoKY3Jld190YXNrcxIECgJbXUooCghwbGF0
|
||||
Zm9ybRIcChptYWNPUy0xNC4zLWFybTY0LWFybS02NGJpdEocChBwbGF0Zm9ybV9yZWxlYXNlEggK
|
||||
BjIzLjMuMEobCg9wbGF0Zm9ybV9zeXN0ZW0SCAoGRGFyd2luSnsKEHBsYXRmb3JtX3ZlcnNpb24S
|
||||
ZwplRGFyd2luIEtlcm5lbCBWZXJzaW9uIDIzLjMuMDogV2VkIERlYyAyMCAyMTozMDo1OSBQU1Qg
|
||||
MjAyMzsgcm9vdDp4bnUtMTAwMDIuODEuNX43L1JFTEVBU0VfQVJNNjRfVDYwMzBKCgoEY3B1cxIC
|
||||
GAx6AhgBErIEChAxDMeIyVLdG4XERTtm7zAeEgiH3Tf/w9XySCoMQ3JldyBDcmVhdGVkMAE5INDg
|
||||
o/9otRdBWJfho/9otRdKGgoOY3Jld2FpX3ZlcnNpb24SCAoGMC4xMS4xShoKDnB5dGhvbl92ZXJz
|
||||
aW9uEggKBjMuMTEuN0oxCgdjcmV3X2lkEiYKJDJjYzJlOTZhLTI4ZDctNDgzMy04Mzk3LTk1OTQw
|
||||
ZDIzZWEyY0ocCgxjcmV3X3Byb2Nlc3MSDAoKc2VxdWVudGlhbEoVCg1jcmV3X2xhbmd1YWdlEgQK
|
||||
AmVuShoKFGNyZXdfbnVtYmVyX29mX3Rhc2tzEgIYAEobChVjcmV3X251bWJlcl9vZl9hZ2VudHMS
|
||||
AhgAShMKC2NyZXdfYWdlbnRzEgQKAltdShIKCmNyZXdfdGFza3MSBAoCW11KKAoIcGxhdGZvcm0S
|
||||
HAoabWFjT1MtMTQuMy1hcm02NC1hcm0tNjRiaXRKHAoQcGxhdGZvcm1fcmVsZWFzZRIICgYyMy4z
|
||||
LjBKGwoPcGxhdGZvcm1fc3lzdGVtEggKBkRhcndpbkp7ChBwbGF0Zm9ybV92ZXJzaW9uEmcKZURh
|
||||
cndpbiBLZXJuZWwgVmVyc2lvbiAyMy4zLjA6IFdlZCBEZWMgMjAgMjE6MzA6NTkgUFNUIDIwMjM7
|
||||
IHJvb3Q6eG51LTEwMDAyLjgxLjV+Ny9SRUxFQVNFX0FSTTY0X1Q2MDMwSgoKBGNwdXMSAhgMegIY
|
||||
ARKyBAoQaT1AK6fdr7p65Jb1fVxEoRIIhilpNt12qyYqDENyZXcgQ3JlYXRlZDABOWgd56P/aLUX
|
||||
QQDV56P/aLUXShoKDmNyZXdhaV92ZXJzaW9uEggKBjAuMTEuMUoaCg5weXRob25fdmVyc2lvbhII
|
||||
CgYzLjExLjdKMQoHY3Jld19pZBImCiQzMTQxMzkzYi1iNDdjLTRjNjMtOTdmNS1iNGJiNjhkZTcy
|
||||
MWRKHAoMY3Jld19wcm9jZXNzEgwKCnNlcXVlbnRpYWxKFQoNY3Jld19sYW5ndWFnZRIECgJlbkoa
|
||||
ChRjcmV3X251bWJlcl9vZl90YXNrcxICGABKGwoVY3Jld19udW1iZXJfb2ZfYWdlbnRzEgIYAEoT
|
||||
CgtjcmV3X2FnZW50cxIECgJbXUoSCgpjcmV3X3Rhc2tzEgQKAltdSigKCHBsYXRmb3JtEhwKGm1h
|
||||
Y09TLTE0LjMtYXJtNjQtYXJtLTY0Yml0ShwKEHBsYXRmb3JtX3JlbGVhc2USCAoGMjMuMy4wShsK
|
||||
D3BsYXRmb3JtX3N5c3RlbRIICgZEYXJ3aW5KewoQcGxhdGZvcm1fdmVyc2lvbhJnCmVEYXJ3aW4g
|
||||
S2VybmVsIFZlcnNpb24gMjMuMy4wOiBXZWQgRGVjIDIwIDIxOjMwOjU5IFBTVCAyMDIzOyByb290
|
||||
OnhudS0xMDAwMi44MS41fjcvUkVMRUFTRV9BUk02NF9UNjAzMEoKCgRjcHVzEgIYDHoCGAESoAsK
|
||||
EGrC3Z3T0TbUGmLIlILJ2/0SCMSkDjTNbh3GKgxDcmV3IENyZWF0ZWQwATlYXimk/2i1F0EYtiqk
|
||||
/2i1F0oaCg5jcmV3YWlfdmVyc2lvbhIICgYwLjExLjFKGgoOcHl0aG9uX3ZlcnNpb24SCAoGMy4x
|
||||
MS43SjEKB2NyZXdfaWQSJgokODNkMzBlMTAtZjdlOS00Yzk4LThjZjMtMGZkMjhjNTg0MTRiShwK
|
||||
DGNyZXdfcHJvY2VzcxIMCgpzZXF1ZW50aWFsShUKDWNyZXdfbGFuZ3VhZ2USBAoCZW5KGgoUY3Jl
|
||||
d19udW1iZXJfb2ZfdGFza3MSAhgCShsKFWNyZXdfbnVtYmVyX29mX2FnZW50cxICGAJKiAUKC2Ny
|
||||
ZXdfYWdlbnRzEvgECvUEW3siaWQiOiAiMDcxNjI0MjYtZTk3MS00Y2U1LWI5MGUtZDk2ODg1NmM4
|
||||
ZjE0IiwgInJvbGUiOiAiUmVzZWFyY2hlciIsICJtZW1vcnlfZW5hYmxlZD8iOiB0cnVlLCAidmVy
|
||||
Ym9zZT8iOiBmYWxzZSwgIm1heF9pdGVyIjogMTUsICJtYXhfcnBtIjogbnVsbCwgImkxOG4iOiAi
|
||||
ZW4iLCAibGxtIjogIntcIm5hbWVcIjogbnVsbCwgXCJtb2RlbF9uYW1lXCI6IFwiZ3B0LTRcIiwg
|
||||
XCJ0ZW1wZXJhdHVyZVwiOiAwLjcsIFwiY2xhc3NcIjogXCJDaGF0T3BlbkFJXCJ9IiwgImRlbGVn
|
||||
YXRpb25fZW5hYmxlZD8iOiBmYWxzZSwgInRvb2xzX25hbWVzIjogW119LCB7ImlkIjogIjhiOGE2
|
||||
M2E5LTdiYTEtNGZlMC04MWM5LWUxYjRjZjFhYWM2YyIsICJyb2xlIjogIlNlbmlvciBXcml0ZXIi
|
||||
LCAibWVtb3J5X2VuYWJsZWQ/IjogdHJ1ZSwgInZlcmJvc2U/IjogZmFsc2UsICJtYXhfaXRlciI6
|
||||
IDE1LCAibWF4X3JwbSI6IG51bGwsICJpMThuIjogImVuIiwgImxsbSI6ICJ7XCJuYW1lXCI6IG51
|
||||
bGwsIFwibW9kZWxfbmFtZVwiOiBcImdwdC00XCIsIFwidGVtcGVyYXR1cmVcIjogMC43LCBcImNs
|
||||
YXNzXCI6IFwiQ2hhdE9wZW5BSVwifSIsICJkZWxlZ2F0aW9uX2VuYWJsZWQ/IjogZmFsc2UsICJ0
|
||||
b29sc19uYW1lcyI6IFtdfV1KiQIKCmNyZXdfdGFza3MS+gEK9wFbeyJpZCI6ICIwMDY2MDExMC01
|
||||
OGQ5LTRkOGUtYjM1Yi00ZDY0M2ZlNTk0YjMiLCAiYXN5bmNfZXhlY3V0aW9uPyI6IGZhbHNlLCAi
|
||||
YWdlbnRfcm9sZSI6ICJSZXNlYXJjaGVyIiwgInRvb2xzX25hbWVzIjogW119LCB7ImlkIjogIjgy
|
||||
MGVhOTBkLTUyZmYtNDcyNi1hZjBkLWZlNzlmYjNjZDBiMSIsICJhc3luY19leGVjdXRpb24/Ijog
|
||||
ZmFsc2UsICJhZ2VudF9yb2xlIjogIlNlbmlvciBXcml0ZXIiLCAidG9vbHNfbmFtZXMiOiBbXX1d
|
||||
SigKCHBsYXRmb3JtEhwKGm1hY09TLTE0LjMtYXJtNjQtYXJtLTY0Yml0ShwKEHBsYXRmb3JtX3Jl
|
||||
bGVhc2USCAoGMjMuMy4wShsKD3BsYXRmb3JtX3N5c3RlbRIICgZEYXJ3aW5KewoQcGxhdGZvcm1f
|
||||
dmVyc2lvbhJnCmVEYXJ3aW4gS2VybmVsIFZlcnNpb24gMjMuMy4wOiBXZWQgRGVjIDIwIDIxOjMw
|
||||
OjU5IFBTVCAyMDIzOyByb290OnhudS0xMDAwMi44MS41fjcvUkVMRUFTRV9BUk02NF9UNjAzMEoK
|
||||
CgRjcHVzEgIYDHoCGAESnQoKECAXGlPVhy3fsKPoQgzRxDoSCHsf2kNdPtmnKgxDcmV3IENyZWF0
|
||||
ZWQwATmA4Tyz/2i1F0Hwbz6z/2i1F0oaCg5jcmV3YWlfdmVyc2lvbhIICgYwLjExLjFKGgoOcHl0
|
||||
aG9uX3ZlcnNpb24SCAoGMy4xMS43SjEKB2NyZXdfaWQSJgokMjA2MjIyM2UtNTY2OS00YjBhLWFj
|
||||
NzQtMzBkZmQ0N2M2OGQwSh4KDGNyZXdfcHJvY2VzcxIOCgxoaWVyYXJjaGljYWxKFQoNY3Jld19s
|
||||
YW5ndWFnZRIECgJlbkoaChRjcmV3X251bWJlcl9vZl90YXNrcxICGAFKGwoVY3Jld19udW1iZXJf
|
||||
b2ZfYWdlbnRzEgIYAkqIBQoLY3Jld19hZ2VudHMS+AQK9QRbeyJpZCI6ICIwNzE2MjQyNi1lOTcx
|
||||
LTRjZTUtYjkwZS1kOTY4ODU2YzhmMTQiLCAicm9sZSI6ICJSZXNlYXJjaGVyIiwgIm1lbW9yeV9l
|
||||
bmFibGVkPyI6IHRydWUsICJ2ZXJib3NlPyI6IGZhbHNlLCAibWF4X2l0ZXIiOiAxNSwgIm1heF9y
|
||||
cG0iOiBudWxsLCAiaTE4biI6ICJlbiIsICJsbG0iOiAie1wibmFtZVwiOiBudWxsLCBcIm1vZGVs
|
||||
X25hbWVcIjogXCJncHQtNFwiLCBcInRlbXBlcmF0dXJlXCI6IDAuNywgXCJjbGFzc1wiOiBcIkNo
|
||||
YXRPcGVuQUlcIn0iLCAiZGVsZWdhdGlvbl9lbmFibGVkPyI6IGZhbHNlLCAidG9vbHNfbmFtZXMi
|
||||
OiBbXX0sIHsiaWQiOiAiOGI4YTYzYTktN2JhMS00ZmUwLTgxYzktZTFiNGNmMWFhYzZjIiwgInJv
|
||||
bGUiOiAiU2VuaW9yIFdyaXRlciIsICJtZW1vcnlfZW5hYmxlZD8iOiB0cnVlLCAidmVyYm9zZT8i
|
||||
OiBmYWxzZSwgIm1heF9pdGVyIjogMTUsICJtYXhfcnBtIjogbnVsbCwgImkxOG4iOiAiZW4iLCAi
|
||||
bGxtIjogIntcIm5hbWVcIjogbnVsbCwgXCJtb2RlbF9uYW1lXCI6IFwiZ3B0LTRcIiwgXCJ0ZW1w
|
||||
ZXJhdHVyZVwiOiAwLjcsIFwiY2xhc3NcIjogXCJDaGF0T3BlbkFJXCJ9IiwgImRlbGVnYXRpb25f
|
||||
ZW5hYmxlZD8iOiBmYWxzZSwgInRvb2xzX25hbWVzIjogW119XUqEAQoKY3Jld190YXNrcxJ2CnRb
|
||||
eyJpZCI6ICIxOThjY2FiYy03NDhmLTQ2MzEtOTgyYS0wNDBhOTk3NzBkMzAiLCAiYXN5bmNfZXhl
|
||||
Y3V0aW9uPyI6IGZhbHNlLCAiYWdlbnRfcm9sZSI6ICJOb25lIiwgInRvb2xzX25hbWVzIjogW119
|
||||
XUooCghwbGF0Zm9ybRIcChptYWNPUy0xNC4zLWFybTY0LWFybS02NGJpdEocChBwbGF0Zm9ybV9y
|
||||
ZWxlYXNlEggKBjIzLjMuMEobCg9wbGF0Zm9ybV9zeXN0ZW0SCAoGRGFyd2luSnsKEHBsYXRmb3Jt
|
||||
X3ZlcnNpb24SZwplRGFyd2luIEtlcm5lbCBWZXJzaW9uIDIzLjMuMDogV2VkIERlYyAyMCAyMToz
|
||||
MDo1OSBQU1QgMjAyMzsgcm9vdDp4bnUtMTAwMDIuODEuNX43L1JFTEVBU0VfQVJNNjRfVDYwMzBK
|
||||
CgoEY3B1cxICGAx6AhgBEp0KChDm065n3J+O8xI0St3gB1pFEghMgYPAB+vGFioMQ3JldyBDcmVh
|
||||
dGVkMAE5SCALu/9otRdBiLYMu/9otRdKGgoOY3Jld2FpX3ZlcnNpb24SCAoGMC4xMS4xShoKDnB5
|
||||
dGhvbl92ZXJzaW9uEggKBjMuMTEuN0oxCgdjcmV3X2lkEiYKJDk3ZDdkOTI3LTMyMzUtNGQ4ZC04
|
||||
NzJmLWY1ODc2M2UyZTAwMkoeCgxjcmV3X3Byb2Nlc3MSDgoMaGllcmFyY2hpY2FsShUKDWNyZXdf
|
||||
bGFuZ3VhZ2USBAoCZW5KGgoUY3Jld19udW1iZXJfb2ZfdGFza3MSAhgBShsKFWNyZXdfbnVtYmVy
|
||||
X29mX2FnZW50cxICGAJKiAUKC2NyZXdfYWdlbnRzEvgECvUEW3siaWQiOiAiMDcxNjI0MjYtZTk3
|
||||
MS00Y2U1LWI5MGUtZDk2ODg1NmM4ZjE0IiwgInJvbGUiOiAiUmVzZWFyY2hlciIsICJtZW1vcnlf
|
||||
ZW5hYmxlZD8iOiB0cnVlLCAidmVyYm9zZT8iOiBmYWxzZSwgIm1heF9pdGVyIjogMTUsICJtYXhf
|
||||
cnBtIjogbnVsbCwgImkxOG4iOiAiZW4iLCAibGxtIjogIntcIm5hbWVcIjogbnVsbCwgXCJtb2Rl
|
||||
bF9uYW1lXCI6IFwiZ3B0LTRcIiwgXCJ0ZW1wZXJhdHVyZVwiOiAwLjcsIFwiY2xhc3NcIjogXCJD
|
||||
aGF0T3BlbkFJXCJ9IiwgImRlbGVnYXRpb25fZW5hYmxlZD8iOiBmYWxzZSwgInRvb2xzX25hbWVz
|
||||
IjogW119LCB7ImlkIjogIjhiOGE2M2E5LTdiYTEtNGZlMC04MWM5LWUxYjRjZjFhYWM2YyIsICJy
|
||||
b2xlIjogIlNlbmlvciBXcml0ZXIiLCAibWVtb3J5X2VuYWJsZWQ/IjogdHJ1ZSwgInZlcmJvc2U/
|
||||
IjogZmFsc2UsICJtYXhfaXRlciI6IDE1LCAibWF4X3JwbSI6IG51bGwsICJpMThuIjogImVuIiwg
|
||||
ImxsbSI6ICJ7XCJuYW1lXCI6IG51bGwsIFwibW9kZWxfbmFtZVwiOiBcImdwdC00XCIsIFwidGVt
|
||||
cGVyYXR1cmVcIjogMC43LCBcImNsYXNzXCI6IFwiQ2hhdE9wZW5BSVwifSIsICJkZWxlZ2F0aW9u
|
||||
X2VuYWJsZWQ/IjogZmFsc2UsICJ0b29sc19uYW1lcyI6IFtdfV1KhAEKCmNyZXdfdGFza3MSdgp0
|
||||
W3siaWQiOiAiYzUyMDEwODctY2ZiYi00NGI1LWExYmEtMjY3OTkyMDlmMWQxIiwgImFzeW5jX2V4
|
||||
ZWN1dGlvbj8iOiBmYWxzZSwgImFnZW50X3JvbGUiOiAiTm9uZSIsICJ0b29sc19uYW1lcyI6IFtd
|
||||
fV1KKAoIcGxhdGZvcm0SHAoabWFjT1MtMTQuMy1hcm02NC1hcm0tNjRiaXRKHAoQcGxhdGZvcm1f
|
||||
cmVsZWFzZRIICgYyMy4zLjBKGwoPcGxhdGZvcm1fc3lzdGVtEggKBkRhcndpbkp7ChBwbGF0Zm9y
|
||||
bV92ZXJzaW9uEmcKZURhcndpbiBLZXJuZWwgVmVyc2lvbiAyMy4zLjA6IFdlZCBEZWMgMjAgMjE6
|
||||
MzA6NTkgUFNUIDIwMjM7IHJvb3Q6eG51LTEwMDAyLjgxLjV+Ny9SRUxFQVNFX0FSTTY0X1Q2MDMw
|
||||
SgoKBGNwdXMSAhgMegIYARKSCgoQ4Ch80ifm34dkfn0yKkAtAxIIMoxBWgpq37IqDENyZXcgQ3Jl
|
||||
YXRlZDABOTB5Zbv/aLUXQeCpZrv/aLUXShoKDmNyZXdhaV92ZXJzaW9uEggKBjAuMTEuMUoaCg5w
|
||||
eXRob25fdmVyc2lvbhIICgYzLjExLjdKMQoHY3Jld19pZBImCiQ3YzEzZjk4YS04MWNjLTQ4MmMt
|
||||
YjI5MS1kMzE4NWVkYTk2YTdKHAoMY3Jld19wcm9jZXNzEgwKCnNlcXVlbnRpYWxKFQoNY3Jld19s
|
||||
YW5ndWFnZRIECgJlbkoaChRjcmV3X251bWJlcl9vZl90YXNrcxICGAFKGwoVY3Jld19udW1iZXJf
|
||||
b2ZfYWdlbnRzEgIYAkqABQoLY3Jld19hZ2VudHMS8AQK7QRbeyJpZCI6ICJlODQ5ZDlkMi02ODM4
|
||||
LTRjZGMtODQ5OS1jNDY5ZmVkYjU3ZTYiLCAicm9sZSI6ICJDRU8iLCAibWVtb3J5X2VuYWJsZWQ/
|
||||
IjogdHJ1ZSwgInZlcmJvc2U/IjogZmFsc2UsICJtYXhfaXRlciI6IDE1LCAibWF4X3JwbSI6IG51
|
||||
bGwsICJpMThuIjogImVuIiwgImxsbSI6ICJ7XCJuYW1lXCI6IG51bGwsIFwibW9kZWxfbmFtZVwi
|
||||
OiBcImdwdC00XCIsIFwidGVtcGVyYXR1cmVcIjogMC43LCBcImNsYXNzXCI6IFwiQ2hhdE9wZW5B
|
||||
SVwifSIsICJkZWxlZ2F0aW9uX2VuYWJsZWQ/IjogdHJ1ZSwgInRvb2xzX25hbWVzIjogW119LCB7
|
||||
ImlkIjogIjhiOGE2M2E5LTdiYTEtNGZlMC04MWM5LWUxYjRjZjFhYWM2YyIsICJyb2xlIjogIlNl
|
||||
bmlvciBXcml0ZXIiLCAibWVtb3J5X2VuYWJsZWQ/IjogdHJ1ZSwgInZlcmJvc2U/IjogZmFsc2Us
|
||||
ICJtYXhfaXRlciI6IDE1LCAibWF4X3JwbSI6IG51bGwsICJpMThuIjogImVuIiwgImxsbSI6ICJ7
|
||||
XCJuYW1lXCI6IG51bGwsIFwibW9kZWxfbmFtZVwiOiBcImdwdC00XCIsIFwidGVtcGVyYXR1cmVc
|
||||
IjogMC43LCBcImNsYXNzXCI6IFwiQ2hhdE9wZW5BSVwifSIsICJkZWxlZ2F0aW9uX2VuYWJsZWQ/
|
||||
IjogZmFsc2UsICJ0b29sc19uYW1lcyI6IFtdfV1KgwEKCmNyZXdfdGFza3MSdQpzW3siaWQiOiAi
|
||||
ZTc5OTg1OTktMDBlNS00NWFhLTllMzItNmYxZjFiYjY0YjU5IiwgImFzeW5jX2V4ZWN1dGlvbj8i
|
||||
OiBmYWxzZSwgImFnZW50X3JvbGUiOiAiQ0VPIiwgInRvb2xzX25hbWVzIjogW119XUooCghwbGF0
|
||||
Zm9ybRIcChptYWNPUy0xNC4zLWFybTY0LWFybS02NGJpdEocChBwbGF0Zm9ybV9yZWxlYXNlEggK
|
||||
BjIzLjMuMEobCg9wbGF0Zm9ybV9zeXN0ZW0SCAoGRGFyd2luSnsKEHBsYXRmb3JtX3ZlcnNpb24S
|
||||
ZwplRGFyd2luIEtlcm5lbCBWZXJzaW9uIDIzLjMuMDogV2VkIERlYyAyMCAyMTozMDo1OSBQU1Qg
|
||||
MjAyMzsgcm9vdDp4bnUtMTAwMDIuODEuNX43L1JFTEVBU0VfQVJNNjRfVDYwMzBKCgoEY3B1cxIC
|
||||
GAx6AhgBEqALChD9D1juNXXoNRjaGsioq4/tEgjeXlBBQMr5qioMQ3JldyBDcmVhdGVkMAE5qDD5
|
||||
yv9otRdBoNL6yv9otRdKGgoOY3Jld2FpX3ZlcnNpb24SCAoGMC4xMS4xShoKDnB5dGhvbl92ZXJz
|
||||
aW9uEggKBjMuMTEuN0oxCgdjcmV3X2lkEiYKJGI2NDlhODY2LWJhNTAtNGY5MC04ODAzLWVkYjVm
|
||||
MmM0M2NjNUocCgxjcmV3X3Byb2Nlc3MSDAoKc2VxdWVudGlhbEoVCg1jcmV3X2xhbmd1YWdlEgQK
|
||||
AmVuShoKFGNyZXdfbnVtYmVyX29mX3Rhc2tzEgIYAkobChVjcmV3X251bWJlcl9vZl9hZ2VudHMS
|
||||
AhgCSogFCgtjcmV3X2FnZW50cxL4BAr1BFt7ImlkIjogIjA3MTYyNDI2LWU5NzEtNGNlNS1iOTBl
|
||||
LWQ5Njg4NTZjOGYxNCIsICJyb2xlIjogIlJlc2VhcmNoZXIiLCAibWVtb3J5X2VuYWJsZWQ/Ijog
|
||||
dHJ1ZSwgInZlcmJvc2U/IjogZmFsc2UsICJtYXhfaXRlciI6IDE1LCAibWF4X3JwbSI6IG51bGws
|
||||
ICJpMThuIjogImVuIiwgImxsbSI6ICJ7XCJuYW1lXCI6IG51bGwsIFwibW9kZWxfbmFtZVwiOiBc
|
||||
ImdwdC00XCIsIFwidGVtcGVyYXR1cmVcIjogMC43LCBcImNsYXNzXCI6IFwiQ2hhdE9wZW5BSVwi
|
||||
fSIsICJkZWxlZ2F0aW9uX2VuYWJsZWQ/IjogZmFsc2UsICJ0b29sc19uYW1lcyI6IFtdfSwgeyJp
|
||||
ZCI6ICI4YjhhNjNhOS03YmExLTRmZTAtODFjOS1lMWI0Y2YxYWFjNmMiLCAicm9sZSI6ICJTZW5p
|
||||
b3IgV3JpdGVyIiwgIm1lbW9yeV9lbmFibGVkPyI6IHRydWUsICJ2ZXJib3NlPyI6IGZhbHNlLCAi
|
||||
bWF4X2l0ZXIiOiAxNSwgIm1heF9ycG0iOiBudWxsLCAiaTE4biI6ICJlbiIsICJsbG0iOiAie1wi
|
||||
bmFtZVwiOiBudWxsLCBcIm1vZGVsX25hbWVcIjogXCJncHQtNFwiLCBcInRlbXBlcmF0dXJlXCI6
|
||||
IDAuNywgXCJjbGFzc1wiOiBcIkNoYXRPcGVuQUlcIn0iLCAiZGVsZWdhdGlvbl9lbmFibGVkPyI6
|
||||
IGZhbHNlLCAidG9vbHNfbmFtZXMiOiBbXX1dSokCCgpjcmV3X3Rhc2tzEvoBCvcBW3siaWQiOiAi
|
||||
YzEwODczYmUtOTUxMi00MmQ5LTliY2UtNTAwNWY2OTJkNDlkIiwgImFzeW5jX2V4ZWN1dGlvbj8i
|
||||
OiBmYWxzZSwgImFnZW50X3JvbGUiOiAiUmVzZWFyY2hlciIsICJ0b29sc19uYW1lcyI6IFtdfSwg
|
||||
eyJpZCI6ICJiNDVkMzk5ZC0xMWZiLTQ3M2UtOWY2Ni1kNmZkNWUyMWM5ZGIiLCAiYXN5bmNfZXhl
|
||||
Y3V0aW9uPyI6IGZhbHNlLCAiYWdlbnRfcm9sZSI6ICJTZW5pb3IgV3JpdGVyIiwgInRvb2xzX25h
|
||||
bWVzIjogW119XUooCghwbGF0Zm9ybRIcChptYWNPUy0xNC4zLWFybTY0LWFybS02NGJpdEocChBw
|
||||
bGF0Zm9ybV9yZWxlYXNlEggKBjIzLjMuMEobCg9wbGF0Zm9ybV9zeXN0ZW0SCAoGRGFyd2luSnsK
|
||||
EHBsYXRmb3JtX3ZlcnNpb24SZwplRGFyd2luIEtlcm5lbCBWZXJzaW9uIDIzLjMuMDogV2VkIERl
|
||||
YyAyMCAyMTozMDo1OSBQU1QgMjAyMzsgcm9vdDp4bnUtMTAwMDIuODEuNX43L1JFTEVBU0VfQVJN
|
||||
NjRfVDYwMzBKCgoEY3B1cxICGAx6AhgBEuUHChB2eKFfnHcjT+Y1XIuw/jQZEgj2LdHRO4bsiCoM
|
||||
Q3JldyBDcmVhdGVkMAE5oCmJ5f9otRdBGI2K5f9otRdKGgoOY3Jld2FpX3ZlcnNpb24SCAoGMC4x
|
||||
MS4xShoKDnB5dGhvbl92ZXJzaW9uEggKBjMuMTEuN0oxCgdjcmV3X2lkEiYKJDA3MjA1ZjNkLTEw
|
||||
YzYtNDlmOS04NjIxLWQ4N2MyYTFmYzI2OUocCgxjcmV3X3Byb2Nlc3MSDAoKc2VxdWVudGlhbEoV
|
||||
Cg1jcmV3X2xhbmd1YWdlEgQKAmVuShoKFGNyZXdfbnVtYmVyX29mX3Rhc2tzEgIYAUobChVjcmV3
|
||||
X251bWJlcl9vZl9hZ2VudHMSAhgBSswCCgtjcmV3X2FnZW50cxK8Agq5Alt7ImlkIjogIjA3MTYy
|
||||
NDI2LWU5NzEtNGNlNS1iOTBlLWQ5Njg4NTZjOGYxNCIsICJyb2xlIjogIlJlc2VhcmNoZXIiLCAi
|
||||
bWVtb3J5X2VuYWJsZWQ/IjogdHJ1ZSwgInZlcmJvc2U/IjogZmFsc2UsICJtYXhfaXRlciI6IDE1
|
||||
LCAibWF4X3JwbSI6IG51bGwsICJpMThuIjogImVuIiwgImxsbSI6ICJ7XCJuYW1lXCI6IG51bGws
|
||||
IFwibW9kZWxfbmFtZVwiOiBcImdwdC00XCIsIFwidGVtcGVyYXR1cmVcIjogMC43LCBcImNsYXNz
|
||||
XCI6IFwiQ2hhdE9wZW5BSVwifSIsICJkZWxlZ2F0aW9uX2VuYWJsZWQ/IjogZmFsc2UsICJ0b29s
|
||||
c19uYW1lcyI6IFtdfV1KigEKCmNyZXdfdGFza3MSfAp6W3siaWQiOiAiMDQzZmQ5YzItYWQ2My00
|
||||
ZTQyLWFmNGMtYjNkY2Q0ZmMxMWI5IiwgImFzeW5jX2V4ZWN1dGlvbj8iOiBmYWxzZSwgImFnZW50
|
||||
X3JvbGUiOiAiUmVzZWFyY2hlciIsICJ0b29sc19uYW1lcyI6IFtdfV1KKAoIcGxhdGZvcm0SHAoa
|
||||
bWFjT1MtMTQuMy1hcm02NC1hcm0tNjRiaXRKHAoQcGxhdGZvcm1fcmVsZWFzZRIICgYyMy4zLjBK
|
||||
GwoPcGxhdGZvcm1fc3lzdGVtEggKBkRhcndpbkp7ChBwbGF0Zm9ybV92ZXJzaW9uEmcKZURhcndp
|
||||
biBLZXJuZWwgVmVyc2lvbiAyMy4zLjA6IFdlZCBEZWMgMjAgMjE6MzA6NTkgUFNUIDIwMjM7IHJv
|
||||
b3Q6eG51LTEwMDAyLjgxLjV+Ny9SRUxFQVNFX0FSTTY0X1Q2MDMwSgoKBGNwdXMSAhgMegIYARKj
|
||||
CwoQ2AqlO3mh1jyJg6EhNAcochII1e5CgQPQn9IqDENyZXcgQ3JlYXRlZDABOTh3lPT/aLUXQcAB
|
||||
lvT/aLUXShoKDmNyZXdhaV92ZXJzaW9uEggKBjAuMTEuMUoaCg5weXRob25fdmVyc2lvbhIICgYz
|
||||
LjExLjdKMQoHY3Jld19pZBImCiQwZGQwMWMzOS04MTU1LTRlNTYtYmJmMi0wNDQ0YzRkNzA4ZjRK
|
||||
HAoMY3Jld19wcm9jZXNzEgwKCnNlcXVlbnRpYWxKFQoNY3Jld19sYW5ndWFnZRIECgJlbkoaChRj
|
||||
cmV3X251bWJlcl9vZl90YXNrcxICGAJKGwoVY3Jld19udW1iZXJfb2ZfYWdlbnRzEgIYAkr9BAoL
|
||||
Y3Jld19hZ2VudHMS7QQK6gRbeyJpZCI6ICJlODQ5ZDlkMi02ODM4LTRjZGMtODQ5OS1jNDY5ZmVk
|
||||
YjU3ZTYiLCAicm9sZSI6ICJDRU8iLCAibWVtb3J5X2VuYWJsZWQ/IjogdHJ1ZSwgInZlcmJvc2U/
|
||||
IjogZmFsc2UsICJtYXhfaXRlciI6IDE1LCAibWF4X3JwbSI6IG51bGwsICJpMThuIjogImVuIiwg
|
||||
ImxsbSI6ICJ7XCJuYW1lXCI6IG51bGwsIFwibW9kZWxfbmFtZVwiOiBcImdwdC00XCIsIFwidGVt
|
||||
cGVyYXR1cmVcIjogMC43LCBcImNsYXNzXCI6IFwiQ2hhdE9wZW5BSVwifSIsICJkZWxlZ2F0aW9u
|
||||
X2VuYWJsZWQ/IjogdHJ1ZSwgInRvb2xzX25hbWVzIjogW119LCB7ImlkIjogIjA3MTYyNDI2LWU5
|
||||
NzEtNGNlNS1iOTBlLWQ5Njg4NTZjOGYxNCIsICJyb2xlIjogIlJlc2VhcmNoZXIiLCAibWVtb3J5
|
||||
X2VuYWJsZWQ/IjogdHJ1ZSwgInZlcmJvc2U/IjogZmFsc2UsICJtYXhfaXRlciI6IDE1LCAibWF4
|
||||
X3JwbSI6IG51bGwsICJpMThuIjogImVuIiwgImxsbSI6ICJ7XCJuYW1lXCI6IG51bGwsIFwibW9k
|
||||
ZWxfbmFtZVwiOiBcImdwdC00XCIsIFwidGVtcGVyYXR1cmVcIjogMC43LCBcImNsYXNzXCI6IFwi
|
||||
Q2hhdE9wZW5BSVwifSIsICJkZWxlZ2F0aW9uX2VuYWJsZWQ/IjogZmFsc2UsICJ0b29sc19uYW1l
|
||||
cyI6IFtdfV1KlwIKCmNyZXdfdGFza3MSiAIKhQJbeyJpZCI6ICIyNjJiMGQyOC05ODQ1LTQ4YTct
|
||||
ODM0Ny02YzUzODI5Y2MwM2EiLCAiYXN5bmNfZXhlY3V0aW9uPyI6IGZhbHNlLCAiYWdlbnRfcm9s
|
||||
ZSI6ICJDRU8iLCAidG9vbHNfbmFtZXMiOiBbIm11bHRpcGxpZXIiXX0sIHsiaWQiOiAiMTQ3MTAy
|
||||
YjUtMjdlYi00NTI3LTgxMDEtYzAzYTliMzc4NTc2IiwgImFzeW5jX2V4ZWN1dGlvbj8iOiBmYWxz
|
||||
ZSwgImFnZW50X3JvbGUiOiAiUmVzZWFyY2hlciIsICJ0b29sc19uYW1lcyI6IFsibXVsdGlwbGll
|
||||
ciJdfV1KKAoIcGxhdGZvcm0SHAoabWFjT1MtMTQuMy1hcm02NC1hcm0tNjRiaXRKHAoQcGxhdGZv
|
||||
cm1fcmVsZWFzZRIICgYyMy4zLjBKGwoPcGxhdGZvcm1fc3lzdGVtEggKBkRhcndpbkp7ChBwbGF0
|
||||
Zm9ybV92ZXJzaW9uEmcKZURhcndpbiBLZXJuZWwgVmVyc2lvbiAyMy4zLjA6IFdlZCBEZWMgMjAg
|
||||
MjE6MzA6NTkgUFNUIDIwMjM7IHJvb3Q6eG51LTEwMDAyLjgxLjV+Ny9SRUxFQVNFX0FSTTY0X1Q2
|
||||
MDMwSgoKBGNwdXMSAhgMegIYARL1BwoQkfc0U9faxddMfMzGTSZa3xIIN+3nfveYLW4qDENyZXcg
|
||||
Q3JlYXRlZDABOVCNiQMAabUXQbD0igMAabUXShoKDmNyZXdhaV92ZXJzaW9uEggKBjAuMTEuMUoa
|
||||
Cg5weXRob25fdmVyc2lvbhIICgYzLjExLjdKMQoHY3Jld19pZBImCiQzZDc1ZWQzOS00MjBlLTRl
|
||||
ZDQtYWE0YS1hNWE3MGE2NjM2MTVKHAoMY3Jld19wcm9jZXNzEgwKCnNlcXVlbnRpYWxKFQoNY3Jl
|
||||
d19sYW5ndWFnZRIECgJlbkoaChRjcmV3X251bWJlcl9vZl90YXNrcxICGAFKGwoVY3Jld19udW1i
|
||||
ZXJfb2ZfYWdlbnRzEgIYAUrJAgoLY3Jld19hZ2VudHMSuQIKtgJbeyJpZCI6ICI5NjZlZjM2NC1j
|
||||
YTcyLTRiNzUtOTdiNS1lMDI4ZTI5OGNkODgiLCAicm9sZSI6ICJ0ZXN0IHJvbGUiLCAibWVtb3J5
|
||||
X2VuYWJsZWQ/IjogdHJ1ZSwgInZlcmJvc2U/IjogdHJ1ZSwgIm1heF9pdGVyIjogNSwgIm1heF9y
|
||||
cG0iOiBudWxsLCAiaTE4biI6ICJlbiIsICJsbG0iOiAie1wibmFtZVwiOiBudWxsLCBcIm1vZGVs
|
||||
X25hbWVcIjogXCJncHQtNFwiLCBcInRlbXBlcmF0dXJlXCI6IDAuNywgXCJjbGFzc1wiOiBcIkNo
|
||||
YXRPcGVuQUlcIn0iLCAiZGVsZWdhdGlvbl9lbmFibGVkPyI6IGZhbHNlLCAidG9vbHNfbmFtZXMi
|
||||
OiBbXX1dSp0BCgpjcmV3X3Rhc2tzEo4BCosBW3siaWQiOiAiZTM2YWM2YTgtZDc0Yy00NjZlLTg3
|
||||
YTItM2M2NDVlZmU5ZTJhIiwgImFzeW5jX2V4ZWN1dGlvbj8iOiBmYWxzZSwgImFnZW50X3JvbGUi
|
||||
OiAidGVzdCByb2xlIiwgInRvb2xzX25hbWVzIjogWyJnZXRfZmluYWxfYW5zd2VyIl19XUooCghw
|
||||
bGF0Zm9ybRIcChptYWNPUy0xNC4zLWFybTY0LWFybS02NGJpdEocChBwbGF0Zm9ybV9yZWxlYXNl
|
||||
EggKBjIzLjMuMEobCg9wbGF0Zm9ybV9zeXN0ZW0SCAoGRGFyd2luSnsKEHBsYXRmb3JtX3ZlcnNp
|
||||
b24SZwplRGFyd2luIEtlcm5lbCBWZXJzaW9uIDIzLjMuMDogV2VkIERlYyAyMCAyMTozMDo1OSBQ
|
||||
U1QgMjAyMzsgcm9vdDp4bnUtMTAwMDIuODEuNX43L1JFTEVBU0VfQVJNNjRfVDYwMzBKCgoEY3B1
|
||||
cxICGAx6AhgBEt0IChCFv3PoygvwjOr5nDCoxodOEghLXBOUSJrl5yoMQ3JldyBDcmVhdGVkMAE5
|
||||
kMRPDABptRdBCChRDABptRdKGgoOY3Jld2FpX3ZlcnNpb24SCAoGMC4xMS4xShoKDnB5dGhvbl92
|
||||
ZXJzaW9uEggKBjMuMTEuN0oxCgdjcmV3X2lkEiYKJDc5OThjMGQ1LTRlOTItNDdjZi1hMjU3LTNj
|
||||
ZGE1YTQwZjRkZEocCgxjcmV3X3Byb2Nlc3MSDAoKc2VxdWVudGlhbEoVCg1jcmV3X2xhbmd1YWdl
|
||||
EgQKAmVuShoKFGNyZXdfbnVtYmVyX29mX3Rhc2tzEgIYAkobChVjcmV3X251bWJlcl9vZl9hZ2Vu
|
||||
dHMSAhgBSsoCCgtjcmV3X2FnZW50cxK6Agq3Alt7ImlkIjogIjZhYTY3MDkwLTM5ZTAtNDlmMC1h
|
||||
ZDZlLTAyNjA1ZTUxYzdlYSIsICJyb2xlIjogInRlc3Qgcm9sZSIsICJtZW1vcnlfZW5hYmxlZD8i
|
||||
OiB0cnVlLCAidmVyYm9zZT8iOiB0cnVlLCAibWF4X2l0ZXIiOiAxNSwgIm1heF9ycG0iOiBudWxs
|
||||
LCAiaTE4biI6ICJlbiIsICJsbG0iOiAie1wibmFtZVwiOiBudWxsLCBcIm1vZGVsX25hbWVcIjog
|
||||
XCJncHQtNFwiLCBcInRlbXBlcmF0dXJlXCI6IDAuNywgXCJjbGFzc1wiOiBcIkNoYXRPcGVuQUlc
|
||||
In0iLCAiZGVsZWdhdGlvbl9lbmFibGVkPyI6IGZhbHNlLCAidG9vbHNfbmFtZXMiOiBbXX1dSoQC
|
||||
CgpjcmV3X3Rhc2tzEvUBCvIBW3siaWQiOiAiMWJiNzQ5Y2MtYjhkYi00NDEzLWFkMGQtOWI1OTQ4
|
||||
YTI0ZGYyIiwgImFzeW5jX2V4ZWN1dGlvbj8iOiBmYWxzZSwgImFnZW50X3JvbGUiOiAidGVzdCBy
|
||||
b2xlIiwgInRvb2xzX25hbWVzIjogW119LCB7ImlkIjogImZiNDI0MDZlLTlmYjEtNDhmZC1hNTRh
|
||||
LTdiY2MzZmE0NGQyOCIsICJhc3luY19leGVjdXRpb24/IjogZmFsc2UsICJhZ2VudF9yb2xlIjog
|
||||
InRlc3Qgcm9sZSIsICJ0b29sc19uYW1lcyI6IFtdfV1KKAoIcGxhdGZvcm0SHAoabWFjT1MtMTQu
|
||||
My1hcm02NC1hcm0tNjRiaXRKHAoQcGxhdGZvcm1fcmVsZWFzZRIICgYyMy4zLjBKGwoPcGxhdGZv
|
||||
cm1fc3lzdGVtEggKBkRhcndpbkp7ChBwbGF0Zm9ybV92ZXJzaW9uEmcKZURhcndpbiBLZXJuZWwg
|
||||
VmVyc2lvbiAyMy4zLjA6IFdlZCBEZWMgMjAgMjE6MzA6NTkgUFNUIDIwMjM7IHJvb3Q6eG51LTEw
|
||||
MDAyLjgxLjV+Ny9SRUxFQVNFX0FSTTY0X1Q2MDMwSgoKBGNwdXMSAhgMegIYARLiBwoQgK6Vti48
|
||||
YjJuaBViDM57pxIIwlAqvHM82XQqDENyZXcgQ3JlYXRlZDABOeBsdREAabUXQSjYdhEAabUXShoK
|
||||
DmNyZXdhaV92ZXJzaW9uEggKBjAuMTEuMUoaCg5weXRob25fdmVyc2lvbhIICgYzLjExLjdKMQoH
|
||||
Y3Jld19pZBImCiQ1YTA0MWRhNS1jY2MxLTRiNzEtODQ4NC02YmQ5OGZhMWRkZjhKHAoMY3Jld19w
|
||||
cm9jZXNzEgwKCnNlcXVlbnRpYWxKFQoNY3Jld19sYW5ndWFnZRIECgJlbkoaChRjcmV3X251bWJl
|
||||
cl9vZl90YXNrcxICGAFKGwoVY3Jld19udW1iZXJfb2ZfYWdlbnRzEgIYAUrKAgoLY3Jld19hZ2Vu
|
||||
dHMSugIKtwJbeyJpZCI6ICI4NjhhNjUxZi01MDg4LTRlNWMtYjUyMy1hZmRjZjhhMzk4MGYiLCAi
|
||||
cm9sZSI6ICJ0ZXN0IHJvbGUiLCAibWVtb3J5X2VuYWJsZWQ/IjogdHJ1ZSwgInZlcmJvc2U/Ijog
|
||||
dHJ1ZSwgIm1heF9pdGVyIjogMTUsICJtYXhfcnBtIjogbnVsbCwgImkxOG4iOiAiZW4iLCAibGxt
|
||||
IjogIntcIm5hbWVcIjogbnVsbCwgXCJtb2RlbF9uYW1lXCI6IFwiZ3B0LTRcIiwgXCJ0ZW1wZXJh
|
||||
dHVyZVwiOiAwLjcsIFwiY2xhc3NcIjogXCJDaGF0T3BlbkFJXCJ9IiwgImRlbGVnYXRpb25fZW5h
|
||||
YmxlZD8iOiBmYWxzZSwgInRvb2xzX25hbWVzIjogW119XUqJAQoKY3Jld190YXNrcxJ7CnlbeyJp
|
||||
ZCI6ICJiMmI5OWVhYy04M2YwLTQxYjMtYWFkNC02NWE3MWQzZTVjODEiLCAiYXN5bmNfZXhlY3V0
|
||||
aW9uPyI6IGZhbHNlLCAiYWdlbnRfcm9sZSI6ICJ0ZXN0IHJvbGUiLCAidG9vbHNfbmFtZXMiOiBb
|
||||
XX1dSigKCHBsYXRmb3JtEhwKGm1hY09TLTE0LjMtYXJtNjQtYXJtLTY0Yml0ShwKEHBsYXRmb3Jt
|
||||
X3JlbGVhc2USCAoGMjMuMy4wShsKD3BsYXRmb3JtX3N5c3RlbRIICgZEYXJ3aW5KewoQcGxhdGZv
|
||||
cm1fdmVyc2lvbhJnCmVEYXJ3aW4gS2VybmVsIFZlcnNpb24gMjMuMy4wOiBXZWQgRGVjIDIwIDIx
|
||||
OjMwOjU5IFBTVCAyMDIzOyByb290OnhudS0xMDAwMi44MS41fjcvUkVMRUFTRV9BUk02NF9UNjAz
|
||||
MEoKCgRjcHVzEgIYDHoCGAESmAwKEC3+N10HF2aTbIa5eJa41zwSCInyjSyrV37fKgxDcmV3IENy
|
||||
ZWF0ZWQwATnYQ4MRAGm1F0GIdIQRAGm1F0oaCg5jcmV3YWlfdmVyc2lvbhIICgYwLjExLjFKGgoO
|
||||
cHl0aG9uX3ZlcnNpb24SCAoGMy4xMS43SjEKB2NyZXdfaWQSJgokOTJmMWZmNjgtNjVlYi00Mjcw
|
||||
LWJmMmEtNDdjZDdiOGZmYTY4ShwKDGNyZXdfcHJvY2VzcxIMCgpzZXF1ZW50aWFsShUKDWNyZXdf
|
||||
bGFuZ3VhZ2USBAoCZW5KGgoUY3Jld19udW1iZXJfb2ZfdGFza3MSAhgDShsKFWNyZXdfbnVtYmVy
|
||||
X29mX2FnZW50cxICGAJKiAUKC2NyZXdfYWdlbnRzEvgECvUEW3siaWQiOiAiMDcxNjI0MjYtZTk3
|
||||
MS00Y2U1LWI5MGUtZDk2ODg1NmM4ZjE0IiwgInJvbGUiOiAiUmVzZWFyY2hlciIsICJtZW1vcnlf
|
||||
ZW5hYmxlZD8iOiB0cnVlLCAidmVyYm9zZT8iOiBmYWxzZSwgIm1heF9pdGVyIjogMTUsICJtYXhf
|
||||
cnBtIjogbnVsbCwgImkxOG4iOiAiZW4iLCAibGxtIjogIntcIm5hbWVcIjogbnVsbCwgXCJtb2Rl
|
||||
bF9uYW1lXCI6IFwiZ3B0LTRcIiwgXCJ0ZW1wZXJhdHVyZVwiOiAwLjcsIFwiY2xhc3NcIjogXCJD
|
||||
aGF0T3BlbkFJXCJ9IiwgImRlbGVnYXRpb25fZW5hYmxlZD8iOiBmYWxzZSwgInRvb2xzX25hbWVz
|
||||
IjogW119LCB7ImlkIjogIjhiOGE2M2E5LTdiYTEtNGZlMC04MWM5LWUxYjRjZjFhYWM2YyIsICJy
|
||||
b2xlIjogIlNlbmlvciBXcml0ZXIiLCAibWVtb3J5X2VuYWJsZWQ/IjogdHJ1ZSwgInZlcmJvc2U/
|
||||
IjogZmFsc2UsICJtYXhfaXRlciI6IDE1LCAibWF4X3JwbSI6IG51bGwsICJpMThuIjogImVuIiwg
|
||||
ImxsbSI6ICJ7XCJuYW1lXCI6IG51bGwsIFwibW9kZWxfbmFtZVwiOiBcImdwdC00XCIsIFwidGVt
|
||||
cGVyYXR1cmVcIjogMC43LCBcImNsYXNzXCI6IFwiQ2hhdE9wZW5BSVwifSIsICJkZWxlZ2F0aW9u
|
||||
X2VuYWJsZWQ/IjogZmFsc2UsICJ0b29sc19uYW1lcyI6IFtdfV1KgQMKCmNyZXdfdGFza3MS8gIK
|
||||
7wJbeyJpZCI6ICIxMmI4Y2QxNi0zNTRhLTQwMTktOTc4Yi02NjUxOTdiYjFkYzkiLCAiYXN5bmNf
|
||||
ZXhlY3V0aW9uPyI6IHRydWUsICJhZ2VudF9yb2xlIjogIlJlc2VhcmNoZXIiLCAidG9vbHNfbmFt
|
||||
ZXMiOiBbXX0sIHsiaWQiOiAiOWM4MWVjOWQtMmVlMy00YmRjLTg0M2UtOGE5OTg0ZGMyMTJlIiwg
|
||||
ImFzeW5jX2V4ZWN1dGlvbj8iOiB0cnVlLCAiYWdlbnRfcm9sZSI6ICJSZXNlYXJjaGVyIiwgInRv
|
||||
b2xzX25hbWVzIjogW119LCB7ImlkIjogIjk1Njg4M2E3LWEyYzQtNDcxMy1hNDExLTIwODJiYjMy
|
||||
YzQ1MyIsICJhc3luY19leGVjdXRpb24/IjogZmFsc2UsICJhZ2VudF9yb2xlIjogIlNlbmlvciBX
|
||||
cml0ZXIiLCAidG9vbHNfbmFtZXMiOiBbXX1dSigKCHBsYXRmb3JtEhwKGm1hY09TLTE0LjMtYXJt
|
||||
NjQtYXJtLTY0Yml0ShwKEHBsYXRmb3JtX3JlbGVhc2USCAoGMjMuMy4wShsKD3BsYXRmb3JtX3N5
|
||||
c3RlbRIICgZEYXJ3aW5KewoQcGxhdGZvcm1fdmVyc2lvbhJnCmVEYXJ3aW4gS2VybmVsIFZlcnNp
|
||||
b24gMjMuMy4wOiBXZWQgRGVjIDIwIDIxOjMwOjU5IFBTVCAyMDIzOyByb290OnhudS0xMDAwMi44
|
||||
MS41fjcvUkVMRUFTRV9BUk02NF9UNjAzMEoKCgRjcHVzEgIYDHoCGAES5AcKEFZ6R51jS3b7NAII
|
||||
FFJxc/USCD4Fymb2I6UhKgxDcmV3IENyZWF0ZWQwATnIxUwSAGm1F0GIHU4SAGm1F0oaCg5jcmV3
|
||||
YWlfdmVyc2lvbhIICgYwLjExLjFKGgoOcHl0aG9uX3ZlcnNpb24SCAoGMy4xMS43SjEKB2NyZXdf
|
||||
aWQSJgokNGZlNThjMzMtZWIwOS00YWVjLTliNTAtZTBmNjQ0NGQyOGUyShwKDGNyZXdfcHJvY2Vz
|
||||
cxIMCgpzZXF1ZW50aWFsShUKDWNyZXdfbGFuZ3VhZ2USBAoCZW5KGgoUY3Jld19udW1iZXJfb2Zf
|
||||
dGFza3MSAhgBShsKFWNyZXdfbnVtYmVyX29mX2FnZW50cxICGAFKzAIKC2NyZXdfYWdlbnRzErwC
|
||||
CrkCW3siaWQiOiAiMmFiOTQ1ZWMtNmFkZi00ZjhlLThjYzEtZDc1YzM0MGE1NWIwIiwgInJvbGUi
|
||||
OiAiUmVzZWFyY2hlciIsICJtZW1vcnlfZW5hYmxlZD8iOiB0cnVlLCAidmVyYm9zZT8iOiBmYWxz
|
||||
ZSwgIm1heF9pdGVyIjogMTUsICJtYXhfcnBtIjogbnVsbCwgImkxOG4iOiAiZW4iLCAibGxtIjog
|
||||
IntcIm5hbWVcIjogbnVsbCwgXCJtb2RlbF9uYW1lXCI6IFwiZ3B0LTRcIiwgXCJ0ZW1wZXJhdHVy
|
||||
ZVwiOiAwLjcsIFwiY2xhc3NcIjogXCJDaGF0T3BlbkFJXCJ9IiwgImRlbGVnYXRpb25fZW5hYmxl
|
||||
ZD8iOiBmYWxzZSwgInRvb2xzX25hbWVzIjogW119XUqJAQoKY3Jld190YXNrcxJ7CnlbeyJpZCI6
|
||||
ICI2YmJmOTM4OS05MDVmLTRiZTQtOTU4OC04OWM0YzgzNDVkNzciLCAiYXN5bmNfZXhlY3V0aW9u
|
||||
PyI6IHRydWUsICJhZ2VudF9yb2xlIjogIlJlc2VhcmNoZXIiLCAidG9vbHNfbmFtZXMiOiBbXX1d
|
||||
SigKCHBsYXRmb3JtEhwKGm1hY09TLTE0LjMtYXJtNjQtYXJtLTY0Yml0ShwKEHBsYXRmb3JtX3Jl
|
||||
bGVhc2USCAoGMjMuMy4wShsKD3BsYXRmb3JtX3N5c3RlbRIICgZEYXJ3aW5KewoQcGxhdGZvcm1f
|
||||
dmVyc2lvbhJnCmVEYXJ3aW4gS2VybmVsIFZlcnNpb24gMjMuMy4wOiBXZWQgRGVjIDIwIDIxOjMw
|
||||
OjU5IFBTVCAyMDIzOyByb290OnhudS0xMDAwMi44MS41fjcvUkVMRUFTRV9BUk02NF9UNjAzMEoK
|
||||
CgRjcHVzEgIYDHoCGAES5AcKEKSvLIWCtcA5+CGbFqDf51ESCLgud/yzYGKJKgxDcmV3IENyZWF0
|
||||
ZWQwATmomtgSAGm1F0FAz9kSAGm1F0oaCg5jcmV3YWlfdmVyc2lvbhIICgYwLjExLjFKGgoOcHl0
|
||||
aG9uX3ZlcnNpb24SCAoGMy4xMS43SjEKB2NyZXdfaWQSJgokOTcxNTBmZTQtNDEzZS00YjMyLWFh
|
||||
YzgtMWZmMTdmMWFlMWIwShwKDGNyZXdfcHJvY2VzcxIMCgpzZXF1ZW50aWFsShUKDWNyZXdfbGFu
|
||||
Z3VhZ2USBAoCZW5KGgoUY3Jld19udW1iZXJfb2ZfdGFza3MSAhgBShsKFWNyZXdfbnVtYmVyX29m
|
||||
X2FnZW50cxICGAFKzAIKC2NyZXdfYWdlbnRzErwCCrkCW3siaWQiOiAiZTZlZTNiYjktYTM4Yy00
|
||||
NjkzLTg1YzAtOWFlN2FiMzY3OTU0IiwgInJvbGUiOiAiUmVzZWFyY2hlciIsICJtZW1vcnlfZW5h
|
||||
YmxlZD8iOiB0cnVlLCAidmVyYm9zZT8iOiBmYWxzZSwgIm1heF9pdGVyIjogMTUsICJtYXhfcnBt
|
||||
IjogbnVsbCwgImkxOG4iOiAiZW4iLCAibGxtIjogIntcIm5hbWVcIjogbnVsbCwgXCJtb2RlbF9u
|
||||
YW1lXCI6IFwiZ3B0LTRcIiwgXCJ0ZW1wZXJhdHVyZVwiOiAwLjcsIFwiY2xhc3NcIjogXCJDaGF0
|
||||
T3BlbkFJXCJ9IiwgImRlbGVnYXRpb25fZW5hYmxlZD8iOiBmYWxzZSwgInRvb2xzX25hbWVzIjog
|
||||
W119XUqJAQoKY3Jld190YXNrcxJ7CnlbeyJpZCI6ICJlNzU3OWM0YS0zN2M4LTQzZjYtYjU3OS03
|
||||
MDYzMjgzOTM2MmUiLCAiYXN5bmNfZXhlY3V0aW9uPyI6IHRydWUsICJhZ2VudF9yb2xlIjogIlJl
|
||||
c2VhcmNoZXIiLCAidG9vbHNfbmFtZXMiOiBbXX1dSigKCHBsYXRmb3JtEhwKGm1hY09TLTE0LjMt
|
||||
YXJtNjQtYXJtLTY0Yml0ShwKEHBsYXRmb3JtX3JlbGVhc2USCAoGMjMuMy4wShsKD3BsYXRmb3Jt
|
||||
X3N5c3RlbRIICgZEYXJ3aW5KewoQcGxhdGZvcm1fdmVyc2lvbhJnCmVEYXJ3aW4gS2VybmVsIFZl
|
||||
cnNpb24gMjMuMy4wOiBXZWQgRGVjIDIwIDIxOjMwOjU5IFBTVCAyMDIzOyByb290OnhudS0xMDAw
|
||||
Mi44MS41fjcvUkVMRUFTRV9BUk02NF9UNjAzMEoKCgRjcHVzEgIYDHoCGAEShAgKEACMX4S7lXMV
|
||||
Uso4rDzumw4SCOFSyoPA+ILgKgxDcmV3IENyZWF0ZWQwATmYVj8UAGm1F0HgwUAUAGm1F0oaCg5j
|
||||
cmV3YWlfdmVyc2lvbhIICgYwLjExLjFKGgoOcHl0aG9uX3ZlcnNpb24SCAoGMy4xMS43SjEKB2Ny
|
||||
ZXdfaWQSJgokYTMyOTVmMmYtZTljNS00MDdkLWE1NDAtZjRmYTI1ZDdlYjIzShwKDGNyZXdfcHJv
|
||||
Y2VzcxIMCgpzZXF1ZW50aWFsShUKDWNyZXdfbGFuZ3VhZ2USBAoCZW5KGgoUY3Jld19udW1iZXJf
|
||||
b2ZfdGFza3MSAhgBShsKFWNyZXdfbnVtYmVyX29mX2FnZW50cxICGAFK2gIKC2NyZXdfYWdlbnRz
|
||||
EsoCCscCW3siaWQiOiAiZDc5YzYxYTMtZWQxYy00OTdhLTliYjgtMjg1OWExNzllZDgxIiwgInJv
|
||||
bGUiOiAidGVzdCByb2xlIiwgIm1lbW9yeV9lbmFibGVkPyI6IHRydWUsICJ2ZXJib3NlPyI6IGZh
|
||||
bHNlLCAibWF4X2l0ZXIiOiAxNSwgIm1heF9ycG0iOiBudWxsLCAiaTE4biI6ICJlbiIsICJsbG0i
|
||||
OiAie1wibmFtZVwiOiBudWxsLCBcIm1vZGVsX25hbWVcIjogXCJncHQtNFwiLCBcInRlbXBlcmF0
|
||||
dXJlXCI6IDAuNywgXCJjbGFzc1wiOiBcIkNoYXRPcGVuQUlcIn0iLCAiZGVsZWdhdGlvbl9lbmFi
|
||||
bGVkPyI6IHRydWUsICJ0b29sc19uYW1lcyI6IFsibGVhcm5fYWJvdXRfQUkiXX1dSpsBCgpjcmV3
|
||||
X3Rhc2tzEowBCokBW3siaWQiOiAiNzgxNzdjNzItNjRhZS00M2ZlLWI1OTItZTliNTlmNzNiNWMw
|
||||
IiwgImFzeW5jX2V4ZWN1dGlvbj8iOiBmYWxzZSwgImFnZW50X3JvbGUiOiAidGVzdCByb2xlIiwg
|
||||
InRvb2xzX25hbWVzIjogWyJsZWFybl9hYm91dF9BSSJdfV1KKAoIcGxhdGZvcm0SHAoabWFjT1Mt
|
||||
MTQuMy1hcm02NC1hcm0tNjRiaXRKHAoQcGxhdGZvcm1fcmVsZWFzZRIICgYyMy4zLjBKGwoPcGxh
|
||||
dGZvcm1fc3lzdGVtEggKBkRhcndpbkp7ChBwbGF0Zm9ybV92ZXJzaW9uEmcKZURhcndpbiBLZXJu
|
||||
ZWwgVmVyc2lvbiAyMy4zLjA6IFdlZCBEZWMgMjAgMjE6MzA6NTkgUFNUIDIwMjM7IHJvb3Q6eG51
|
||||
LTEwMDAyLjgxLjV+Ny9SRUxFQVNFX0FSTTY0X1Q2MDMwSgoKBGNwdXMSAhgMegIYARLdBwoQ+UFv
|
||||
fqm+K1G9c19NjUlY3hIID8INZeMycOYqDENyZXcgQ3JlYXRlZDABOUDLSiMAabUXQeBRTCMAabUX
|
||||
ShoKDmNyZXdhaV92ZXJzaW9uEggKBjAuMTEuMUoaCg5weXRob25fdmVyc2lvbhIICgYzLjExLjdK
|
||||
MQoHY3Jld19pZBImCiRmZTE3MjQzYS1mYTQyLTRmNTAtYmUwZi1lNDY0MDc3MTc3MDhKHAoMY3Jl
|
||||
d19wcm9jZXNzEgwKCnNlcXVlbnRpYWxKFQoNY3Jld19sYW5ndWFnZRIECgJlbkoaChRjcmV3X251
|
||||
bWJlcl9vZl90YXNrcxICGAFKGwoVY3Jld19udW1iZXJfb2ZfYWdlbnRzEgIYAUrIAgoLY3Jld19h
|
||||
Z2VudHMSuAIKtQJbeyJpZCI6ICIzODAxZmQ2Yy1jMDY2LTQ1ZTItOTY4OC03NmY5YWQ4OTU3YzUi
|
||||
LCAicm9sZSI6ICJTY29yZXIiLCAibWVtb3J5X2VuYWJsZWQ/IjogdHJ1ZSwgInZlcmJvc2U/Ijog
|
||||
ZmFsc2UsICJtYXhfaXRlciI6IDE1LCAibWF4X3JwbSI6IG51bGwsICJpMThuIjogImVuIiwgImxs
|
||||
bSI6ICJ7XCJuYW1lXCI6IG51bGwsIFwibW9kZWxfbmFtZVwiOiBcImdwdC00XCIsIFwidGVtcGVy
|
||||
YXR1cmVcIjogMC43LCBcImNsYXNzXCI6IFwiQ2hhdE9wZW5BSVwifSIsICJkZWxlZ2F0aW9uX2Vu
|
||||
YWJsZWQ/IjogZmFsc2UsICJ0b29sc19uYW1lcyI6IFtdfV1KhgEKCmNyZXdfdGFza3MSeAp2W3si
|
||||
aWQiOiAiZDE4YjExNGEtMDVhMC00ZmNkLWExZjYtNmM3YTcwMjRjNTZmIiwgImFzeW5jX2V4ZWN1
|
||||
dGlvbj8iOiBmYWxzZSwgImFnZW50X3JvbGUiOiAiU2NvcmVyIiwgInRvb2xzX25hbWVzIjogW119
|
||||
XUooCghwbGF0Zm9ybRIcChptYWNPUy0xNC4zLWFybTY0LWFybS02NGJpdEocChBwbGF0Zm9ybV9y
|
||||
ZWxlYXNlEggKBjIzLjMuMEobCg9wbGF0Zm9ybV9zeXN0ZW0SCAoGRGFyd2luSnsKEHBsYXRmb3Jt
|
||||
X3ZlcnNpb24SZwplRGFyd2luIEtlcm5lbCBWZXJzaW9uIDIzLjMuMDogV2VkIERlYyAyMCAyMToz
|
||||
MDo1OSBQU1QgMjAyMzsgcm9vdDp4bnUtMTAwMDIuODEuNX43L1JFTEVBU0VfQVJNNjRfVDYwMzBK
|
||||
CgoEY3B1cxICGAx6AhgBEt0HChDviIet1EG5tkqxAn2xWMkEEgjk0HxsxYdn7SoMQ3JldyBDcmVh
|
||||
dGVkMAE5qDmFJgBptRdBMEGHJgBptRdKGgoOY3Jld2FpX3ZlcnNpb24SCAoGMC4xMS4xShoKDnB5
|
||||
dGhvbl92ZXJzaW9uEggKBjMuMTEuN0oxCgdjcmV3X2lkEiYKJGY1ZjFjYjhiLTZiNGEtNDZiMy1i
|
||||
MTQxLTQ5OWMwMGE0OGIxOUocCgxjcmV3X3Byb2Nlc3MSDAoKc2VxdWVudGlhbEoVCg1jcmV3X2xh
|
||||
bmd1YWdlEgQKAmVuShoKFGNyZXdfbnVtYmVyX29mX3Rhc2tzEgIYAUobChVjcmV3X251bWJlcl9v
|
||||
Zl9hZ2VudHMSAhgBSsgCCgtjcmV3X2FnZW50cxK4Agq1Alt7ImlkIjogIjE1OGE5ZTU0LTkwM2Mt
|
||||
NDg1Yi1iYzIzLTFhZGQzMjliNjMyOCIsICJyb2xlIjogIlNjb3JlciIsICJtZW1vcnlfZW5hYmxl
|
||||
ZD8iOiB0cnVlLCAidmVyYm9zZT8iOiBmYWxzZSwgIm1heF9pdGVyIjogMTUsICJtYXhfcnBtIjog
|
||||
bnVsbCwgImkxOG4iOiAiZW4iLCAibGxtIjogIntcIm5hbWVcIjogbnVsbCwgXCJtb2RlbF9uYW1l
|
||||
XCI6IFwiZ3B0LTRcIiwgXCJ0ZW1wZXJhdHVyZVwiOiAwLjcsIFwiY2xhc3NcIjogXCJDaGF0T3Bl
|
||||
bkFJXCJ9IiwgImRlbGVnYXRpb25fZW5hYmxlZD8iOiBmYWxzZSwgInRvb2xzX25hbWVzIjogW119
|
||||
XUqGAQoKY3Jld190YXNrcxJ4CnZbeyJpZCI6ICIxYjE0NWQ4OS05NzA0LTRmZmEtYTUzNC05YzFm
|
||||
MmJlNjc4MjQiLCAiYXN5bmNfZXhlY3V0aW9uPyI6IGZhbHNlLCAiYWdlbnRfcm9sZSI6ICJTY29y
|
||||
ZXIiLCAidG9vbHNfbmFtZXMiOiBbXX1dSigKCHBsYXRmb3JtEhwKGm1hY09TLTE0LjMtYXJtNjQt
|
||||
YXJtLTY0Yml0ShwKEHBsYXRmb3JtX3JlbGVhc2USCAoGMjMuMy4wShsKD3BsYXRmb3JtX3N5c3Rl
|
||||
bRIICgZEYXJ3aW5KewoQcGxhdGZvcm1fdmVyc2lvbhJnCmVEYXJ3aW4gS2VybmVsIFZlcnNpb24g
|
||||
MjMuMy4wOiBXZWQgRGVjIDIwIDIxOjMwOjU5IFBTVCAyMDIzOyByb290OnhudS0xMDAwMi44MS41
|
||||
fjcvUkVMRUFTRV9BUk02NF9UNjAzMEoKCgRjcHVzEgIYDHoCGAES1QgKEBuPsV+dBTMNNIWDF+d2
|
||||
KokSCO2zgUpXmdnDKgxDcmV3IENyZWF0ZWQwATlw/5kpAGm1F0HQZpspAGm1F0oaCg5jcmV3YWlf
|
||||
dmVyc2lvbhIICgYwLjExLjFKGgoOcHl0aG9uX3ZlcnNpb24SCAoGMy4xMS43SjEKB2NyZXdfaWQS
|
||||
JgokYjY2ZjY1MTgtYjYwZC00YjZjLWFkZGUtMTVmZGViMWQ5ZmQ1ShwKDGNyZXdfcHJvY2VzcxIM
|
||||
CgpzZXF1ZW50aWFsShUKDWNyZXdfbGFuZ3VhZ2USBAoCZW5KGgoUY3Jld19udW1iZXJfb2ZfdGFz
|
||||
a3MSAhgCShsKFWNyZXdfbnVtYmVyX29mX2FnZW50cxICGAFKyAIKC2NyZXdfYWdlbnRzErgCCrUC
|
||||
W3siaWQiOiAiMjQ1NDlmZjctOGZhZS00ZTYwLThkMGEtNzNlYzkxNmE3ZDc0IiwgInJvbGUiOiAi
|
||||
U2NvcmVyIiwgIm1lbW9yeV9lbmFibGVkPyI6IHRydWUsICJ2ZXJib3NlPyI6IGZhbHNlLCAibWF4
|
||||
X2l0ZXIiOiAxNSwgIm1heF9ycG0iOiBudWxsLCAiaTE4biI6ICJlbiIsICJsbG0iOiAie1wibmFt
|
||||
ZVwiOiBudWxsLCBcIm1vZGVsX25hbWVcIjogXCJncHQtNFwiLCBcInRlbXBlcmF0dXJlXCI6IDAu
|
||||
NywgXCJjbGFzc1wiOiBcIkNoYXRPcGVuQUlcIn0iLCAiZGVsZWdhdGlvbl9lbmFibGVkPyI6IGZh
|
||||
bHNlLCAidG9vbHNfbmFtZXMiOiBbXX1dSv4BCgpjcmV3X3Rhc2tzEu8BCuwBW3siaWQiOiAiNjk1
|
||||
NGJjZDMtN2EwNS00YTg1LTk0ZGEtN2UzMGM0YTkwNDE4IiwgImFzeW5jX2V4ZWN1dGlvbj8iOiBm
|
||||
YWxzZSwgImFnZW50X3JvbGUiOiAiU2NvcmVyIiwgInRvb2xzX25hbWVzIjogW119LCB7ImlkIjog
|
||||
IjdiM2JmZDY4LTM3YjctNDk3Mi05NWI5LWU2M2Y3MGMwYTliZCIsICJhc3luY19leGVjdXRpb24/
|
||||
IjogZmFsc2UsICJhZ2VudF9yb2xlIjogIlNjb3JlciIsICJ0b29sc19uYW1lcyI6IFtdfV1KKAoI
|
||||
cGxhdGZvcm0SHAoabWFjT1MtMTQuMy1hcm02NC1hcm0tNjRiaXRKHAoQcGxhdGZvcm1fcmVsZWFz
|
||||
ZRIICgYyMy4zLjBKGwoPcGxhdGZvcm1fc3lzdGVtEggKBkRhcndpbkp7ChBwbGF0Zm9ybV92ZXJz
|
||||
aW9uEmcKZURhcndpbiBLZXJuZWwgVmVyc2lvbiAyMy4zLjA6IFdlZCBEZWMgMjAgMjE6MzA6NTkg
|
||||
UFNUIDIwMjM7IHJvb3Q6eG51LTEwMDAyLjgxLjV+Ny9SRUxFQVNFX0FSTTY0X1Q2MDMwSgoKBGNw
|
||||
dXMSAhgMegIYARLVCAoQMFavY6R+M8VKfR0IuD7J7BIIRT9oQ4tD6QEqDENyZXcgQ3JlYXRlZDAB
|
||||
OWi+Ci8AabUXQVA5DC8AabUXShoKDmNyZXdhaV92ZXJzaW9uEggKBjAuMTEuMUoaCg5weXRob25f
|
||||
dmVyc2lvbhIICgYzLjExLjdKMQoHY3Jld19pZBImCiQ1ODU2MzZmNy1kYjA2LTRmODYtYjcwMi1h
|
||||
MDM2ZmE5M2MxYmRKHAoMY3Jld19wcm9jZXNzEgwKCnNlcXVlbnRpYWxKFQoNY3Jld19sYW5ndWFn
|
||||
ZRIECgJlbkoaChRjcmV3X251bWJlcl9vZl90YXNrcxICGAJKGwoVY3Jld19udW1iZXJfb2ZfYWdl
|
||||
bnRzEgIYAUrIAgoLY3Jld19hZ2VudHMSuAIKtQJbeyJpZCI6ICI2NmUwYzMwMi1lZDI2LTQzYTQt
|
||||
YTA5NS0wYWVmMTY3N2JkZjkiLCAicm9sZSI6ICJTY29yZXIiLCAibWVtb3J5X2VuYWJsZWQ/Ijog
|
||||
dHJ1ZSwgInZlcmJvc2U/IjogZmFsc2UsICJtYXhfaXRlciI6IDE1LCAibWF4X3JwbSI6IG51bGws
|
||||
ICJpMThuIjogImVuIiwgImxsbSI6ICJ7XCJuYW1lXCI6IG51bGwsIFwibW9kZWxfbmFtZVwiOiBc
|
||||
ImdwdC00XCIsIFwidGVtcGVyYXR1cmVcIjogMC43LCBcImNsYXNzXCI6IFwiQ2hhdE9wZW5BSVwi
|
||||
fSIsICJkZWxlZ2F0aW9uX2VuYWJsZWQ/IjogZmFsc2UsICJ0b29sc19uYW1lcyI6IFtdfV1K/gEK
|
||||
CmNyZXdfdGFza3MS7wEK7AFbeyJpZCI6ICJlYzM4ZGNjNi02OTk4LTQwNGItODgyMi1mOTY0NDdk
|
||||
ZDUxN2IiLCAiYXN5bmNfZXhlY3V0aW9uPyI6IGZhbHNlLCAiYWdlbnRfcm9sZSI6ICJTY29yZXIi
|
||||
LCAidG9vbHNfbmFtZXMiOiBbXX0sIHsiaWQiOiAiMmM0NWFhZTMtNTRjYS00MDhkLTlmOTctYzAz
|
||||
NzUyYTE3NzI1IiwgImFzeW5jX2V4ZWN1dGlvbj8iOiBmYWxzZSwgImFnZW50X3JvbGUiOiAiU2Nv
|
||||
cmVyIiwgInRvb2xzX25hbWVzIjogW119XUooCghwbGF0Zm9ybRIcChptYWNPUy0xNC4zLWFybTY0
|
||||
LWFybS02NGJpdEocChBwbGF0Zm9ybV9yZWxlYXNlEggKBjIzLjMuMEobCg9wbGF0Zm9ybV9zeXN0
|
||||
ZW0SCAoGRGFyd2luSnsKEHBsYXRmb3JtX3ZlcnNpb24SZwplRGFyd2luIEtlcm5lbCBWZXJzaW9u
|
||||
IDIzLjMuMDogV2VkIERlYyAyMCAyMTozMDo1OSBQU1QgMjAyMzsgcm9vdDp4bnUtMTAwMDIuODEu
|
||||
NX43L1JFTEVBU0VfQVJNNjRfVDYwMzBKCgoEY3B1cxICGAx6AhgBEt0HChAGyXCYUYk+AnEAKUUB
|
||||
f6Y7EgjZK54jkKVQASoMQ3JldyBDcmVhdGVkMAE5mEVnNABptRdBmLxoNABptRdKGgoOY3Jld2Fp
|
||||
X3ZlcnNpb24SCAoGMC4xMS4xShoKDnB5dGhvbl92ZXJzaW9uEggKBjMuMTEuN0oxCgdjcmV3X2lk
|
||||
EiYKJDhkNTNkZTIyLWEyNTgtNGIwMS05M2MwLTM0NTAxMDQ4OGUwN0ocCgxjcmV3X3Byb2Nlc3MS
|
||||
DAoKc2VxdWVudGlhbEoVCg1jcmV3X2xhbmd1YWdlEgQKAmVuShoKFGNyZXdfbnVtYmVyX29mX3Rh
|
||||
c2tzEgIYAUobChVjcmV3X251bWJlcl9vZl9hZ2VudHMSAhgBSsgCCgtjcmV3X2FnZW50cxK4Agq1
|
||||
Alt7ImlkIjogIjJiMzRkOGZmLTY0OTAtNDY4MC1iZmRiLTQ0MTE3Y2NjZGNhMCIsICJyb2xlIjog
|
||||
IlNjb3JlciIsICJtZW1vcnlfZW5hYmxlZD8iOiB0cnVlLCAidmVyYm9zZT8iOiBmYWxzZSwgIm1h
|
||||
eF9pdGVyIjogMTUsICJtYXhfcnBtIjogbnVsbCwgImkxOG4iOiAiZW4iLCAibGxtIjogIntcIm5h
|
||||
bWVcIjogbnVsbCwgXCJtb2RlbF9uYW1lXCI6IFwiZ3B0LTRcIiwgXCJ0ZW1wZXJhdHVyZVwiOiAw
|
||||
LjcsIFwiY2xhc3NcIjogXCJDaGF0T3BlbkFJXCJ9IiwgImRlbGVnYXRpb25fZW5hYmxlZD8iOiBm
|
||||
YWxzZSwgInRvb2xzX25hbWVzIjogW119XUqGAQoKY3Jld190YXNrcxJ4CnZbeyJpZCI6ICI4MDRl
|
||||
M2EzYS00NjYxLTQ4ZDctYjgyMC02ODk5MmU1ZTEyODgiLCAiYXN5bmNfZXhlY3V0aW9uPyI6IGZh
|
||||
bHNlLCAiYWdlbnRfcm9sZSI6ICJTY29yZXIiLCAidG9vbHNfbmFtZXMiOiBbXX1dSigKCHBsYXRm
|
||||
b3JtEhwKGm1hY09TLTE0LjMtYXJtNjQtYXJtLTY0Yml0ShwKEHBsYXRmb3JtX3JlbGVhc2USCAoG
|
||||
MjMuMy4wShsKD3BsYXRmb3JtX3N5c3RlbRIICgZEYXJ3aW5KewoQcGxhdGZvcm1fdmVyc2lvbhJn
|
||||
CmVEYXJ3aW4gS2VybmVsIFZlcnNpb24gMjMuMy4wOiBXZWQgRGVjIDIwIDIxOjMwOjU5IFBTVCAy
|
||||
MDIzOyByb290OnhudS0xMDAwMi44MS41fjcvUkVMRUFTRV9BUk02NF9UNjAzMEoKCgRjcHVzEgIY
|
||||
DHoCGAES3QcKEO2ikjl6HhLK2BkbjrmndFkSCJmJeKKJCQsHKgxDcmV3IENyZWF0ZWQwATmw6iQ3
|
||||
AGm1F0HYASc3AGm1F0oaCg5jcmV3YWlfdmVyc2lvbhIICgYwLjExLjFKGgoOcHl0aG9uX3ZlcnNp
|
||||
b24SCAoGMy4xMS43SjEKB2NyZXdfaWQSJgokZTkzNGVlNTItNTJmZi00OWUyLTg0OWMtYmMyNmQ4
|
||||
NWIxN2RmShwKDGNyZXdfcHJvY2VzcxIMCgpzZXF1ZW50aWFsShUKDWNyZXdfbGFuZ3VhZ2USBAoC
|
||||
ZW5KGgoUY3Jld19udW1iZXJfb2ZfdGFza3MSAhgBShsKFWNyZXdfbnVtYmVyX29mX2FnZW50cxIC
|
||||
GAFKyAIKC2NyZXdfYWdlbnRzErgCCrUCW3siaWQiOiAiNjcyOTBjNzgtNDcwNC00MzYyLTlhOGMt
|
||||
NDMzZmEzNmYzODVmIiwgInJvbGUiOiAiU2NvcmVyIiwgIm1lbW9yeV9lbmFibGVkPyI6IHRydWUs
|
||||
ICJ2ZXJib3NlPyI6IGZhbHNlLCAibWF4X2l0ZXIiOiAxNSwgIm1heF9ycG0iOiBudWxsLCAiaTE4
|
||||
biI6ICJlbiIsICJsbG0iOiAie1wibmFtZVwiOiBudWxsLCBcIm1vZGVsX25hbWVcIjogXCJncHQt
|
||||
NFwiLCBcInRlbXBlcmF0dXJlXCI6IDAuNywgXCJjbGFzc1wiOiBcIkNoYXRPcGVuQUlcIn0iLCAi
|
||||
ZGVsZWdhdGlvbl9lbmFibGVkPyI6IGZhbHNlLCAidG9vbHNfbmFtZXMiOiBbXX1dSoYBCgpjcmV3
|
||||
X3Rhc2tzEngKdlt7ImlkIjogIjYxN2I4YWZiLTJlMTMtNDcyZS04ZWFjLWZkNDdhODRhZjI4MCIs
|
||||
ICJhc3luY19leGVjdXRpb24/IjogZmFsc2UsICJhZ2VudF9yb2xlIjogIlNjb3JlciIsICJ0b29s
|
||||
c19uYW1lcyI6IFtdfV1KKAoIcGxhdGZvcm0SHAoabWFjT1MtMTQuMy1hcm02NC1hcm0tNjRiaXRK
|
||||
HAoQcGxhdGZvcm1fcmVsZWFzZRIICgYyMy4zLjBKGwoPcGxhdGZvcm1fc3lzdGVtEggKBkRhcndp
|
||||
bkp7ChBwbGF0Zm9ybV92ZXJzaW9uEmcKZURhcndpbiBLZXJuZWwgVmVyc2lvbiAyMy4zLjA6IFdl
|
||||
ZCBEZWMgMjAgMjE6MzA6NTkgUFNUIDIwMjM7IHJvb3Q6eG51LTEwMDAyLjgxLjV+Ny9SRUxFQVNF
|
||||
X0FSTTY0X1Q2MDMwSgoKBGNwdXMSAhgMegIYAQ==
    headers:
      Accept:
      - '*/*'
      Accept-Encoding:
      - gzip, deflate
      Connection:
      - keep-alive
      Content-Length:
      - '34171'
      Content-Type:
      - application/x-protobuf
      User-Agent:
      - OTel-OTLP-Exporter-Python/1.22.0
    method: POST
    uri: http://telemetry.crewai.com:4318/v1/traces
  response:
    body:
      string: "\n\0"
    headers:
      Content-Length:
      - '2'
      Content-Type:
      - application/x-protobuf
      Date:
      - Mon, 19 Feb 2024 23:58:05 GMT
    status:
      code: 200
      message: OK
- request:
    body: '{"messages": [{"role": "user", "content": "Progressively summarize the
      lines of conversation provided, adding onto the previous summary returning a
      new summary.\n\nEXAMPLE\nCurrent summary:\nThe human asks what the AI thinks
      of artificial intelligence. The AI thinks artificial intelligence is a force
      for good.\n\nNew lines of conversation:\nHuman: Why do you think artificial
      intelligence is a force for good?\nAI: Because artificial intelligence will
      help humans reach their full potential.\n\nNew summary:\nThe human asks what
      the AI thinks of artificial intelligence. The AI thinks artificial intelligence
      is a force for good because it will help humans reach their full potential.\nEND
      OF EXAMPLE\n\nCurrent summary:\n\n\nNew lines of conversation:\nHuman: Give
      me an integer score between 1-5 for the following title: ''The impact of AI
      in the future of work''\nYour final answer must be: The score of the title.\nAI:
      4\n\nNew summary:"}], "model": "gpt-4", "n": 1, "stream": false, "temperature":
      0.7}'
    headers:
      accept:
      - application/json
      accept-encoding:
      - gzip, deflate
      connection:
      - keep-alive
      content-length:
      - '1007'
      content-type:
      - application/json
      cookie:
      - __cf_bm=Fzt.3y3dhP8ruN1TWiH6jLm0CcZkNtLDNsdfw_mz02I-1708387084-1.0-Ad3cAs7mDylg8sxtK6Ttgud1zpXW+twV1R26Y6LmMVz79On3i0JoMvjsIx7HWsHeZoJYHqaoXrWBV6c7wESpq3A=;
        _cfuvid=j_ZBAzbEz7fMKPo29aitwE0ivRAZjvpZR17gpXyL1o4-1708387084116-0.0-604800000
      host:
      - api.openai.com
      user-agent:
      - OpenAI/Python 1.12.0
      x-stainless-arch:
      - arm64
      x-stainless-async:
      - 'false'
      x-stainless-lang:
      - python
      x-stainless-os:
      - MacOS
      x-stainless-package-version:
      - 1.12.0
      x-stainless-runtime:
      - CPython
      x-stainless-runtime-version:
      - 3.11.7
    method: POST
    uri: https://api.openai.com/v1/chat/completions
  response:
    body:
      string: !!binary |
        H4sIAAAAAAAAA1SRzU8CMRDF7/tXTHrhAoQVlIWbHowfiSejB2NIKcNuoe3UdlYlhP/dtMtHvDTp
        +82bvL7uCwChV2IOQjWSlfVmULVTeuaX8nZWXslftbH8cPf1/vTWzHabe9FPDlpuUPHJNVRkvUHW
        5DqsAkrGtLWcjqpxNR1VkwwsrdAkW+15MBmMbsrx0dGQVhjFHD4KAIB9PlM2t8JfMYdR/6RYjFHW
        KObnIQARyCRFyBh1ZOlY9C9QkWN0Oe5rg9C0VjqQcRuBG4TbR2CCIBnzlTUbhF4a1NZLxUDrNKNd
        xuuW24BJ+6Gw7QE5kBCVNFkrB9dDeO2W1vobI2jOnDrPZCiOsQ7n9xiqfaBlertrjTnra+10bBYB
        ZSSXskcm39kPBcBn7q39V4XwgaznBdMWXcz1z7p94vJFFzo+QSaW5qJflVVxTCjiLjLaxVq7GoMP
        OteYchaH4g8AAP//AwCU/+hLPQIAAA==
    headers:
      CF-Cache-Status:
      - DYNAMIC
      CF-RAY:
      - 858274ac7ab1a4bd-GRU
      Cache-Control:
      - no-cache, must-revalidate
      Connection:
      - keep-alive
      Content-Encoding:
      - gzip
      Content-Type:
      - application/json
      Date:
      - Mon, 19 Feb 2024 23:58:05 GMT
      Server:
      - cloudflare
      Transfer-Encoding:
      - chunked
      access-control-allow-origin:
      - '*'
      alt-svc:
      - h3=":443"; ma=86400
      openai-model:
      - gpt-4-0613
      openai-organization:
      - crewai-iuxna1
      openai-processing-ms:
      - '1234'
      openai-version:
      - '2020-10-01'
      strict-transport-security:
      - max-age=15724800; includeSubDomains
      x-ratelimit-limit-requests:
      - '10000'
      x-ratelimit-limit-tokens:
      - '300000'
      x-ratelimit-remaining-requests:
      - '9999'
      x-ratelimit-remaining-tokens:
      - '299764'
      x-ratelimit-reset-requests:
      - 6ms
      x-ratelimit-reset-tokens:
      - 47ms
      x-request-id:
      - req_378add836bad9286db76ff9169f6ef43
    status:
      code: 200
      message: OK
- request:
    body: '{"messages": [{"role": "user", "content": "4"}], "model": "gpt-4", "tool_choice":
      {"type": "function", "function": {"name": "ScoreOutput"}}, "tools": [{"type":
      "function", "function": {"name": "ScoreOutput", "description": "Correctly extracted
      `ScoreOutput` with all the required parameters with correct types", "parameters":
      {"properties": {"score": {"title": "Score", "type": "integer"}}, "required":
      ["score"], "type": "object"}}}]}'
    headers:
      accept:
      - application/json
      accept-encoding:
      - gzip, deflate
      connection:
      - keep-alive
      content-length:
      - '435'
      content-type:
      - application/json
      cookie:
      - __cf_bm=Fzt.3y3dhP8ruN1TWiH6jLm0CcZkNtLDNsdfw_mz02I-1708387084-1.0-Ad3cAs7mDylg8sxtK6Ttgud1zpXW+twV1R26Y6LmMVz79On3i0JoMvjsIx7HWsHeZoJYHqaoXrWBV6c7wESpq3A=;
        _cfuvid=j_ZBAzbEz7fMKPo29aitwE0ivRAZjvpZR17gpXyL1o4-1708387084116-0.0-604800000
      host:
      - api.openai.com
      user-agent:
      - OpenAI/Python 1.12.0
      x-stainless-arch:
      - arm64
      x-stainless-async:
      - 'false'
      x-stainless-lang:
      - python
      x-stainless-os:
      - MacOS
      x-stainless-package-version:
      - 1.12.0
      x-stainless-runtime:
      - CPython
      x-stainless-runtime-version:
      - 3.11.7
    method: POST
    uri: https://api.openai.com/v1/chat/completions
  response:
    body:
      string: !!binary |
        H4sIAAAAAAAAA2xSS2/bMAy++1cIPCeDu2ax4+P6OGwBOqAoVmApDFmhHa+SKEh0sSzIfx9kp3Ya
        zAeB4MfvAdKHRAhot1AIUDvJyjg9z7uM1t9u3FO1qG4f65+rm2d3v1rywxuaALPIoOo3Kn5nfVJk
        nEZuyQ6w8igZo+pVlubXeZbmX3rA0BZ1pDWO54t5ury6PjF21CoMUIhfiRBCHPo3ZrNb/AOFSGfv
        HYMhyAahGIeEAE86dkCG0AaWlmE2gYoso41xbaf1GcBEulRS68l4+A5n9bQgqXX5Y/30du/Wz6/o
        s8p/vcvqv9+b1X535jdI710fqO6sGhdzho/94sJMCLDS9NxHRR4fOnYdX9CFAOmbzqDlGB0OG7uB
        EMc3UIjFxh7hw/wx+V/9cqqO4241Nc5TFS5WBXVr27ArPcrQR4bA5AaLKPfS37D7cBZwnozjkukV
        bRRcfh7kYPpbJjA/YUws9dTO0uQUD8I+MJqybm2D3vl2vGdyTP4BAAD//wMAkRH1fcYCAAA=
    headers:
      CF-Cache-Status:
      - DYNAMIC
      CF-RAY:
      - 858274b5dc11a4bd-GRU
      Cache-Control:
      - no-cache, must-revalidate
      Connection:
      - keep-alive
      Content-Encoding:
      - gzip
      Content-Type:
      - application/json
      Date:
      - Mon, 19 Feb 2024 23:58:06 GMT
      Server:
      - cloudflare
      Transfer-Encoding:
      - chunked
      access-control-allow-origin:
      - '*'
      alt-svc:
      - h3=":443"; ma=86400
      openai-model:
      - gpt-4-0613
      openai-organization:
      - crewai-iuxna1
      openai-processing-ms:
      - '579'
      openai-version:
      - '2020-10-01'
      strict-transport-security:
      - max-age=15724800; includeSubDomains
      x-ratelimit-limit-requests:
      - '10000'
      x-ratelimit-limit-tokens:
      - '300000'
      x-ratelimit-remaining-requests:
      - '9999'
      x-ratelimit-remaining-tokens:
      - '299981'
      x-ratelimit-reset-requests:
      - 6ms
      x-ratelimit-reset-tokens:
      - 3ms
      x-request-id:
      - req_e791c6467b700cfe727e205bc0a512b4
    status:
      code: 200
      message: OK
version: 1
848
tests/cassettes/test_save_task_output.yaml
Normal file
848
tests/cassettes/test_save_task_output.yaml
Normal file
@@ -0,0 +1,848 @@
interactions:
- request:
    body: '{"messages": [{"role": "user", "content": "You are Scorer.\nYou''re an
      expert scorer, specialized in scoring titles.\n\nYour personal goal is: Score
      the titleTo complete the task you MUST follow the format:\n\n```\nFinal Answer:
      [your most complete final answer goes here]\n``` You must use these formats,
      my life depends on it.This is the summary of your work so far:\nBegin! This
      is VERY important to you, your job depends on it!\n\nCurrent Task: Give me an
      integer score between 1-5 for the following title: ''The impact of AI in the
      future of work''\nYour final answer must be: The score of the title.\n"}], "model":
      "gpt-4", "n": 1, "stop": ["\nResult"], "stream": true, "temperature": 0.7}'
    headers:
      accept:
      - application/json
      accept-encoding:
      - gzip, deflate
      connection:
      - keep-alive
      content-length:
      - '692'
      content-type:
      - application/json
      host:
      - api.openai.com
      user-agent:
      - OpenAI/Python 1.12.0
      x-stainless-arch:
      - arm64
      x-stainless-async:
      - 'false'
      x-stainless-lang:
      - python
      x-stainless-os:
      - MacOS
      x-stainless-package-version:
      - 1.12.0
      x-stainless-runtime:
      - CPython
      x-stainless-runtime-version:
      - 3.11.7
    method: POST
    uri: https://api.openai.com/v1/chat/completions
  response:
    body:
      string: 'data: {"id":"chatcmpl-8u7fc3hJpVbV3T3SDSWArVIw5o4Gt","object":"chat.completion.chunk","created":1708386544,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-8u7fc3hJpVbV3T3SDSWArVIw5o4Gt","object":"chat.completion.chunk","created":1708386544,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-8u7fc3hJpVbV3T3SDSWArVIw5o4Gt","object":"chat.completion.chunk","created":1708386544,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
        Answer"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-8u7fc3hJpVbV3T3SDSWArVIw5o4Gt","object":"chat.completion.chunk","created":1708386544,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-8u7fc3hJpVbV3T3SDSWArVIw5o4Gt","object":"chat.completion.chunk","created":1708386544,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
        "},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-8u7fc3hJpVbV3T3SDSWArVIw5o4Gt","object":"chat.completion.chunk","created":1708386544,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"4"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-8u7fc3hJpVbV3T3SDSWArVIw5o4Gt","object":"chat.completion.chunk","created":1708386544,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}


        data: [DONE]


        '
    headers:
      CF-Cache-Status:
      - DYNAMIC
      CF-RAY:
      - 8582677c0f6200e2-GRU
      Cache-Control:
      - no-cache, must-revalidate
      Connection:
      - keep-alive
      Content-Type:
      - text/event-stream
      Date:
      - Mon, 19 Feb 2024 23:49:04 GMT
      Server:
      - cloudflare
      Set-Cookie:
      - __cf_bm=7sCpUch4zHBRr3bC0ZrXiZ.hp5JDRz10XR.quEnwOrA-1708386544-1.0-Afdz/ZpiEUiaPexcCrxugPwkeqRkeirb63bcWwb2oQP4BxG9mVHI7ouMoBhxJcQysgMju/x4AOA6ugypjle7VW0=;
        path=/; expires=Tue, 20-Feb-24 00:19:04 GMT; domain=.api.openai.com; HttpOnly;
        Secure; SameSite=None
      - _cfuvid=ekJFOKa47vCR_bxQqig9km7tYBNhHULDxrGlY80MRHE-1708386544507-0.0-604800000;
        path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
      Transfer-Encoding:
      - chunked
      access-control-allow-origin:
      - '*'
      alt-svc:
      - h3=":443"; ma=86400
      openai-model:
      - gpt-4-0613
      openai-organization:
      - crewai-iuxna1
      openai-processing-ms:
      - '219'
      openai-version:
      - '2020-10-01'
      strict-transport-security:
      - max-age=15724800; includeSubDomains
      x-ratelimit-limit-requests:
      - '10000'
      x-ratelimit-limit-tokens:
      - '300000'
      x-ratelimit-remaining-requests:
      - '9999'
      x-ratelimit-remaining-tokens:
      - '299845'
      x-ratelimit-reset-requests:
      - 6ms
      x-ratelimit-reset-tokens:
      - 31ms
      x-request-id:
      - req_2f71caa23336ee2e58caeaa115a97d5f
    status:
      code: 200
      message: OK
- request:
    body: !!binary |
CpeDAgokCiIKDHNlcnZpY2UubmFtZRISChBjcmV3QUktdGVsZW1ldHJ5Eu2CAgoSChBjcmV3YWku
|
||||
dGVsZW1ldHJ5EvIHChAaBf6ADKyntq+vRGX9SJpHEgiijN4P4Uh2ECoMQ3JldyBDcmVhdGVkMAE5
|
||||
8CnAyIFotRdBWE7FyIFotRdKGgoOY3Jld2FpX3ZlcnNpb24SCAoGMC4xMS4xShoKDnB5dGhvbl92
|
||||
ZXJzaW9uEggKBjMuMTEuN0oxCgdjcmV3X2lkEiYKJGYyMWRiZGJkLTQxZTgtNDU3MC1iMmI3LTQw
|
||||
NzdiZTNhMDc3MEocCgxjcmV3X3Byb2Nlc3MSDAoKc2VxdWVudGlhbEoVCg1jcmV3X2xhbmd1YWdl
|
||||
EgQKAmVuShoKFGNyZXdfbnVtYmVyX29mX3Rhc2tzEgIYAUobChVjcmV3X251bWJlcl9vZl9hZ2Vu
|
||||
dHMSAhgBSsYCCgtjcmV3X2FnZW50cxK2AgqzAlt7ImlkIjogImJhNmVhNTM3LWJmNDItNDJjYi1h
|
||||
ZjMwLTU2MmZmN2NhNWQ5YyIsICJyb2xlIjogInRlc3Qgcm9sZSIsICJtZW1vcnlfZW5hYmxlZD8i
|
||||
OiB0cnVlLCAidmVyYm9zZT8iOiB0cnVlLCAibWF4X2l0ZXIiOiA0LCAibWF4X3JwbSI6IDEwLCAi
|
||||
aTE4biI6ICJlbiIsICJsbG0iOiAie1wibmFtZVwiOiBudWxsLCBcIm1vZGVsX25hbWVcIjogXCJn
|
||||
cHQtNFwiLCBcInRlbXBlcmF0dXJlXCI6IDAuNywgXCJjbGFzc1wiOiBcIkNoYXRPcGVuQUlcIn0i
|
||||
LCAiZGVsZWdhdGlvbl9lbmFibGVkPyI6IHRydWUsICJ0b29sc19uYW1lcyI6IFtdfV1KnQEKCmNy
|
||||
ZXdfdGFza3MSjgEKiwFbeyJpZCI6ICJkNmVlYzc5NC1kOTU0LTQwZTMtYTYyNi0xYzE2MDJlOWE3
|
||||
NWIiLCAiYXN5bmNfZXhlY3V0aW9uPyI6IGZhbHNlLCAiYWdlbnRfcm9sZSI6ICJ0ZXN0IHJvbGUi
|
||||
LCAidG9vbHNfbmFtZXMiOiBbImdldF9maW5hbF9hbnN3ZXIiXX1dSigKCHBsYXRmb3JtEhwKGm1h
|
||||
Y09TLTE0LjMtYXJtNjQtYXJtLTY0Yml0ShwKEHBsYXRmb3JtX3JlbGVhc2USCAoGMjMuMy4wShsK
|
||||
D3BsYXRmb3JtX3N5c3RlbRIICgZEYXJ3aW5KewoQcGxhdGZvcm1fdmVyc2lvbhJnCmVEYXJ3aW4g
|
||||
S2VybmVsIFZlcnNpb24gMjMuMy4wOiBXZWQgRGVjIDIwIDIxOjMwOjU5IFBTVCAyMDIzOyByb290
|
||||
OnhudS0xMDAwMi44MS41fjcvUkVMRUFTRV9BUk02NF9UNjAzMEoKCgRjcHVzEgIYDHoCGAESowsK
|
||||
EC2mUzP54YHc6LDJLAdu6acSCMTJm+B7lvLuKgxDcmV3IENyZWF0ZWQwATkQF53UgWi1F0EA5J7U
|
||||
gWi1F0oaCg5jcmV3YWlfdmVyc2lvbhIICgYwLjExLjFKGgoOcHl0aG9uX3ZlcnNpb24SCAoGMy4x
|
||||
MS43SjEKB2NyZXdfaWQSJgokODZlNjBkZTEtNzVmMy00YjFjLWFhOTItZDg3ODI2MzhlZjMxShwK
|
||||
DGNyZXdfcHJvY2VzcxIMCgpzZXF1ZW50aWFsShUKDWNyZXdfbGFuZ3VhZ2USBAoCZW5KGgoUY3Jl
|
||||
d19udW1iZXJfb2ZfdGFza3MSAhgCShsKFWNyZXdfbnVtYmVyX29mX2FnZW50cxICGAJK/QQKC2Ny
|
||||
ZXdfYWdlbnRzEu0ECuoEW3siaWQiOiAiYzVlOTFkYWQtMDRiMi00NzM2LWFjZTMtZWY5NzRiYjVi
|
||||
ODQ3IiwgInJvbGUiOiAidGVzdCByb2xlIiwgIm1lbW9yeV9lbmFibGVkPyI6IHRydWUsICJ2ZXJi
|
||||
b3NlPyI6IHRydWUsICJtYXhfaXRlciI6IDE1LCAibWF4X3JwbSI6IDEwLCAiaTE4biI6ICJlbiIs
|
||||
ICJsbG0iOiAie1wibmFtZVwiOiBudWxsLCBcIm1vZGVsX25hbWVcIjogXCJncHQtNFwiLCBcInRl
|
||||
bXBlcmF0dXJlXCI6IDAuNywgXCJjbGFzc1wiOiBcIkNoYXRPcGVuQUlcIn0iLCAiZGVsZWdhdGlv
|
||||
bl9lbmFibGVkPyI6IHRydWUsICJ0b29sc19uYW1lcyI6IFtdfSwgeyJpZCI6ICIyYjcwOGEzYy1l
|
||||
MmEyLTQxN2QtOTdiMS0wMWVmZWUzZjg4NWEiLCAicm9sZSI6ICJ0ZXN0IHJvbGUyIiwgIm1lbW9y
|
||||
eV9lbmFibGVkPyI6IHRydWUsICJ2ZXJib3NlPyI6IHRydWUsICJtYXhfaXRlciI6IDIsICJtYXhf
|
||||
cnBtIjogbnVsbCwgImkxOG4iOiAiZW4iLCAibGxtIjogIntcIm5hbWVcIjogbnVsbCwgXCJtb2Rl
|
||||
bF9uYW1lXCI6IFwiZ3B0LTRcIiwgXCJ0ZW1wZXJhdHVyZVwiOiAwLjcsIFwiY2xhc3NcIjogXCJD
|
||||
aGF0T3BlbkFJXCJ9IiwgImRlbGVnYXRpb25fZW5hYmxlZD8iOiB0cnVlLCAidG9vbHNfbmFtZXMi
|
||||
OiBbXX1dSpcCCgpjcmV3X3Rhc2tzEogCCoUCW3siaWQiOiAiN2FhMGFlZTUtMTU2Yy00NTczLThh
|
||||
OGQtMzFiODM4MDY0YjY1IiwgImFzeW5jX2V4ZWN1dGlvbj8iOiBmYWxzZSwgImFnZW50X3JvbGUi
|
||||
OiAidGVzdCByb2xlIiwgInRvb2xzX25hbWVzIjogW119LCB7ImlkIjogIjhhNzU1NDM3LWEwZjQt
|
||||
NDVlYi1iMGZmLTY0MjNiNTVjZjlkOCIsICJhc3luY19leGVjdXRpb24/IjogZmFsc2UsICJhZ2Vu
|
||||
dF9yb2xlIjogInRlc3Qgcm9sZTIiLCAidG9vbHNfbmFtZXMiOiBbImdldF9maW5hbF9hbnN3ZXIi
|
||||
XX1dSigKCHBsYXRmb3JtEhwKGm1hY09TLTE0LjMtYXJtNjQtYXJtLTY0Yml0ShwKEHBsYXRmb3Jt
|
||||
X3JlbGVhc2USCAoGMjMuMy4wShsKD3BsYXRmb3JtX3N5c3RlbRIICgZEYXJ3aW5KewoQcGxhdGZv
|
||||
cm1fdmVyc2lvbhJnCmVEYXJ3aW4gS2VybmVsIFZlcnNpb24gMjMuMy4wOiBXZWQgRGVjIDIwIDIx
|
||||
OjMwOjU5IFBTVCAyMDIzOyByb290OnhudS0xMDAwMi44MS41fjcvUkVMRUFTRV9BUk02NF9UNjAz
|
||||
MEoKCgRjcHVzEgIYDHoCGAES9QcKEFwgkzU6sNPz3QKFgtqCsCQSCJoGYLG1gNkBKgxDcmV3IENy
|
||||
ZWF0ZWQwATkwDUbegWi1F0Eg2kfegWi1F0oaCg5jcmV3YWlfdmVyc2lvbhIICgYwLjExLjFKGgoO
|
||||
cHl0aG9uX3ZlcnNpb24SCAoGMy4xMS43SjEKB2NyZXdfaWQSJgokNDBiNzg0ZGMtZDIxYi00NmM3
|
||||
LTliMjAtMWNhMWE2MWU3Mzc1ShwKDGNyZXdfcHJvY2VzcxIMCgpzZXF1ZW50aWFsShUKDWNyZXdf
|
||||
bGFuZ3VhZ2USBAoCZW5KGgoUY3Jld19udW1iZXJfb2ZfdGFza3MSAhgBShsKFWNyZXdfbnVtYmVy
|
||||
X29mX2FnZW50cxICGAFKyQIKC2NyZXdfYWdlbnRzErkCCrYCW3siaWQiOiAiNmE3ZGY0MDQtY2Nh
|
||||
YS00MTMwLThkOTYtYTFhZGI2MzIyMjJhIiwgInJvbGUiOiAidGVzdCByb2xlIiwgIm1lbW9yeV9l
|
||||
bmFibGVkPyI6IHRydWUsICJ2ZXJib3NlPyI6IHRydWUsICJtYXhfaXRlciI6IDE1LCAibWF4X3Jw
|
||||
bSI6IG51bGwsICJpMThuIjogImVuIiwgImxsbSI6ICJ7XCJuYW1lXCI6IG51bGwsIFwibW9kZWxf
|
||||
bmFtZVwiOiBcImdwdC00XCIsIFwidGVtcGVyYXR1cmVcIjogMC43LCBcImNsYXNzXCI6IFwiQ2hh
|
||||
dE9wZW5BSVwifSIsICJkZWxlZ2F0aW9uX2VuYWJsZWQ/IjogdHJ1ZSwgInRvb2xzX25hbWVzIjog
|
||||
W119XUqdAQoKY3Jld190YXNrcxKOAQqLAVt7ImlkIjogIjIzOTlhNDljLTlmNjAtNDU4My1hOGE5
|
||||
LTIyNjU3ODBkMDE1MSIsICJhc3luY19leGVjdXRpb24/IjogZmFsc2UsICJhZ2VudF9yb2xlIjog
|
||||
InRlc3Qgcm9sZSIsICJ0b29sc19uYW1lcyI6IFsiZ2V0X2ZpbmFsX2Fuc3dlciJdfV1KKAoIcGxh
|
||||
dGZvcm0SHAoabWFjT1MtMTQuMy1hcm02NC1hcm0tNjRiaXRKHAoQcGxhdGZvcm1fcmVsZWFzZRII
|
||||
CgYyMy4zLjBKGwoPcGxhdGZvcm1fc3lzdGVtEggKBkRhcndpbkp7ChBwbGF0Zm9ybV92ZXJzaW9u
|
||||
EmcKZURhcndpbiBLZXJuZWwgVmVyc2lvbiAyMy4zLjA6IFdlZCBEZWMgMjAgMjE6MzA6NTkgUFNU
|
||||
IDIwMjM7IHJvb3Q6eG51LTEwMDAyLjgxLjV+Ny9SRUxFQVNFX0FSTTY0X1Q2MDMwSgoKBGNwdXMS
|
||||
AhgMegIYARL0BwoQGVyvg83A0+tCto6XeaWbshIIsQstUO/9OJIqDENyZXcgQ3JlYXRlZDABOYAP
|
||||
1OSBaLUXQQDF1eSBaLUXShoKDmNyZXdhaV92ZXJzaW9uEggKBjAuMTEuMUoaCg5weXRob25fdmVy
|
||||
c2lvbhIICgYzLjExLjdKMQoHY3Jld19pZBImCiQwMTU5ZjZjNS0yNDkxLTRmMGMtYTc1NC1hMzIy
|
||||
NTM2MmJkMmRKHAoMY3Jld19wcm9jZXNzEgwKCnNlcXVlbnRpYWxKFQoNY3Jld19sYW5ndWFnZRIE
|
||||
CgJlbkoaChRjcmV3X251bWJlcl9vZl90YXNrcxICGAFKGwoVY3Jld19udW1iZXJfb2ZfYWdlbnRz
|
||||
EgIYAUrIAgoLY3Jld19hZ2VudHMSuAIKtQJbeyJpZCI6ICI2MmQ4OTg0Yy1kNThlLTRlZGMtYWM5
|
||||
Mi1lYWI0ZTcxMjI4NDkiLCAicm9sZSI6ICJ0ZXN0IHJvbGUiLCAibWVtb3J5X2VuYWJsZWQ/Ijog
|
||||
dHJ1ZSwgInZlcmJvc2U/IjogdHJ1ZSwgIm1heF9pdGVyIjogNCwgIm1heF9ycG0iOiBudWxsLCAi
|
||||
aTE4biI6ICJlbiIsICJsbG0iOiAie1wibmFtZVwiOiBudWxsLCBcIm1vZGVsX25hbWVcIjogXCJn
|
||||
cHQtNFwiLCBcInRlbXBlcmF0dXJlXCI6IDAuNywgXCJjbGFzc1wiOiBcIkNoYXRPcGVuQUlcIn0i
|
||||
LCAiZGVsZWdhdGlvbl9lbmFibGVkPyI6IHRydWUsICJ0b29sc19uYW1lcyI6IFtdfV1KnQEKCmNy
|
||||
ZXdfdGFza3MSjgEKiwFbeyJpZCI6ICI0MWExYTU3MS1iNmE0LTRlYTYtYThhZS02OGU1ZmE4M2E0
|
||||
OTUiLCAiYXN5bmNfZXhlY3V0aW9uPyI6IGZhbHNlLCAiYWdlbnRfcm9sZSI6ICJ0ZXN0IHJvbGUi
|
||||
LCAidG9vbHNfbmFtZXMiOiBbImdldF9maW5hbF9hbnN3ZXIiXX1dSigKCHBsYXRmb3JtEhwKGm1h
|
||||
Y09TLTE0LjMtYXJtNjQtYXJtLTY0Yml0ShwKEHBsYXRmb3JtX3JlbGVhc2USCAoGMjMuMy4wShsK
|
||||
D3BsYXRmb3JtX3N5c3RlbRIICgZEYXJ3aW5KewoQcGxhdGZvcm1fdmVyc2lvbhJnCmVEYXJ3aW4g
|
||||
S2VybmVsIFZlcnNpb24gMjMuMy4wOiBXZWQgRGVjIDIwIDIxOjMwOjU5IFBTVCAyMDIzOyByb290
|
||||
OnhudS0xMDAwMi44MS41fjcvUkVMRUFTRV9BUk02NF9UNjAzMEoKCgRjcHVzEgIYDHoCGAESjwwK
|
||||
EMMtvuOpjbr3pcyF2HeSmB4SCMkWR08aQxAUKgxDcmV3IENyZWF0ZWQwATlAaAfugWi1F0FgLQnu
|
||||
gWi1F0oaCg5jcmV3YWlfdmVyc2lvbhIICgYwLjExLjFKGgoOcHl0aG9uX3ZlcnNpb24SCAoGMy4x
|
||||
MS43SjEKB2NyZXdfaWQSJgokMWNkYWQ0Y2EtODIzNC00MzQzLWIxNzctNGIyN2Q0YjNjZGJkShwK
|
||||
DGNyZXdfcHJvY2VzcxIMCgpzZXF1ZW50aWFsShUKDWNyZXdfbGFuZ3VhZ2USBAoCZW5KGgoUY3Jl
|
||||
d19udW1iZXJfb2ZfdGFza3MSAhgDShsKFWNyZXdfbnVtYmVyX29mX2FnZW50cxICGAJKggUKC2Ny
|
||||
ZXdfYWdlbnRzEvIECu8EW3siaWQiOiAiNWEwODIyODItZWRkNi00NjdiLTk1NWMtZDliNmI1Y2Vk
|
||||
ZmFmIiwgInJvbGUiOiAidGVzdCByb2xlIiwgIm1lbW9yeV9lbmFibGVkPyI6IHRydWUsICJ2ZXJi
|
||||
b3NlPyI6IGZhbHNlLCAibWF4X2l0ZXIiOiAxNSwgIm1heF9ycG0iOiBudWxsLCAiaTE4biI6ICJl
|
||||
biIsICJsbG0iOiAie1wibmFtZVwiOiBudWxsLCBcIm1vZGVsX25hbWVcIjogXCJncHQtNFwiLCBc
|
||||
InRlbXBlcmF0dXJlXCI6IDAuNywgXCJjbGFzc1wiOiBcIkNoYXRPcGVuQUlcIn0iLCAiZGVsZWdh
|
||||
dGlvbl9lbmFibGVkPyI6IHRydWUsICJ0b29sc19uYW1lcyI6IFtdfSwgeyJpZCI6ICI3Y2IwNmY2
|
||||
Mi1iNzFiLTRmNDItOGQ5ZS1hZDQyM2FjODQwMGMiLCAicm9sZSI6ICJ0ZXN0IHJvbGUyIiwgIm1l
|
||||
bW9yeV9lbmFibGVkPyI6IHRydWUsICJ2ZXJib3NlPyI6IGZhbHNlLCAibWF4X2l0ZXIiOiAxNSwg
|
||||
Im1heF9ycG0iOiBudWxsLCAiaTE4biI6ICJlbiIsICJsbG0iOiAie1wibmFtZVwiOiBudWxsLCBc
|
||||
Im1vZGVsX25hbWVcIjogXCJncHQtNFwiLCBcInRlbXBlcmF0dXJlXCI6IDAuNywgXCJjbGFzc1wi
|
||||
OiBcIkNoYXRPcGVuQUlcIn0iLCAiZGVsZWdhdGlvbl9lbmFibGVkPyI6IHRydWUsICJ0b29sc19u
|
||||
YW1lcyI6IFtdfV1K/gIKCmNyZXdfdGFza3MS7wIK7AJbeyJpZCI6ICI0NjhiM2QzZi00ZmYxLTQy
|
||||
OWQtOTYzMC02MWQ4MzNhNDYxODAiLCAiYXN5bmNfZXhlY3V0aW9uPyI6IGZhbHNlLCAiYWdlbnRf
|
||||
cm9sZSI6ICJ0ZXN0IHJvbGUiLCAidG9vbHNfbmFtZXMiOiBbXX0sIHsiaWQiOiAiNmRmMmY1YmYt
|
||||
ZDZjOS00Y2RjLTkzNTYtNjZkYzMyZWIwYTc4IiwgImFzeW5jX2V4ZWN1dGlvbj8iOiBmYWxzZSwg
|
||||
ImFnZW50X3JvbGUiOiAidGVzdCByb2xlIiwgInRvb2xzX25hbWVzIjogW119LCB7ImlkIjogIjJm
|
||||
Y2FjNjNjLWVkNTItNDMwZS1hY2E3LTU1MmZjOTZkNjdkMyIsICJhc3luY19leGVjdXRpb24/Ijog
|
||||
ZmFsc2UsICJhZ2VudF9yb2xlIjogInRlc3Qgcm9sZTIiLCAidG9vbHNfbmFtZXMiOiBbXX1dSigK
|
||||
CHBsYXRmb3JtEhwKGm1hY09TLTE0LjMtYXJtNjQtYXJtLTY0Yml0ShwKEHBsYXRmb3JtX3JlbGVh
|
||||
c2USCAoGMjMuMy4wShsKD3BsYXRmb3JtX3N5c3RlbRIICgZEYXJ3aW5KewoQcGxhdGZvcm1fdmVy
|
||||
c2lvbhJnCmVEYXJ3aW4gS2VybmVsIFZlcnNpb24gMjMuMy4wOiBXZWQgRGVjIDIwIDIxOjMwOjU5
|
||||
IFBTVCAyMDIzOyByb290OnhudS0xMDAwMi44MS41fjcvUkVMRUFTRV9BUk02NF9UNjAzMEoKCgRj
|
||||
cHVzEgIYDHoCGAEShAgKEO+YplJZB62ddxyM6fGVOdESCL+wftUglT4gKgxDcmV3IENyZWF0ZWQw
|
||||
ATkwCzP9gWi1F0HojTT9gWi1F0oaCg5jcmV3YWlfdmVyc2lvbhIICgYwLjExLjFKGgoOcHl0aG9u
|
||||
X3ZlcnNpb24SCAoGMy4xMS43SjEKB2NyZXdfaWQSJgokYzhiZTYxYmQtNzRkNS00NGRkLTlmNzAt
|
||||
MmI5NzgwZjY2ZDIyShwKDGNyZXdfcHJvY2VzcxIMCgpzZXF1ZW50aWFsShUKDWNyZXdfbGFuZ3Vh
|
||||
Z2USBAoCZW5KGgoUY3Jld19udW1iZXJfb2ZfdGFza3MSAhgBShsKFWNyZXdfbnVtYmVyX29mX2Fn
|
||||
ZW50cxICGAFK2gIKC2NyZXdfYWdlbnRzEsoCCscCW3siaWQiOiAiNzdlYjEwZWYtZTdlMi00NGVl
|
||||
LThlMjEtOTQ0NTAwOTgyNjM3IiwgInJvbGUiOiAidGVzdCByb2xlIiwgIm1lbW9yeV9lbmFibGVk
|
||||
PyI6IHRydWUsICJ2ZXJib3NlPyI6IGZhbHNlLCAibWF4X2l0ZXIiOiAxNSwgIm1heF9ycG0iOiBu
|
||||
dWxsLCAiaTE4biI6ICJlbiIsICJsbG0iOiAie1wibmFtZVwiOiBudWxsLCBcIm1vZGVsX25hbWVc
|
||||
IjogXCJncHQtNFwiLCBcInRlbXBlcmF0dXJlXCI6IDAuNywgXCJjbGFzc1wiOiBcIkNoYXRPcGVu
|
||||
QUlcIn0iLCAiZGVsZWdhdGlvbl9lbmFibGVkPyI6IHRydWUsICJ0b29sc19uYW1lcyI6IFsibGVh
|
||||
cm5fYWJvdXRfQUkiXX1dSpsBCgpjcmV3X3Rhc2tzEowBCokBW3siaWQiOiAiY2FjZjYxYTEtYTI3
|
||||
Mi00MmYyLThkNjgtMzQzYjc1NzRkNjE4IiwgImFzeW5jX2V4ZWN1dGlvbj8iOiBmYWxzZSwgImFn
|
||||
ZW50X3JvbGUiOiAidGVzdCByb2xlIiwgInRvb2xzX25hbWVzIjogWyJsZWFybl9hYm91dF9BSSJd
|
||||
fV1KKAoIcGxhdGZvcm0SHAoabWFjT1MtMTQuMy1hcm02NC1hcm0tNjRiaXRKHAoQcGxhdGZvcm1f
|
||||
cmVsZWFzZRIICgYyMy4zLjBKGwoPcGxhdGZvcm1fc3lzdGVtEggKBkRhcndpbkp7ChBwbGF0Zm9y
|
||||
bV92ZXJzaW9uEmcKZURhcndpbiBLZXJuZWwgVmVyc2lvbiAyMy4zLjA6IFdlZCBEZWMgMjAgMjE6
|
||||
MzA6NTkgUFNUIDIwMjM7IHJvb3Q6eG51LTEwMDAyLjgxLjV+Ny9SRUxFQVNFX0FSTTY0X1Q2MDMw
|
||||
SgoKBGNwdXMSAhgMegIYARKECAoQ3/ART7Hu8wHqWsK8s7GPLhIIWK0QMBvW+rMqDENyZXcgQ3Jl
|
||||
YXRlZDABOeCW1AaCaLUXQdg41gaCaLUXShoKDmNyZXdhaV92ZXJzaW9uEggKBjAuMTEuMUoaCg5w
|
||||
eXRob25fdmVyc2lvbhIICgYzLjExLjdKMQoHY3Jld19pZBImCiQ5OGZhN2E2ZS00NDJhLTQ1Njgt
|
||||
YWIwZS0zYmRkNGJiMWUxYTRKHAoMY3Jld19wcm9jZXNzEgwKCnNlcXVlbnRpYWxKFQoNY3Jld19s
|
||||
YW5ndWFnZRIECgJlbkoaChRjcmV3X251bWJlcl9vZl90YXNrcxICGAFKGwoVY3Jld19udW1iZXJf
|
||||
b2ZfYWdlbnRzEgIYAUraAgoLY3Jld19hZ2VudHMSygIKxwJbeyJpZCI6ICJkNmU0OWUwYy00MmJj
|
||||
LTQxMjItYjBmNy1mNzNlYzE1ZTUwOTEiLCAicm9sZSI6ICJ0ZXN0IHJvbGUiLCAibWVtb3J5X2Vu
|
||||
YWJsZWQ/IjogdHJ1ZSwgInZlcmJvc2U/IjogZmFsc2UsICJtYXhfaXRlciI6IDE1LCAibWF4X3Jw
|
||||
bSI6IG51bGwsICJpMThuIjogImVuIiwgImxsbSI6ICJ7XCJuYW1lXCI6IG51bGwsIFwibW9kZWxf
|
||||
bmFtZVwiOiBcImdwdC00XCIsIFwidGVtcGVyYXR1cmVcIjogMC43LCBcImNsYXNzXCI6IFwiQ2hh
|
||||
dE9wZW5BSVwifSIsICJkZWxlZ2F0aW9uX2VuYWJsZWQ/IjogdHJ1ZSwgInRvb2xzX25hbWVzIjog
|
||||
WyJsZWFybl9hYm91dF9BSSJdfV1KmwEKCmNyZXdfdGFza3MSjAEKiQFbeyJpZCI6ICJmZjUwOWJh
|
||||
Ni0wZWY1LTRhM2QtYmY2MC1iMTI1YjQxOWMwN2YiLCAiYXN5bmNfZXhlY3V0aW9uPyI6IGZhbHNl
|
||||
LCAiYWdlbnRfcm9sZSI6ICJ0ZXN0IHJvbGUiLCAidG9vbHNfbmFtZXMiOiBbImxlYXJuX2Fib3V0
|
||||
X0FJIl19XUooCghwbGF0Zm9ybRIcChptYWNPUy0xNC4zLWFybTY0LWFybS02NGJpdEocChBwbGF0
|
||||
Zm9ybV9yZWxlYXNlEggKBjIzLjMuMEobCg9wbGF0Zm9ybV9zeXN0ZW0SCAoGRGFyd2luSnsKEHBs
|
||||
YXRmb3JtX3ZlcnNpb24SZwplRGFyd2luIEtlcm5lbCBWZXJzaW9uIDIzLjMuMDogV2VkIERlYyAy
|
||||
MCAyMTozMDo1OSBQU1QgMjAyMzsgcm9vdDp4bnUtMTAwMDIuODEuNX43L1JFTEVBU0VfQVJNNjRf
|
||||
VDYwMzBKCgoEY3B1cxICGAx6AhgBErIEChBkDRmgIQJSMs8XzeZJmy9WEgjvUY/YfexjlioMQ3Jl
|
||||
dyBDcmVhdGVkMAE5UGNUE4JotRdB8OlVE4JotRdKGgoOY3Jld2FpX3ZlcnNpb24SCAoGMC4xMS4x
|
||||
ShoKDnB5dGhvbl92ZXJzaW9uEggKBjMuMTEuN0oxCgdjcmV3X2lkEiYKJGI2NjM0ZTcxLWEwZDkt
|
||||
NDY0Ny1hYzExLTNkZGU0MTU2OGI1MUocCgxjcmV3X3Byb2Nlc3MSDAoKc2VxdWVudGlhbEoVCg1j
|
||||
cmV3X2xhbmd1YWdlEgQKAmVuShoKFGNyZXdfbnVtYmVyX29mX3Rhc2tzEgIYAEobChVjcmV3X251
|
||||
bWJlcl9vZl9hZ2VudHMSAhgAShMKC2NyZXdfYWdlbnRzEgQKAltdShIKCmNyZXdfdGFza3MSBAoC
|
||||
W11KKAoIcGxhdGZvcm0SHAoabWFjT1MtMTQuMy1hcm02NC1hcm0tNjRiaXRKHAoQcGxhdGZvcm1f
|
||||
cmVsZWFzZRIICgYyMy4zLjBKGwoPcGxhdGZvcm1fc3lzdGVtEggKBkRhcndpbkp7ChBwbGF0Zm9y
|
||||
bV92ZXJzaW9uEmcKZURhcndpbiBLZXJuZWwgVmVyc2lvbiAyMy4zLjA6IFdlZCBEZWMgMjAgMjE6
|
||||
MzA6NTkgUFNUIDIwMjM7IHJvb3Q6eG51LTEwMDAyLjgxLjV+Ny9SRUxFQVNFX0FSTTY0X1Q2MDMw
|
||||
SgoKBGNwdXMSAhgMegIYARKyBAoQqzgXtMmkYCiVpHhtOc8l5BII8vXSrf+p/swqDENyZXcgQ3Jl
|
||||
YXRlZDABOaCLWROCaLUXQTBuWhOCaLUXShoKDmNyZXdhaV92ZXJzaW9uEggKBjAuMTEuMUoaCg5w
|
||||
eXRob25fdmVyc2lvbhIICgYzLjExLjdKMQoHY3Jld19pZBImCiRjMjQ0OGE2NC1jMjJlLTRmMTAt
|
||||
YWJhYy00MzQ3NTgwOWMwNjdKHAoMY3Jld19wcm9jZXNzEgwKCnNlcXVlbnRpYWxKFQoNY3Jld19s
|
||||
YW5ndWFnZRIECgJlbkoaChRjcmV3X251bWJlcl9vZl90YXNrcxICGABKGwoVY3Jld19udW1iZXJf
|
||||
b2ZfYWdlbnRzEgIYAEoTCgtjcmV3X2FnZW50cxIECgJbXUoSCgpjcmV3X3Rhc2tzEgQKAltdSigK
|
||||
CHBsYXRmb3JtEhwKGm1hY09TLTE0LjMtYXJtNjQtYXJtLTY0Yml0ShwKEHBsYXRmb3JtX3JlbGVh
|
||||
c2USCAoGMjMuMy4wShsKD3BsYXRmb3JtX3N5c3RlbRIICgZEYXJ3aW5KewoQcGxhdGZvcm1fdmVy
|
||||
c2lvbhJnCmVEYXJ3aW4gS2VybmVsIFZlcnNpb24gMjMuMy4wOiBXZWQgRGVjIDIwIDIxOjMwOjU5
|
||||
IFBTVCAyMDIzOyByb290OnhudS0xMDAwMi44MS41fjcvUkVMRUFTRV9BUk02NF9UNjAzMEoKCgRj
|
||||
cHVzEgIYDHoCGAESsgQKEH9v7ozC/TRcIrFffRecZVQSCGk2sHo6qD3EKgxDcmV3IENyZWF0ZWQw
|
||||
ATmgIFAUgmi1F0G4FlEUgmi1F0oaCg5jcmV3YWlfdmVyc2lvbhIICgYwLjExLjFKGgoOcHl0aG9u
|
||||
X3ZlcnNpb24SCAoGMy4xMS43SjEKB2NyZXdfaWQSJgokMGY4OGQyYzEtMjA4Ni00MDdhLWFhY2Mt
|
||||
ZWRjNWNiNzljYjQ4ShwKDGNyZXdfcHJvY2VzcxIMCgpzZXF1ZW50aWFsShUKDWNyZXdfbGFuZ3Vh
|
||||
Z2USBAoCZW5KGgoUY3Jld19udW1iZXJfb2ZfdGFza3MSAhgAShsKFWNyZXdfbnVtYmVyX29mX2Fn
|
||||
ZW50cxICGABKEwoLY3Jld19hZ2VudHMSBAoCW11KEgoKY3Jld190YXNrcxIECgJbXUooCghwbGF0
|
||||
Zm9ybRIcChptYWNPUy0xNC4zLWFybTY0LWFybS02NGJpdEocChBwbGF0Zm9ybV9yZWxlYXNlEggK
|
||||
BjIzLjMuMEobCg9wbGF0Zm9ybV9zeXN0ZW0SCAoGRGFyd2luSnsKEHBsYXRmb3JtX3ZlcnNpb24S
|
||||
ZwplRGFyd2luIEtlcm5lbCBWZXJzaW9uIDIzLjMuMDogV2VkIERlYyAyMCAyMTozMDo1OSBQU1Qg
|
||||
MjAyMzsgcm9vdDp4bnUtMTAwMDIuODEuNX43L1JFTEVBU0VfQVJNNjRfVDYwMzBKCgoEY3B1cxIC
|
||||
GAx6AhgBErIEChBgGyM8bfOC9H6Yresp1LU2EghhF6gh58RxMCoMQ3JldyBDcmVhdGVkMAE5YGxT
|
||||
FIJotRdBsC9UFIJotRdKGgoOY3Jld2FpX3ZlcnNpb24SCAoGMC4xMS4xShoKDnB5dGhvbl92ZXJz
|
||||
aW9uEggKBjMuMTEuN0oxCgdjcmV3X2lkEiYKJDgwNWVjYzk0LWNjYjgtNGUxNS1iZjc0LWEyMjgw
|
||||
YTcxZjNmMEocCgxjcmV3X3Byb2Nlc3MSDAoKc2VxdWVudGlhbEoVCg1jcmV3X2xhbmd1YWdlEgQK
|
||||
AmVuShoKFGNyZXdfbnVtYmVyX29mX3Rhc2tzEgIYAEobChVjcmV3X251bWJlcl9vZl9hZ2VudHMS
|
||||
AhgAShMKC2NyZXdfYWdlbnRzEgQKAltdShIKCmNyZXdfdGFza3MSBAoCW11KKAoIcGxhdGZvcm0S
|
||||
HAoabWFjT1MtMTQuMy1hcm02NC1hcm0tNjRiaXRKHAoQcGxhdGZvcm1fcmVsZWFzZRIICgYyMy4z
|
||||
LjBKGwoPcGxhdGZvcm1fc3lzdGVtEggKBkRhcndpbkp7ChBwbGF0Zm9ybV92ZXJzaW9uEmcKZURh
|
||||
cndpbiBLZXJuZWwgVmVyc2lvbiAyMy4zLjA6IFdlZCBEZWMgMjAgMjE6MzA6NTkgUFNUIDIwMjM7
|
||||
IHJvb3Q6eG51LTEwMDAyLjgxLjV+Ny9SRUxFQVNFX0FSTTY0X1Q2MDMwSgoKBGNwdXMSAhgMegIY
|
||||
ARKyBAoQyEH/nm/WLrr+txJI9c1MNRIInN5POTzXVX4qDENyZXcgQ3JlYXRlZDABOViFVhSCaLUX
|
||||
QQg5VxSCaLUXShoKDmNyZXdhaV92ZXJzaW9uEggKBjAuMTEuMUoaCg5weXRob25fdmVyc2lvbhII
|
||||
CgYzLjExLjdKMQoHY3Jld19pZBImCiRkNTEyNjYyNS03ODFjLTQ2NTktYTAzNC01MmUyOWIxZTcz
|
||||
MGFKHAoMY3Jld19wcm9jZXNzEgwKCnNlcXVlbnRpYWxKFQoNY3Jld19sYW5ndWFnZRIECgJlbkoa
|
||||
ChRjcmV3X251bWJlcl9vZl90YXNrcxICGABKGwoVY3Jld19udW1iZXJfb2ZfYWdlbnRzEgIYAEoT
|
||||
CgtjcmV3X2FnZW50cxIECgJbXUoSCgpjcmV3X3Rhc2tzEgQKAltdSigKCHBsYXRmb3JtEhwKGm1h
|
||||
Y09TLTE0LjMtYXJtNjQtYXJtLTY0Yml0ShwKEHBsYXRmb3JtX3JlbGVhc2USCAoGMjMuMy4wShsK
|
||||
D3BsYXRmb3JtX3N5c3RlbRIICgZEYXJ3aW5KewoQcGxhdGZvcm1fdmVyc2lvbhJnCmVEYXJ3aW4g
|
||||
S2VybmVsIFZlcnNpb24gMjMuMy4wOiBXZWQgRGVjIDIwIDIxOjMwOjU5IFBTVCAyMDIzOyByb290
|
||||
OnhudS0xMDAwMi44MS41fjcvUkVMRUFTRV9BUk02NF9UNjAzMEoKCgRjcHVzEgIYDHoCGAESoAsK
|
||||
EI0iKPIpK2XIwcBR3dYpPokSCNnvOoO6kqq4KgxDcmV3IENyZWF0ZWQwATnoXawUgmi1F0FY7K0U
|
||||
gmi1F0oaCg5jcmV3YWlfdmVyc2lvbhIICgYwLjExLjFKGgoOcHl0aG9uX3ZlcnNpb24SCAoGMy4x
|
||||
MS43SjEKB2NyZXdfaWQSJgokNTUwYzgzNzQtNTU2OS00ODk5LTkzMjgtMjkwMDFjOWE4ODFmShwK
|
||||
DGNyZXdfcHJvY2VzcxIMCgpzZXF1ZW50aWFsShUKDWNyZXdfbGFuZ3VhZ2USBAoCZW5KGgoUY3Jl
|
||||
d19udW1iZXJfb2ZfdGFza3MSAhgCShsKFWNyZXdfbnVtYmVyX29mX2FnZW50cxICGAJKiAUKC2Ny
|
||||
ZXdfYWdlbnRzEvgECvUEW3siaWQiOiAiMTBmODVhZGQtYWVmMC00ODI4LTg2ZjgtYWIwNjQzMTE4
|
||||
NTYzIiwgInJvbGUiOiAiUmVzZWFyY2hlciIsICJtZW1vcnlfZW5hYmxlZD8iOiB0cnVlLCAidmVy
|
||||
Ym9zZT8iOiBmYWxzZSwgIm1heF9pdGVyIjogMTUsICJtYXhfcnBtIjogbnVsbCwgImkxOG4iOiAi
|
||||
ZW4iLCAibGxtIjogIntcIm5hbWVcIjogbnVsbCwgXCJtb2RlbF9uYW1lXCI6IFwiZ3B0LTRcIiwg
|
||||
XCJ0ZW1wZXJhdHVyZVwiOiAwLjcsIFwiY2xhc3NcIjogXCJDaGF0T3BlbkFJXCJ9IiwgImRlbGVn
|
||||
YXRpb25fZW5hYmxlZD8iOiBmYWxzZSwgInRvb2xzX25hbWVzIjogW119LCB7ImlkIjogIjM2MWQy
|
||||
Y2Y3LTQ4MzMtNDVlOS05MzBhLWVlMGQ1YWNjNGZlYyIsICJyb2xlIjogIlNlbmlvciBXcml0ZXIi
|
||||
LCAibWVtb3J5X2VuYWJsZWQ/IjogdHJ1ZSwgInZlcmJvc2U/IjogZmFsc2UsICJtYXhfaXRlciI6
|
||||
IDE1LCAibWF4X3JwbSI6IG51bGwsICJpMThuIjogImVuIiwgImxsbSI6ICJ7XCJuYW1lXCI6IG51
|
||||
bGwsIFwibW9kZWxfbmFtZVwiOiBcImdwdC00XCIsIFwidGVtcGVyYXR1cmVcIjogMC43LCBcImNs
|
||||
YXNzXCI6IFwiQ2hhdE9wZW5BSVwifSIsICJkZWxlZ2F0aW9uX2VuYWJsZWQ/IjogZmFsc2UsICJ0
|
||||
b29sc19uYW1lcyI6IFtdfV1KiQIKCmNyZXdfdGFza3MS+gEK9wFbeyJpZCI6ICI2MjQ2ZDc5Yy0x
|
||||
ZDY5LTRhZjUtYTIzZi1jMDFkYzA1ZWIyMmUiLCAiYXN5bmNfZXhlY3V0aW9uPyI6IGZhbHNlLCAi
|
||||
YWdlbnRfcm9sZSI6ICJSZXNlYXJjaGVyIiwgInRvb2xzX25hbWVzIjogW119LCB7ImlkIjogImJi
|
||||
Yzg3NjllLWVjODQtNDhiOS1iZGQ3LWNmYzNmNGZmYmZmMyIsICJhc3luY19leGVjdXRpb24/Ijog
|
||||
ZmFsc2UsICJhZ2VudF9yb2xlIjogIlNlbmlvciBXcml0ZXIiLCAidG9vbHNfbmFtZXMiOiBbXX1d
|
||||
SigKCHBsYXRmb3JtEhwKGm1hY09TLTE0LjMtYXJtNjQtYXJtLTY0Yml0ShwKEHBsYXRmb3JtX3Jl
|
||||
bGVhc2USCAoGMjMuMy4wShsKD3BsYXRmb3JtX3N5c3RlbRIICgZEYXJ3aW5KewoQcGxhdGZvcm1f
|
||||
dmVyc2lvbhJnCmVEYXJ3aW4gS2VybmVsIFZlcnNpb24gMjMuMy4wOiBXZWQgRGVjIDIwIDIxOjMw
|
||||
OjU5IFBTVCAyMDIzOyByb290OnhudS0xMDAwMi44MS41fjcvUkVMRUFTRV9BUk02NF9UNjAzMEoK
|
||||
CgRjcHVzEgIYDHoCGAESnQoKEKBL0fRR5foqjy0NBV/SMoESCBCt9aF+VH99KgxDcmV3IENyZWF0
|
||||
ZWQwATlwxi8jgmi1F0E4cDEjgmi1F0oaCg5jcmV3YWlfdmVyc2lvbhIICgYwLjExLjFKGgoOcHl0
|
||||
aG9uX3ZlcnNpb24SCAoGMy4xMS43SjEKB2NyZXdfaWQSJgokZjAxNDIzZmMtMjBiOC00ZGExLWFj
|
||||
MjctZDJmZjBkYmM0YjNhSh4KDGNyZXdfcHJvY2VzcxIOCgxoaWVyYXJjaGljYWxKFQoNY3Jld19s
|
||||
YW5ndWFnZRIECgJlbkoaChRjcmV3X251bWJlcl9vZl90YXNrcxICGAFKGwoVY3Jld19udW1iZXJf
|
||||
b2ZfYWdlbnRzEgIYAkqIBQoLY3Jld19hZ2VudHMS+AQK9QRbeyJpZCI6ICIxMGY4NWFkZC1hZWYw
|
||||
LTQ4MjgtODZmOC1hYjA2NDMxMTg1NjMiLCAicm9sZSI6ICJSZXNlYXJjaGVyIiwgIm1lbW9yeV9l
|
||||
bmFibGVkPyI6IHRydWUsICJ2ZXJib3NlPyI6IGZhbHNlLCAibWF4X2l0ZXIiOiAxNSwgIm1heF9y
|
||||
cG0iOiBudWxsLCAiaTE4biI6ICJlbiIsICJsbG0iOiAie1wibmFtZVwiOiBudWxsLCBcIm1vZGVs
|
||||
X25hbWVcIjogXCJncHQtNFwiLCBcInRlbXBlcmF0dXJlXCI6IDAuNywgXCJjbGFzc1wiOiBcIkNo
|
||||
YXRPcGVuQUlcIn0iLCAiZGVsZWdhdGlvbl9lbmFibGVkPyI6IGZhbHNlLCAidG9vbHNfbmFtZXMi
|
||||
OiBbXX0sIHsiaWQiOiAiMzYxZDJjZjctNDgzMy00NWU5LTkzMGEtZWUwZDVhY2M0ZmVjIiwgInJv
|
||||
bGUiOiAiU2VuaW9yIFdyaXRlciIsICJtZW1vcnlfZW5hYmxlZD8iOiB0cnVlLCAidmVyYm9zZT8i
|
||||
OiBmYWxzZSwgIm1heF9pdGVyIjogMTUsICJtYXhfcnBtIjogbnVsbCwgImkxOG4iOiAiZW4iLCAi
|
||||
bGxtIjogIntcIm5hbWVcIjogbnVsbCwgXCJtb2RlbF9uYW1lXCI6IFwiZ3B0LTRcIiwgXCJ0ZW1w
|
||||
ZXJhdHVyZVwiOiAwLjcsIFwiY2xhc3NcIjogXCJDaGF0T3BlbkFJXCJ9IiwgImRlbGVnYXRpb25f
|
||||
ZW5hYmxlZD8iOiBmYWxzZSwgInRvb2xzX25hbWVzIjogW119XUqEAQoKY3Jld190YXNrcxJ2CnRb
|
||||
eyJpZCI6ICIwMjFkMjBkOS0yNzEwLTQ4ZWItYTE3Yy1jMmNkM2Y3MTE1ZGIiLCAiYXN5bmNfZXhl
|
||||
Y3V0aW9uPyI6IGZhbHNlLCAiYWdlbnRfcm9sZSI6ICJOb25lIiwgInRvb2xzX25hbWVzIjogW119
|
||||
XUooCghwbGF0Zm9ybRIcChptYWNPUy0xNC4zLWFybTY0LWFybS02NGJpdEocChBwbGF0Zm9ybV9y
|
||||
ZWxlYXNlEggKBjIzLjMuMEobCg9wbGF0Zm9ybV9zeXN0ZW0SCAoGRGFyd2luSnsKEHBsYXRmb3Jt
|
||||
X3ZlcnNpb24SZwplRGFyd2luIEtlcm5lbCBWZXJzaW9uIDIzLjMuMDogV2VkIERlYyAyMCAyMToz
|
||||
MDo1OSBQU1QgMjAyMzsgcm9vdDp4bnUtMTAwMDIuODEuNX43L1JFTEVBU0VfQVJNNjRfVDYwMzBK
|
||||
CgoEY3B1cxICGAx6AhgBEp0KChC60uV+UvE3TfkONIYsdPIWEgjtxjRGLdWXVyoMQ3JldyBDcmVh
|
||||
dGVkMAE5AFM4K4JotRdBEPE5K4JotRdKGgoOY3Jld2FpX3ZlcnNpb24SCAoGMC4xMS4xShoKDnB5
|
||||
dGhvbl92ZXJzaW9uEggKBjMuMTEuN0oxCgdjcmV3X2lkEiYKJDU0Y2Q3OTE4LTExY2YtNDFlYS04
|
||||
Y2I0LWMzZTUyMjk3ZTA3NEoeCgxjcmV3X3Byb2Nlc3MSDgoMaGllcmFyY2hpY2FsShUKDWNyZXdf
|
||||
bGFuZ3VhZ2USBAoCZW5KGgoUY3Jld19udW1iZXJfb2ZfdGFza3MSAhgBShsKFWNyZXdfbnVtYmVy
|
||||
X29mX2FnZW50cxICGAJKiAUKC2NyZXdfYWdlbnRzEvgECvUEW3siaWQiOiAiMTBmODVhZGQtYWVm
|
||||
MC00ODI4LTg2ZjgtYWIwNjQzMTE4NTYzIiwgInJvbGUiOiAiUmVzZWFyY2hlciIsICJtZW1vcnlf
|
||||
ZW5hYmxlZD8iOiB0cnVlLCAidmVyYm9zZT8iOiBmYWxzZSwgIm1heF9pdGVyIjogMTUsICJtYXhf
|
||||
cnBtIjogbnVsbCwgImkxOG4iOiAiZW4iLCAibGxtIjogIntcIm5hbWVcIjogbnVsbCwgXCJtb2Rl
|
||||
bF9uYW1lXCI6IFwiZ3B0LTRcIiwgXCJ0ZW1wZXJhdHVyZVwiOiAwLjcsIFwiY2xhc3NcIjogXCJD
|
||||
aGF0T3BlbkFJXCJ9IiwgImRlbGVnYXRpb25fZW5hYmxlZD8iOiBmYWxzZSwgInRvb2xzX25hbWVz
|
||||
IjogW119LCB7ImlkIjogIjM2MWQyY2Y3LTQ4MzMtNDVlOS05MzBhLWVlMGQ1YWNjNGZlYyIsICJy
|
||||
b2xlIjogIlNlbmlvciBXcml0ZXIiLCAibWVtb3J5X2VuYWJsZWQ/IjogdHJ1ZSwgInZlcmJvc2U/
|
||||
IjogZmFsc2UsICJtYXhfaXRlciI6IDE1LCAibWF4X3JwbSI6IG51bGwsICJpMThuIjogImVuIiwg
|
||||
ImxsbSI6ICJ7XCJuYW1lXCI6IG51bGwsIFwibW9kZWxfbmFtZVwiOiBcImdwdC00XCIsIFwidGVt
|
||||
cGVyYXR1cmVcIjogMC43LCBcImNsYXNzXCI6IFwiQ2hhdE9wZW5BSVwifSIsICJkZWxlZ2F0aW9u
|
||||
X2VuYWJsZWQ/IjogZmFsc2UsICJ0b29sc19uYW1lcyI6IFtdfV1KhAEKCmNyZXdfdGFza3MSdgp0
|
||||
W3siaWQiOiAiZWMxNWE4OWEtZmUzYy00ODE0LWFiYzgtNmU5MjA2NTBhNjA2IiwgImFzeW5jX2V4
|
||||
ZWN1dGlvbj8iOiBmYWxzZSwgImFnZW50X3JvbGUiOiAiTm9uZSIsICJ0b29sc19uYW1lcyI6IFtd
|
||||
fV1KKAoIcGxhdGZvcm0SHAoabWFjT1MtMTQuMy1hcm02NC1hcm0tNjRiaXRKHAoQcGxhdGZvcm1f
|
||||
cmVsZWFzZRIICgYyMy4zLjBKGwoPcGxhdGZvcm1fc3lzdGVtEggKBkRhcndpbkp7ChBwbGF0Zm9y
|
||||
bV92ZXJzaW9uEmcKZURhcndpbiBLZXJuZWwgVmVyc2lvbiAyMy4zLjA6IFdlZCBEZWMgMjAgMjE6
|
||||
MzA6NTkgUFNUIDIwMjM7IHJvb3Q6eG51LTEwMDAyLjgxLjV+Ny9SRUxFQVNFX0FSTTY0X1Q2MDMw
|
||||
SgoKBGNwdXMSAhgMegIYARKSCgoQCVflxxGNwvMhP8LW1ShkjxIIgQsqXO3qazwqDENyZXcgQ3Jl
|
||||
YXRlZDABOVhYjyuCaLUXQaDDkCuCaLUXShoKDmNyZXdhaV92ZXJzaW9uEggKBjAuMTEuMUoaCg5w
|
||||
eXRob25fdmVyc2lvbhIICgYzLjExLjdKMQoHY3Jld19pZBImCiRlZmI2YmMzYy03N2RhLTQ2ZGMt
|
||||
YTBiYS0xNGM5ZjUzODY5ZjBKHAoMY3Jld19wcm9jZXNzEgwKCnNlcXVlbnRpYWxKFQoNY3Jld19s
|
||||
YW5ndWFnZRIECgJlbkoaChRjcmV3X251bWJlcl9vZl90YXNrcxICGAFKGwoVY3Jld19udW1iZXJf
|
||||
b2ZfYWdlbnRzEgIYAkqABQoLY3Jld19hZ2VudHMS8AQK7QRbeyJpZCI6ICIxMzNhMWQ3Zi1hNjA3
|
||||
LTQ1Y2YtYWUzOS1kYzUwNmZkNWM5ZmMiLCAicm9sZSI6ICJDRU8iLCAibWVtb3J5X2VuYWJsZWQ/
|
||||
IjogdHJ1ZSwgInZlcmJvc2U/IjogZmFsc2UsICJtYXhfaXRlciI6IDE1LCAibWF4X3JwbSI6IG51
|
||||
bGwsICJpMThuIjogImVuIiwgImxsbSI6ICJ7XCJuYW1lXCI6IG51bGwsIFwibW9kZWxfbmFtZVwi
|
||||
OiBcImdwdC00XCIsIFwidGVtcGVyYXR1cmVcIjogMC43LCBcImNsYXNzXCI6IFwiQ2hhdE9wZW5B
|
||||
SVwifSIsICJkZWxlZ2F0aW9uX2VuYWJsZWQ/IjogdHJ1ZSwgInRvb2xzX25hbWVzIjogW119LCB7
|
||||
ImlkIjogIjM2MWQyY2Y3LTQ4MzMtNDVlOS05MzBhLWVlMGQ1YWNjNGZlYyIsICJyb2xlIjogIlNl
|
||||
bmlvciBXcml0ZXIiLCAibWVtb3J5X2VuYWJsZWQ/IjogdHJ1ZSwgInZlcmJvc2U/IjogZmFsc2Us
|
||||
ICJtYXhfaXRlciI6IDE1LCAibWF4X3JwbSI6IG51bGwsICJpMThuIjogImVuIiwgImxsbSI6ICJ7
|
||||
XCJuYW1lXCI6IG51bGwsIFwibW9kZWxfbmFtZVwiOiBcImdwdC00XCIsIFwidGVtcGVyYXR1cmVc
|
||||
IjogMC43LCBcImNsYXNzXCI6IFwiQ2hhdE9wZW5BSVwifSIsICJkZWxlZ2F0aW9uX2VuYWJsZWQ/
|
||||
IjogZmFsc2UsICJ0b29sc19uYW1lcyI6IFtdfV1KgwEKCmNyZXdfdGFza3MSdQpzW3siaWQiOiAi
|
||||
ZDFjMWEzNTEtZjVkNC00YTg0LTk4ZTEtNGIwMDAxMDllYWI1IiwgImFzeW5jX2V4ZWN1dGlvbj8i
|
||||
OiBmYWxzZSwgImFnZW50X3JvbGUiOiAiQ0VPIiwgInRvb2xzX25hbWVzIjogW119XUooCghwbGF0
|
||||
Zm9ybRIcChptYWNPUy0xNC4zLWFybTY0LWFybS02NGJpdEocChBwbGF0Zm9ybV9yZWxlYXNlEggK
|
||||
BjIzLjMuMEobCg9wbGF0Zm9ybV9zeXN0ZW0SCAoGRGFyd2luSnsKEHBsYXRmb3JtX3ZlcnNpb24S
|
||||
ZwplRGFyd2luIEtlcm5lbCBWZXJzaW9uIDIzLjMuMDogV2VkIERlYyAyMCAyMTozMDo1OSBQU1Qg
|
||||
MjAyMzsgcm9vdDp4bnUtMTAwMDIuODEuNX43L1JFTEVBU0VfQVJNNjRfVDYwMzBKCgoEY3B1cxIC
|
||||
GAx6AhgBEqALChAb5sWVeq8pqmeyU+rhNMnjEghRQjxrXAphgCoMQ3JldyBDcmVhdGVkMAE5uOUA
|
||||
O4JotRdByIMCO4JotRdKGgoOY3Jld2FpX3ZlcnNpb24SCAoGMC4xMS4xShoKDnB5dGhvbl92ZXJz
|
||||
aW9uEggKBjMuMTEuN0oxCgdjcmV3X2lkEiYKJGI0Mzg3MzFjLWU5MGQtNDMxZS1hYzIxLTQzODM4
|
||||
MDY1OTNhNEocCgxjcmV3X3Byb2Nlc3MSDAoKc2VxdWVudGlhbEoVCg1jcmV3X2xhbmd1YWdlEgQK
|
||||
AmVuShoKFGNyZXdfbnVtYmVyX29mX3Rhc2tzEgIYAkobChVjcmV3X251bWJlcl9vZl9hZ2VudHMS
|
||||
AhgCSogFCgtjcmV3X2FnZW50cxL4BAr1BFt7ImlkIjogIjEwZjg1YWRkLWFlZjAtNDgyOC04NmY4
|
||||
LWFiMDY0MzExODU2MyIsICJyb2xlIjogIlJlc2VhcmNoZXIiLCAibWVtb3J5X2VuYWJsZWQ/Ijog
|
||||
dHJ1ZSwgInZlcmJvc2U/IjogZmFsc2UsICJtYXhfaXRlciI6IDE1LCAibWF4X3JwbSI6IG51bGws
|
||||
ICJpMThuIjogImVuIiwgImxsbSI6ICJ7XCJuYW1lXCI6IG51bGwsIFwibW9kZWxfbmFtZVwiOiBc
|
||||
ImdwdC00XCIsIFwidGVtcGVyYXR1cmVcIjogMC43LCBcImNsYXNzXCI6IFwiQ2hhdE9wZW5BSVwi
|
||||
fSIsICJkZWxlZ2F0aW9uX2VuYWJsZWQ/IjogZmFsc2UsICJ0b29sc19uYW1lcyI6IFtdfSwgeyJp
|
||||
ZCI6ICIzNjFkMmNmNy00ODMzLTQ1ZTktOTMwYS1lZTBkNWFjYzRmZWMiLCAicm9sZSI6ICJTZW5p
|
||||
b3IgV3JpdGVyIiwgIm1lbW9yeV9lbmFibGVkPyI6IHRydWUsICJ2ZXJib3NlPyI6IGZhbHNlLCAi
|
||||
bWF4X2l0ZXIiOiAxNSwgIm1heF9ycG0iOiBudWxsLCAiaTE4biI6ICJlbiIsICJsbG0iOiAie1wi
|
||||
bmFtZVwiOiBudWxsLCBcIm1vZGVsX25hbWVcIjogXCJncHQtNFwiLCBcInRlbXBlcmF0dXJlXCI6
|
||||
IDAuNywgXCJjbGFzc1wiOiBcIkNoYXRPcGVuQUlcIn0iLCAiZGVsZWdhdGlvbl9lbmFibGVkPyI6
|
||||
IGZhbHNlLCAidG9vbHNfbmFtZXMiOiBbXX1dSokCCgpjcmV3X3Rhc2tzEvoBCvcBW3siaWQiOiAi
|
||||
ZTM0MzE3ZTEtNGE1Mi00ZTIwLTk4NjktMzhjYTkzZjVhNTViIiwgImFzeW5jX2V4ZWN1dGlvbj8i
|
||||
OiBmYWxzZSwgImFnZW50X3JvbGUiOiAiUmVzZWFyY2hlciIsICJ0b29sc19uYW1lcyI6IFtdfSwg
|
||||
eyJpZCI6ICJlZTg0MmQ1MC00NjY4LTRiMDEtOTFmZS0zY2ZkMDFhOTRjZGEiLCAiYXN5bmNfZXhl
|
||||
Y3V0aW9uPyI6IGZhbHNlLCAiYWdlbnRfcm9sZSI6ICJTZW5pb3IgV3JpdGVyIiwgInRvb2xzX25h
|
||||
bWVzIjogW119XUooCghwbGF0Zm9ybRIcChptYWNPUy0xNC4zLWFybTY0LWFybS02NGJpdEocChBw
|
||||
bGF0Zm9ybV9yZWxlYXNlEggKBjIzLjMuMEobCg9wbGF0Zm9ybV9zeXN0ZW0SCAoGRGFyd2luSnsK
|
||||
EHBsYXRmb3JtX3ZlcnNpb24SZwplRGFyd2luIEtlcm5lbCBWZXJzaW9uIDIzLjMuMDogV2VkIERl
|
||||
YyAyMCAyMTozMDo1OSBQU1QgMjAyMzsgcm9vdDp4bnUtMTAwMDIuODEuNX43L1JFTEVBU0VfQVJN
|
||||
NjRfVDYwMzBKCgoEY3B1cxICGAx6AhgBEuUHChBqnLN4z+4Oh0fzwImxbgzUEgg5cOoePHNASioM
|
||||
Q3JldyBDcmVhdGVkMAE5MDfaVoJotRdBeKLbVoJotRdKGgoOY3Jld2FpX3ZlcnNpb24SCAoGMC4x
|
||||
MS4xShoKDnB5dGhvbl92ZXJzaW9uEggKBjMuMTEuN0oxCgdjcmV3X2lkEiYKJDQ0ZWNhNmYyLTE5
|
||||
YzEtNDQ4ZS1iMmUwLTdkNDQ5MTQyODZjN0ocCgxjcmV3X3Byb2Nlc3MSDAoKc2VxdWVudGlhbEoV
|
||||
Cg1jcmV3X2xhbmd1YWdlEgQKAmVuShoKFGNyZXdfbnVtYmVyX29mX3Rhc2tzEgIYAUobChVjcmV3
|
||||
X251bWJlcl9vZl9hZ2VudHMSAhgBSswCCgtjcmV3X2FnZW50cxK8Agq5Alt7ImlkIjogIjEwZjg1
|
||||
YWRkLWFlZjAtNDgyOC04NmY4LWFiMDY0MzExODU2MyIsICJyb2xlIjogIlJlc2VhcmNoZXIiLCAi
|
||||
bWVtb3J5X2VuYWJsZWQ/IjogdHJ1ZSwgInZlcmJvc2U/IjogZmFsc2UsICJtYXhfaXRlciI6IDE1
|
||||
LCAibWF4X3JwbSI6IG51bGwsICJpMThuIjogImVuIiwgImxsbSI6ICJ7XCJuYW1lXCI6IG51bGws
|
||||
IFwibW9kZWxfbmFtZVwiOiBcImdwdC00XCIsIFwidGVtcGVyYXR1cmVcIjogMC43LCBcImNsYXNz
|
||||
XCI6IFwiQ2hhdE9wZW5BSVwifSIsICJkZWxlZ2F0aW9uX2VuYWJsZWQ/IjogZmFsc2UsICJ0b29s
|
||||
c19uYW1lcyI6IFtdfV1KigEKCmNyZXdfdGFza3MSfAp6W3siaWQiOiAiMzhjMjQ5YTEtY2I0YS00
|
||||
NWU4LWFjZWQtODQ1NTFhYTZmZDIwIiwgImFzeW5jX2V4ZWN1dGlvbj8iOiBmYWxzZSwgImFnZW50
|
||||
X3JvbGUiOiAiUmVzZWFyY2hlciIsICJ0b29sc19uYW1lcyI6IFtdfV1KKAoIcGxhdGZvcm0SHAoa
|
||||
bWFjT1MtMTQuMy1hcm02NC1hcm0tNjRiaXRKHAoQcGxhdGZvcm1fcmVsZWFzZRIICgYyMy4zLjBK
|
||||
GwoPcGxhdGZvcm1fc3lzdGVtEggKBkRhcndpbkp7ChBwbGF0Zm9ybV92ZXJzaW9uEmcKZURhcndp
|
||||
biBLZXJuZWwgVmVyc2lvbiAyMy4zLjA6IFdlZCBEZWMgMjAgMjE6MzA6NTkgUFNUIDIwMjM7IHJv
|
||||
b3Q6eG51LTEwMDAyLjgxLjV+Ny9SRUxFQVNFX0FSTTY0X1Q2MDMwSgoKBGNwdXMSAhgMegIYARKj
|
||||
CwoQ2yJHNwDi7vo1/jP4UAxivhIIP8pLRbVUpa8qDENyZXcgQ3JlYXRlZDABOej4x2KCaLUXQehv
|
||||
yWKCaLUXShoKDmNyZXdhaV92ZXJzaW9uEggKBjAuMTEuMUoaCg5weXRob25fdmVyc2lvbhIICgYz
|
||||
LjExLjdKMQoHY3Jld19pZBImCiQ0YzZjNDFkMy00ODNmLTRhYjItOTRmOC1kYzU5ZmJkNWVmZmVK
|
||||
HAoMY3Jld19wcm9jZXNzEgwKCnNlcXVlbnRpYWxKFQoNY3Jld19sYW5ndWFnZRIECgJlbkoaChRj
|
||||
cmV3X251bWJlcl9vZl90YXNrcxICGAJKGwoVY3Jld19udW1iZXJfb2ZfYWdlbnRzEgIYAkr9BAoL
|
||||
Y3Jld19hZ2VudHMS7QQK6gRbeyJpZCI6ICIxMzNhMWQ3Zi1hNjA3LTQ1Y2YtYWUzOS1kYzUwNmZk
|
||||
NWM5ZmMiLCAicm9sZSI6ICJDRU8iLCAibWVtb3J5X2VuYWJsZWQ/IjogdHJ1ZSwgInZlcmJvc2U/
|
||||
IjogZmFsc2UsICJtYXhfaXRlciI6IDE1LCAibWF4X3JwbSI6IG51bGwsICJpMThuIjogImVuIiwg
|
||||
ImxsbSI6ICJ7XCJuYW1lXCI6IG51bGwsIFwibW9kZWxfbmFtZVwiOiBcImdwdC00XCIsIFwidGVt
|
||||
cGVyYXR1cmVcIjogMC43LCBcImNsYXNzXCI6IFwiQ2hhdE9wZW5BSVwifSIsICJkZWxlZ2F0aW9u
|
||||
X2VuYWJsZWQ/IjogdHJ1ZSwgInRvb2xzX25hbWVzIjogW119LCB7ImlkIjogIjEwZjg1YWRkLWFl
|
||||
ZjAtNDgyOC04NmY4LWFiMDY0MzExODU2MyIsICJyb2xlIjogIlJlc2VhcmNoZXIiLCAibWVtb3J5
|
||||
X2VuYWJsZWQ/IjogdHJ1ZSwgInZlcmJvc2U/IjogZmFsc2UsICJtYXhfaXRlciI6IDE1LCAibWF4
|
||||
X3JwbSI6IG51bGwsICJpMThuIjogImVuIiwgImxsbSI6ICJ7XCJuYW1lXCI6IG51bGwsIFwibW9k
|
||||
ZWxfbmFtZVwiOiBcImdwdC00XCIsIFwidGVtcGVyYXR1cmVcIjogMC43LCBcImNsYXNzXCI6IFwi
|
||||
Q2hhdE9wZW5BSVwifSIsICJkZWxlZ2F0aW9uX2VuYWJsZWQ/IjogZmFsc2UsICJ0b29sc19uYW1l
|
||||
cyI6IFtdfV1KlwIKCmNyZXdfdGFza3MSiAIKhQJbeyJpZCI6ICI2ZDAzNDRjOS1jYzVjLTQ1MTQt
|
||||
OGY4Ny02MzAxNGE4MWY4MjciLCAiYXN5bmNfZXhlY3V0aW9uPyI6IGZhbHNlLCAiYWdlbnRfcm9s
|
||||
ZSI6ICJDRU8iLCAidG9vbHNfbmFtZXMiOiBbIm11bHRpcGxpZXIiXX0sIHsiaWQiOiAiNDI0ZGQ0
|
||||
NGUtZjQ4OC00NGUxLWFkMTAtMTg4OGJmM2I4NmIyIiwgImFzeW5jX2V4ZWN1dGlvbj8iOiBmYWxz
|
||||
ZSwgImFnZW50X3JvbGUiOiAiUmVzZWFyY2hlciIsICJ0b29sc19uYW1lcyI6IFsibXVsdGlwbGll
|
||||
ciJdfV1KKAoIcGxhdGZvcm0SHAoabWFjT1MtMTQuMy1hcm02NC1hcm0tNjRiaXRKHAoQcGxhdGZv
|
||||
cm1fcmVsZWFzZRIICgYyMy4zLjBKGwoPcGxhdGZvcm1fc3lzdGVtEggKBkRhcndpbkp7ChBwbGF0
|
||||
Zm9ybV92ZXJzaW9uEmcKZURhcndpbiBLZXJuZWwgVmVyc2lvbiAyMy4zLjA6IFdlZCBEZWMgMjAg
|
||||
MjE6MzA6NTkgUFNUIDIwMjM7IHJvb3Q6eG51LTEwMDAyLjgxLjV+Ny9SRUxFQVNFX0FSTTY0X1Q2
|
||||
MDMwSgoKBGNwdXMSAhgMegIYARL1BwoQrvhyfUymxzKC5iEWN2dWURIIVGkJAcjAZbgqDENyZXcg
|
||||
Q3JlYXRlZDABOVigVnGCaLUXQXATWHGCaLUXShoKDmNyZXdhaV92ZXJzaW9uEggKBjAuMTEuMUoa
|
||||
Cg5weXRob25fdmVyc2lvbhIICgYzLjExLjdKMQoHY3Jld19pZBImCiRiYTA5NTE1Ni1kZTgwLTQ2
|
||||
MDQtODRmOC02ZjY0NDcyMGY5NTVKHAoMY3Jld19wcm9jZXNzEgwKCnNlcXVlbnRpYWxKFQoNY3Jl
|
||||
d19sYW5ndWFnZRIECgJlbkoaChRjcmV3X251bWJlcl9vZl90YXNrcxICGAFKGwoVY3Jld19udW1i
|
||||
ZXJfb2ZfYWdlbnRzEgIYAUrJAgoLY3Jld19hZ2VudHMSuQIKtgJbeyJpZCI6ICI1NmJhNTI0MC1m
|
||||
Mjg3LTQ4MDYtYWU0Zi0zOWE3ZGVjOTUzNWIiLCAicm9sZSI6ICJ0ZXN0IHJvbGUiLCAibWVtb3J5
|
||||
X2VuYWJsZWQ/IjogdHJ1ZSwgInZlcmJvc2U/IjogdHJ1ZSwgIm1heF9pdGVyIjogNSwgIm1heF9y
|
||||
cG0iOiBudWxsLCAiaTE4biI6ICJlbiIsICJsbG0iOiAie1wibmFtZVwiOiBudWxsLCBcIm1vZGVs
|
||||
X25hbWVcIjogXCJncHQtNFwiLCBcInRlbXBlcmF0dXJlXCI6IDAuNywgXCJjbGFzc1wiOiBcIkNo
|
||||
YXRPcGVuQUlcIn0iLCAiZGVsZWdhdGlvbl9lbmFibGVkPyI6IGZhbHNlLCAidG9vbHNfbmFtZXMi
|
||||
OiBbXX1dSp0BCgpjcmV3X3Rhc2tzEo4BCosBW3siaWQiOiAiODI1ZDVjYzgtYzY4Zi00YmVkLTg2
|
||||
NzMtOTMxZjRmMmFlMjA4IiwgImFzeW5jX2V4ZWN1dGlvbj8iOiBmYWxzZSwgImFnZW50X3JvbGUi
|
||||
OiAidGVzdCByb2xlIiwgInRvb2xzX25hbWVzIjogWyJnZXRfZmluYWxfYW5zd2VyIl19XUooCghw
|
||||
bGF0Zm9ybRIcChptYWNPUy0xNC4zLWFybTY0LWFybS02NGJpdEocChBwbGF0Zm9ybV9yZWxlYXNl
|
||||
EggKBjIzLjMuMEobCg9wbGF0Zm9ybV9zeXN0ZW0SCAoGRGFyd2luSnsKEHBsYXRmb3JtX3ZlcnNp
|
||||
b24SZwplRGFyd2luIEtlcm5lbCBWZXJzaW9uIDIzLjMuMDogV2VkIERlYyAyMCAyMTozMDo1OSBQ
|
||||
U1QgMjAyMzsgcm9vdDp4bnUtMTAwMDIuODEuNX43L1JFTEVBU0VfQVJNNjRfVDYwMzBKCgoEY3B1
|
||||
cxICGAx6AhgBEt0IChBnG8wv7PJ1b3KFlw0SBOg0Egjvp/uJcbZA+CoMQ3JldyBDcmVhdGVkMAE5
|
||||
uAA4eoJotRdBGGg5eoJotRdKGgoOY3Jld2FpX3ZlcnNpb24SCAoGMC4xMS4xShoKDnB5dGhvbl92
|
||||
ZXJzaW9uEggKBjMuMTEuN0oxCgdjcmV3X2lkEiYKJDExMjM1NzUyLWMyZTYtNGNjZS1iZWYyLTc3
|
||||
MjY2MGJjNzJjZkocCgxjcmV3X3Byb2Nlc3MSDAoKc2VxdWVudGlhbEoVCg1jcmV3X2xhbmd1YWdl
|
||||
EgQKAmVuShoKFGNyZXdfbnVtYmVyX29mX3Rhc2tzEgIYAkobChVjcmV3X251bWJlcl9vZl9hZ2Vu
|
||||
dHMSAhgBSsoCCgtjcmV3X2FnZW50cxK6Agq3Alt7ImlkIjogImM5ZTNmMmNlLTNhMjEtNDdhYS05
|
||||
ZTAxLTQ5ZTdjNTdlODJjNyIsICJyb2xlIjogInRlc3Qgcm9sZSIsICJtZW1vcnlfZW5hYmxlZD8i
|
||||
OiB0cnVlLCAidmVyYm9zZT8iOiB0cnVlLCAibWF4X2l0ZXIiOiAxNSwgIm1heF9ycG0iOiBudWxs
|
||||
LCAiaTE4biI6ICJlbiIsICJsbG0iOiAie1wibmFtZVwiOiBudWxsLCBcIm1vZGVsX25hbWVcIjog
|
||||
XCJncHQtNFwiLCBcInRlbXBlcmF0dXJlXCI6IDAuNywgXCJjbGFzc1wiOiBcIkNoYXRPcGVuQUlc
|
||||
In0iLCAiZGVsZWdhdGlvbl9lbmFibGVkPyI6IGZhbHNlLCAidG9vbHNfbmFtZXMiOiBbXX1dSoQC
|
||||
CgpjcmV3X3Rhc2tzEvUBCvIBW3siaWQiOiAiMWUwOGVhOTctNzcwNS00NWVkLWEzYjAtOWIzMTA5
|
||||
ZmUwZGRmIiwgImFzeW5jX2V4ZWN1dGlvbj8iOiBmYWxzZSwgImFnZW50X3JvbGUiOiAidGVzdCBy
|
||||
b2xlIiwgInRvb2xzX25hbWVzIjogW119LCB7ImlkIjogIjAzZTlhYTY1LTgyOWEtNGFmNC1iZjc4
|
||||
LTI2MzdlOTUxMGY1OSIsICJhc3luY19leGVjdXRpb24/IjogZmFsc2UsICJhZ2VudF9yb2xlIjog
|
||||
InRlc3Qgcm9sZSIsICJ0b29sc19uYW1lcyI6IFtdfV1KKAoIcGxhdGZvcm0SHAoabWFjT1MtMTQu
|
||||
My1hcm02NC1hcm0tNjRiaXRKHAoQcGxhdGZvcm1fcmVsZWFzZRIICgYyMy4zLjBKGwoPcGxhdGZv
|
||||
cm1fc3lzdGVtEggKBkRhcndpbkp7ChBwbGF0Zm9ybV92ZXJzaW9uEmcKZURhcndpbiBLZXJuZWwg
|
||||
VmVyc2lvbiAyMy4zLjA6IFdlZCBEZWMgMjAgMjE6MzA6NTkgUFNUIDIwMjM7IHJvb3Q6eG51LTEw
|
||||
MDAyLjgxLjV+Ny9SRUxFQVNFX0FSTTY0X1Q2MDMwSgoKBGNwdXMSAhgMegIYARLiBwoQAfjjPs6D
|
||||
LCND7pliOfYfDhII++9fVfMcOB8qDENyZXcgQ3JlYXRlZDABObBwTX+CaLUXQXDITn+CaLUXShoK
|
||||
DmNyZXdhaV92ZXJzaW9uEggKBjAuMTEuMUoaCg5weXRob25fdmVyc2lvbhIICgYzLjExLjdKMQoH
|
||||
Y3Jld19pZBImCiRhZTU3NTIxOC01ZmIxLTRmNTAtYmQ3ZC02YTQ0ODNkMjE4YWJKHAoMY3Jld19w
|
||||
cm9jZXNzEgwKCnNlcXVlbnRpYWxKFQoNY3Jld19sYW5ndWFnZRIECgJlbkoaChRjcmV3X251bWJl
|
||||
cl9vZl90YXNrcxICGAFKGwoVY3Jld19udW1iZXJfb2ZfYWdlbnRzEgIYAUrKAgoLY3Jld19hZ2Vu
|
||||
dHMSugIKtwJbeyJpZCI6ICI4ZmVlNmRkMC1hNTlkLTRiZmItOTVkYS0wNzZkMzgzNTQ0MzIiLCAi
|
||||
cm9sZSI6ICJ0ZXN0IHJvbGUiLCAibWVtb3J5X2VuYWJsZWQ/IjogdHJ1ZSwgInZlcmJvc2U/Ijog
|
||||
dHJ1ZSwgIm1heF9pdGVyIjogMTUsICJtYXhfcnBtIjogbnVsbCwgImkxOG4iOiAiZW4iLCAibGxt
|
||||
IjogIntcIm5hbWVcIjogbnVsbCwgXCJtb2RlbF9uYW1lXCI6IFwiZ3B0LTRcIiwgXCJ0ZW1wZXJh
|
||||
dHVyZVwiOiAwLjcsIFwiY2xhc3NcIjogXCJDaGF0T3BlbkFJXCJ9IiwgImRlbGVnYXRpb25fZW5h
|
||||
YmxlZD8iOiBmYWxzZSwgInRvb2xzX25hbWVzIjogW119XUqJAQoKY3Jld190YXNrcxJ7CnlbeyJp
|
||||
ZCI6ICI2YzgwNjQyNi0zOTAwLTQyODYtYTZlMy1kOWY5ZDFmY2M4YmIiLCAiYXN5bmNfZXhlY3V0
|
||||
aW9uPyI6IGZhbHNlLCAiYWdlbnRfcm9sZSI6ICJ0ZXN0IHJvbGUiLCAidG9vbHNfbmFtZXMiOiBb
|
||||
XX1dSigKCHBsYXRmb3JtEhwKGm1hY09TLTE0LjMtYXJtNjQtYXJtLTY0Yml0ShwKEHBsYXRmb3Jt
|
||||
X3JlbGVhc2USCAoGMjMuMy4wShsKD3BsYXRmb3JtX3N5c3RlbRIICgZEYXJ3aW5KewoQcGxhdGZv
|
||||
cm1fdmVyc2lvbhJnCmVEYXJ3aW4gS2VybmVsIFZlcnNpb24gMjMuMy4wOiBXZWQgRGVjIDIwIDIx
|
||||
OjMwOjU5IFBTVCAyMDIzOyByb290OnhudS0xMDAwMi44MS41fjcvUkVMRUFTRV9BUk02NF9UNjAz
|
||||
MEoKCgRjcHVzEgIYDHoCGAESmAwKEDsJBaixAJXHgKyhDHuP5ZMSCKsRgdLAeU1kKgxDcmV3IENy
|
||||
ZWF0ZWQwATnIHlp/gmi1F0GoR1t/gmi1F0oaCg5jcmV3YWlfdmVyc2lvbhIICgYwLjExLjFKGgoO
|
||||
cHl0aG9uX3ZlcnNpb24SCAoGMy4xMS43SjEKB2NyZXdfaWQSJgokMTQwMjBmNmUtOTg2ZC00ZWY4
|
||||
LWI5MDQtZjJmNTIxNTE2ZjdkShwKDGNyZXdfcHJvY2VzcxIMCgpzZXF1ZW50aWFsShUKDWNyZXdf
|
||||
bGFuZ3VhZ2USBAoCZW5KGgoUY3Jld19udW1iZXJfb2ZfdGFza3MSAhgDShsKFWNyZXdfbnVtYmVy
|
||||
X29mX2FnZW50cxICGAJKiAUKC2NyZXdfYWdlbnRzEvgECvUEW3siaWQiOiAiMTBmODVhZGQtYWVm
|
||||
MC00ODI4LTg2ZjgtYWIwNjQzMTE4NTYzIiwgInJvbGUiOiAiUmVzZWFyY2hlciIsICJtZW1vcnlf
|
||||
ZW5hYmxlZD8iOiB0cnVlLCAidmVyYm9zZT8iOiBmYWxzZSwgIm1heF9pdGVyIjogMTUsICJtYXhf
|
||||
cnBtIjogbnVsbCwgImkxOG4iOiAiZW4iLCAibGxtIjogIntcIm5hbWVcIjogbnVsbCwgXCJtb2Rl
|
||||
bF9uYW1lXCI6IFwiZ3B0LTRcIiwgXCJ0ZW1wZXJhdHVyZVwiOiAwLjcsIFwiY2xhc3NcIjogXCJD
|
||||
aGF0T3BlbkFJXCJ9IiwgImRlbGVnYXRpb25fZW5hYmxlZD8iOiBmYWxzZSwgInRvb2xzX25hbWVz
|
||||
IjogW119LCB7ImlkIjogIjM2MWQyY2Y3LTQ4MzMtNDVlOS05MzBhLWVlMGQ1YWNjNGZlYyIsICJy
|
||||
b2xlIjogIlNlbmlvciBXcml0ZXIiLCAibWVtb3J5X2VuYWJsZWQ/IjogdHJ1ZSwgInZlcmJvc2U/
|
||||
IjogZmFsc2UsICJtYXhfaXRlciI6IDE1LCAibWF4X3JwbSI6IG51bGwsICJpMThuIjogImVuIiwg
|
||||
ImxsbSI6ICJ7XCJuYW1lXCI6IG51bGwsIFwibW9kZWxfbmFtZVwiOiBcImdwdC00XCIsIFwidGVt
|
||||
cGVyYXR1cmVcIjogMC43LCBcImNsYXNzXCI6IFwiQ2hhdE9wZW5BSVwifSIsICJkZWxlZ2F0aW9u
|
||||
X2VuYWJsZWQ/IjogZmFsc2UsICJ0b29sc19uYW1lcyI6IFtdfV1KgQMKCmNyZXdfdGFza3MS8gIK
|
||||
7wJbeyJpZCI6ICJlNDYyZTczZS04ZTk1LTQxNzktOTM2NC0wMjAyMGMyZDYwNDAiLCAiYXN5bmNf
|
||||
ZXhlY3V0aW9uPyI6IHRydWUsICJhZ2VudF9yb2xlIjogIlJlc2VhcmNoZXIiLCAidG9vbHNfbmFt
|
||||
ZXMiOiBbXX0sIHsiaWQiOiAiYWZmYjBlYTYtYjBjOC00ZTE2LWIzYjItZWNhMmZhN2YzZDMxIiwg
|
||||
ImFzeW5jX2V4ZWN1dGlvbj8iOiB0cnVlLCAiYWdlbnRfcm9sZSI6ICJSZXNlYXJjaGVyIiwgInRv
|
||||
b2xzX25hbWVzIjogW119LCB7ImlkIjogIjQ0NWIyNGI4LTQ2NzAtNDIzYS04Mzk5LTdjMjA4NzE5
|
||||
MzE1NCIsICJhc3luY19leGVjdXRpb24/IjogZmFsc2UsICJhZ2VudF9yb2xlIjogIlNlbmlvciBX
|
||||
cml0ZXIiLCAidG9vbHNfbmFtZXMiOiBbXX1dSigKCHBsYXRmb3JtEhwKGm1hY09TLTE0LjMtYXJt
|
||||
NjQtYXJtLTY0Yml0ShwKEHBsYXRmb3JtX3JlbGVhc2USCAoGMjMuMy4wShsKD3BsYXRmb3JtX3N5
|
||||
c3RlbRIICgZEYXJ3aW5KewoQcGxhdGZvcm1fdmVyc2lvbhJnCmVEYXJ3aW4gS2VybmVsIFZlcnNp
|
||||
b24gMjMuMy4wOiBXZWQgRGVjIDIwIDIxOjMwOjU5IFBTVCAyMDIzOyByb290OnhudS0xMDAwMi44
|
||||
MS41fjcvUkVMRUFTRV9BUk02NF9UNjAzMEoKCgRjcHVzEgIYDHoCGAES5AcKENs2IxIKRMt9JVAA
|
||||
RURPdEQSCJoAf6jlLZzTKgxDcmV3IENyZWF0ZWQwATmgYxmAgmi1F0GQsxqAgmi1F0oaCg5jcmV3
|
||||
YWlfdmVyc2lvbhIICgYwLjExLjFKGgoOcHl0aG9uX3ZlcnNpb24SCAoGMy4xMS43SjEKB2NyZXdf
|
||||
aWQSJgokNDY2OGVmMTAtYmIwMi00YTZhLWIwZGQtMGEyNmVhNGIzMjU1ShwKDGNyZXdfcHJvY2Vz
|
||||
cxIMCgpzZXF1ZW50aWFsShUKDWNyZXdfbGFuZ3VhZ2USBAoCZW5KGgoUY3Jld19udW1iZXJfb2Zf
|
||||
dGFza3MSAhgBShsKFWNyZXdfbnVtYmVyX29mX2FnZW50cxICGAFKzAIKC2NyZXdfYWdlbnRzErwC
|
||||
CrkCW3siaWQiOiAiOWRlNTk3ZGYtNmViNy00NGI5LWI1OGEtNGUxZjIyZjA0NGU4IiwgInJvbGUi
|
||||
OiAiUmVzZWFyY2hlciIsICJtZW1vcnlfZW5hYmxlZD8iOiB0cnVlLCAidmVyYm9zZT8iOiBmYWxz
|
||||
ZSwgIm1heF9pdGVyIjogMTUsICJtYXhfcnBtIjogbnVsbCwgImkxOG4iOiAiZW4iLCAibGxtIjog
|
||||
IntcIm5hbWVcIjogbnVsbCwgXCJtb2RlbF9uYW1lXCI6IFwiZ3B0LTRcIiwgXCJ0ZW1wZXJhdHVy
|
||||
ZVwiOiAwLjcsIFwiY2xhc3NcIjogXCJDaGF0T3BlbkFJXCJ9IiwgImRlbGVnYXRpb25fZW5hYmxl
|
||||
ZD8iOiBmYWxzZSwgInRvb2xzX25hbWVzIjogW119XUqJAQoKY3Jld190YXNrcxJ7CnlbeyJpZCI6
|
||||
ICI5MzI0YWFiYi1hYTE1LTRmMmUtYjUyMi1lZjJhYjQyYWM1YjUiLCAiYXN5bmNfZXhlY3V0aW9u
|
||||
PyI6IHRydWUsICJhZ2VudF9yb2xlIjogIlJlc2VhcmNoZXIiLCAidG9vbHNfbmFtZXMiOiBbXX1d
|
||||
SigKCHBsYXRmb3JtEhwKGm1hY09TLTE0LjMtYXJtNjQtYXJtLTY0Yml0ShwKEHBsYXRmb3JtX3Jl
|
||||
bGVhc2USCAoGMjMuMy4wShsKD3BsYXRmb3JtX3N5c3RlbRIICgZEYXJ3aW5KewoQcGxhdGZvcm1f
|
||||
dmVyc2lvbhJnCmVEYXJ3aW4gS2VybmVsIFZlcnNpb24gMjMuMy4wOiBXZWQgRGVjIDIwIDIxOjMw
|
||||
OjU5IFBTVCAyMDIzOyByb290OnhudS0xMDAwMi44MS41fjcvUkVMRUFTRV9BUk02NF9UNjAzMEoK
|
||||
CgRjcHVzEgIYDHoCGAES5AcKEN2W4GxctPgvbUuN50b4LsESCLQyrtF1qM7cKgxDcmV3IENyZWF0
|
||||
ZWQwATkouKCAgmi1F0Eg3aGAgmi1F0oaCg5jcmV3YWlfdmVyc2lvbhIICgYwLjExLjFKGgoOcHl0
|
||||
aG9uX3ZlcnNpb24SCAoGMy4xMS43SjEKB2NyZXdfaWQSJgokZDA1ZWUzNzctMDZhYy00YjU0LWJj
|
||||
NDAtY2I1NmZiMDYwOTA4ShwKDGNyZXdfcHJvY2VzcxIMCgpzZXF1ZW50aWFsShUKDWNyZXdfbGFu
|
||||
Z3VhZ2USBAoCZW5KGgoUY3Jld19udW1iZXJfb2ZfdGFza3MSAhgBShsKFWNyZXdfbnVtYmVyX29m
|
||||
X2FnZW50cxICGAFKzAIKC2NyZXdfYWdlbnRzErwCCrkCW3siaWQiOiAiOWM4OTQ1N2EtNDUwMS00
|
||||
ZWJjLThjNzctMTczYzZjN2YzZDIyIiwgInJvbGUiOiAiUmVzZWFyY2hlciIsICJtZW1vcnlfZW5h
|
||||
YmxlZD8iOiB0cnVlLCAidmVyYm9zZT8iOiBmYWxzZSwgIm1heF9pdGVyIjogMTUsICJtYXhfcnBt
|
||||
IjogbnVsbCwgImkxOG4iOiAiZW4iLCAibGxtIjogIntcIm5hbWVcIjogbnVsbCwgXCJtb2RlbF9u
|
||||
YW1lXCI6IFwiZ3B0LTRcIiwgXCJ0ZW1wZXJhdHVyZVwiOiAwLjcsIFwiY2xhc3NcIjogXCJDaGF0
|
||||
T3BlbkFJXCJ9IiwgImRlbGVnYXRpb25fZW5hYmxlZD8iOiBmYWxzZSwgInRvb2xzX25hbWVzIjog
|
||||
W119XUqJAQoKY3Jld190YXNrcxJ7CnlbeyJpZCI6ICIzOGRiNmY1Yi1lNjFiLTRmMjQtODNlNi03
|
||||
MWM2MDcyOGU3YTIiLCAiYXN5bmNfZXhlY3V0aW9uPyI6IHRydWUsICJhZ2VudF9yb2xlIjogIlJl
|
||||
c2VhcmNoZXIiLCAidG9vbHNfbmFtZXMiOiBbXX1dSigKCHBsYXRmb3JtEhwKGm1hY09TLTE0LjMt
|
||||
YXJtNjQtYXJtLTY0Yml0ShwKEHBsYXRmb3JtX3JlbGVhc2USCAoGMjMuMy4wShsKD3BsYXRmb3Jt
|
||||
X3N5c3RlbRIICgZEYXJ3aW5KewoQcGxhdGZvcm1fdmVyc2lvbhJnCmVEYXJ3aW4gS2VybmVsIFZl
|
||||
cnNpb24gMjMuMy4wOiBXZWQgRGVjIDIwIDIxOjMwOjU5IFBTVCAyMDIzOyByb290OnhudS0xMDAw
|
||||
Mi44MS41fjcvUkVMRUFTRV9BUk02NF9UNjAzMEoKCgRjcHVzEgIYDHoCGAEShAgKEC8jvjWhAsZK
|
||||
yS48nBnpGckSCFKE/i+cC7Q6KgxDcmV3IENyZWF0ZWQwATl4wwyCgmi1F0FgPg6Cgmi1F0oaCg5j
|
||||
cmV3YWlfdmVyc2lvbhIICgYwLjExLjFKGgoOcHl0aG9uX3ZlcnNpb24SCAoGMy4xMS43SjEKB2Ny
|
||||
ZXdfaWQSJgokMzRkNjkxYjItNzA5ZC00MWMxLTg3OGMtNDRkYjE2M2YzNGRmShwKDGNyZXdfcHJv
|
||||
Y2VzcxIMCgpzZXF1ZW50aWFsShUKDWNyZXdfbGFuZ3VhZ2USBAoCZW5KGgoUY3Jld19udW1iZXJf
|
||||
b2ZfdGFza3MSAhgBShsKFWNyZXdfbnVtYmVyX29mX2FnZW50cxICGAFK2gIKC2NyZXdfYWdlbnRz
|
||||
EsoCCscCW3siaWQiOiAiZTU2NTMwYTQtMmJjOS00ZmQyLThjZWEtMDU2MTcwNzE0MmNiIiwgInJv
|
||||
bGUiOiAidGVzdCByb2xlIiwgIm1lbW9yeV9lbmFibGVkPyI6IHRydWUsICJ2ZXJib3NlPyI6IGZh
|
||||
bHNlLCAibWF4X2l0ZXIiOiAxNSwgIm1heF9ycG0iOiBudWxsLCAiaTE4biI6ICJlbiIsICJsbG0i
|
||||
OiAie1wibmFtZVwiOiBudWxsLCBcIm1vZGVsX25hbWVcIjogXCJncHQtNFwiLCBcInRlbXBlcmF0
|
||||
dXJlXCI6IDAuNywgXCJjbGFzc1wiOiBcIkNoYXRPcGVuQUlcIn0iLCAiZGVsZWdhdGlvbl9lbmFi
|
||||
bGVkPyI6IHRydWUsICJ0b29sc19uYW1lcyI6IFsibGVhcm5fYWJvdXRfQUkiXX1dSpsBCgpjcmV3
|
||||
X3Rhc2tzEowBCokBW3siaWQiOiAiN2RlYzgyNDEtYzFiNy00ZWMyLWI4NzgtYjE1YWU3YzZkNjc4
|
||||
IiwgImFzeW5jX2V4ZWN1dGlvbj8iOiBmYWxzZSwgImFnZW50X3JvbGUiOiAidGVzdCByb2xlIiwg
|
||||
InRvb2xzX25hbWVzIjogWyJsZWFybl9hYm91dF9BSSJdfV1KKAoIcGxhdGZvcm0SHAoabWFjT1Mt
|
||||
MTQuMy1hcm02NC1hcm0tNjRiaXRKHAoQcGxhdGZvcm1fcmVsZWFzZRIICgYyMy4zLjBKGwoPcGxh
|
||||
dGZvcm1fc3lzdGVtEggKBkRhcndpbkp7ChBwbGF0Zm9ybV92ZXJzaW9uEmcKZURhcndpbiBLZXJu
|
||||
ZWwgVmVyc2lvbiAyMy4zLjA6IFdlZCBEZWMgMjAgMjE6MzA6NTkgUFNUIDIwMjM7IHJvb3Q6eG51
|
||||
LTEwMDAyLjgxLjV+Ny9SRUxFQVNFX0FSTTY0X1Q2MDMwSgoKBGNwdXMSAhgMegIYARLdBwoQZ+lX
|
||||
gqUablLbHyMcxzcT0xIIvqfNkVQkSecqDENyZXcgQ3JlYXRlZDABOehDY5CCaLUXQTCvZJCCaLUX
|
||||
ShoKDmNyZXdhaV92ZXJzaW9uEggKBjAuMTEuMUoaCg5weXRob25fdmVyc2lvbhIICgYzLjExLjdK
|
||||
MQoHY3Jld19pZBImCiRlZWNjZmE1Yi1jMDRiLTRkMjktOWUwOS01YTNhOWE3YjdlOTZKHAoMY3Jl
|
||||
d19wcm9jZXNzEgwKCnNlcXVlbnRpYWxKFQoNY3Jld19sYW5ndWFnZRIECgJlbkoaChRjcmV3X251
|
||||
bWJlcl9vZl90YXNrcxICGAFKGwoVY3Jld19udW1iZXJfb2ZfYWdlbnRzEgIYAUrIAgoLY3Jld19h
|
||||
Z2VudHMSuAIKtQJbeyJpZCI6ICIyODA2ZjY2ZC0zZmNmLTRmMjgtYTRiOC05NmRiYzAwMmYxNWYi
|
||||
LCAicm9sZSI6ICJTY29yZXIiLCAibWVtb3J5X2VuYWJsZWQ/IjogdHJ1ZSwgInZlcmJvc2U/Ijog
|
||||
ZmFsc2UsICJtYXhfaXRlciI6IDE1LCAibWF4X3JwbSI6IG51bGwsICJpMThuIjogImVuIiwgImxs
|
||||
bSI6ICJ7XCJuYW1lXCI6IG51bGwsIFwibW9kZWxfbmFtZVwiOiBcImdwdC00XCIsIFwidGVtcGVy
|
||||
YXR1cmVcIjogMC43LCBcImNsYXNzXCI6IFwiQ2hhdE9wZW5BSVwifSIsICJkZWxlZ2F0aW9uX2Vu
|
||||
YWJsZWQ/IjogZmFsc2UsICJ0b29sc19uYW1lcyI6IFtdfV1KhgEKCmNyZXdfdGFza3MSeAp2W3si
|
||||
aWQiOiAiOTc0NDY4OTItZTJkMy00MTdlLWIzMDEtN2IzNWRjNmE4ODBlIiwgImFzeW5jX2V4ZWN1
|
||||
dGlvbj8iOiBmYWxzZSwgImFnZW50X3JvbGUiOiAiU2NvcmVyIiwgInRvb2xzX25hbWVzIjogW119
|
||||
XUooCghwbGF0Zm9ybRIcChptYWNPUy0xNC4zLWFybTY0LWFybS02NGJpdEocChBwbGF0Zm9ybV9y
|
||||
ZWxlYXNlEggKBjIzLjMuMEobCg9wbGF0Zm9ybV9zeXN0ZW0SCAoGRGFyd2luSnsKEHBsYXRmb3Jt
|
||||
X3ZlcnNpb24SZwplRGFyd2luIEtlcm5lbCBWZXJzaW9uIDIzLjMuMDogV2VkIERlYyAyMCAyMToz
|
||||
MDo1OSBQU1QgMjAyMzsgcm9vdDp4bnUtMTAwMDIuODEuNX43L1JFTEVBU0VfQVJNNjRfVDYwMzBK
|
||||
CgoEY3B1cxICGAx6AhgBEt0HChC8c5/M8T8hC2aKGlF64RRiEgic9zXLL9VauyoMQ3JldyBDcmVh
|
||||
dGVkMAE58F9ak4JotRdB+Ktbk4JotRdKGgoOY3Jld2FpX3ZlcnNpb24SCAoGMC4xMS4xShoKDnB5
|
||||
dGhvbl92ZXJzaW9uEggKBjMuMTEuN0oxCgdjcmV3X2lkEiYKJGQyZGFkNTVlLWEyODEtNDFiYS05
|
||||
NWQzLTRmODA0Y2FmNGFhNUocCgxjcmV3X3Byb2Nlc3MSDAoKc2VxdWVudGlhbEoVCg1jcmV3X2xh
|
||||
bmd1YWdlEgQKAmVuShoKFGNyZXdfbnVtYmVyX29mX3Rhc2tzEgIYAUobChVjcmV3X251bWJlcl9v
|
||||
Zl9hZ2VudHMSAhgBSsgCCgtjcmV3X2FnZW50cxK4Agq1Alt7ImlkIjogIjk3ZDkwOTJiLTY4YjMt
|
||||
NGJkNS04YmE2LWVkMzZmZDEzZjUxOSIsICJyb2xlIjogIlNjb3JlciIsICJtZW1vcnlfZW5hYmxl
|
||||
ZD8iOiB0cnVlLCAidmVyYm9zZT8iOiBmYWxzZSwgIm1heF9pdGVyIjogMTUsICJtYXhfcnBtIjog
|
||||
bnVsbCwgImkxOG4iOiAiZW4iLCAibGxtIjogIntcIm5hbWVcIjogbnVsbCwgXCJtb2RlbF9uYW1l
|
||||
XCI6IFwiZ3B0LTRcIiwgXCJ0ZW1wZXJhdHVyZVwiOiAwLjcsIFwiY2xhc3NcIjogXCJDaGF0T3Bl
|
||||
bkFJXCJ9IiwgImRlbGVnYXRpb25fZW5hYmxlZD8iOiBmYWxzZSwgInRvb2xzX25hbWVzIjogW119
|
||||
XUqGAQoKY3Jld190YXNrcxJ4CnZbeyJpZCI6ICIzZGJhMzZmZi04ZjI2LTQwYzItOTNjOC00NzE2
|
||||
ZjZjNjBmZDkiLCAiYXN5bmNfZXhlY3V0aW9uPyI6IGZhbHNlLCAiYWdlbnRfcm9sZSI6ICJTY29y
|
||||
ZXIiLCAidG9vbHNfbmFtZXMiOiBbXX1dSigKCHBsYXRmb3JtEhwKGm1hY09TLTE0LjMtYXJtNjQt
|
||||
YXJtLTY0Yml0ShwKEHBsYXRmb3JtX3JlbGVhc2USCAoGMjMuMy4wShsKD3BsYXRmb3JtX3N5c3Rl
|
||||
bRIICgZEYXJ3aW5KewoQcGxhdGZvcm1fdmVyc2lvbhJnCmVEYXJ3aW4gS2VybmVsIFZlcnNpb24g
|
||||
MjMuMy4wOiBXZWQgRGVjIDIwIDIxOjMwOjU5IFBTVCAyMDIzOyByb290OnhudS0xMDAwMi44MS41
|
||||
fjcvUkVMRUFTRV9BUk02NF9UNjAzMEoKCgRjcHVzEgIYDHoCGAES1QgKEN2BJRDQYY7QWWqSxuHe
|
||||
8p8SCBqqCQSDoyEcKgxDcmV3IENyZWF0ZWQwATlAC02Wgmi1F0Ggck6Wgmi1F0oaCg5jcmV3YWlf
|
||||
dmVyc2lvbhIICgYwLjExLjFKGgoOcHl0aG9uX3ZlcnNpb24SCAoGMy4xMS43SjEKB2NyZXdfaWQS
|
||||
JgokMmNhNzY5ZTYtOTc0My00MjQ3LThhYTAtOWUyZWZjYzhjMDQyShwKDGNyZXdfcHJvY2VzcxIM
|
||||
CgpzZXF1ZW50aWFsShUKDWNyZXdfbGFuZ3VhZ2USBAoCZW5KGgoUY3Jld19udW1iZXJfb2ZfdGFz
|
||||
a3MSAhgCShsKFWNyZXdfbnVtYmVyX29mX2FnZW50cxICGAFKyAIKC2NyZXdfYWdlbnRzErgCCrUC
|
||||
W3siaWQiOiAiYTg1YjRjYjQtMjY1OS00ODVlLTgzNDUtYTAzNmE0NmQyZDk5IiwgInJvbGUiOiAi
|
||||
U2NvcmVyIiwgIm1lbW9yeV9lbmFibGVkPyI6IHRydWUsICJ2ZXJib3NlPyI6IGZhbHNlLCAibWF4
|
||||
X2l0ZXIiOiAxNSwgIm1heF9ycG0iOiBudWxsLCAiaTE4biI6ICJlbiIsICJsbG0iOiAie1wibmFt
|
||||
ZVwiOiBudWxsLCBcIm1vZGVsX25hbWVcIjogXCJncHQtNFwiLCBcInRlbXBlcmF0dXJlXCI6IDAu
|
||||
NywgXCJjbGFzc1wiOiBcIkNoYXRPcGVuQUlcIn0iLCAiZGVsZWdhdGlvbl9lbmFibGVkPyI6IGZh
|
||||
bHNlLCAidG9vbHNfbmFtZXMiOiBbXX1dSv4BCgpjcmV3X3Rhc2tzEu8BCuwBW3siaWQiOiAiOTUy
|
||||
OTM0MTQtNWM5My00OTVmLWIxNDgtMzJkY2FjMmZkYWIwIiwgImFzeW5jX2V4ZWN1dGlvbj8iOiBm
|
||||
YWxzZSwgImFnZW50X3JvbGUiOiAiU2NvcmVyIiwgInRvb2xzX25hbWVzIjogW119LCB7ImlkIjog
|
||||
IjhkODZjN2Q3LTEyYzUtNDU1NS04ZjVjLTU0Nzg2MWE2ZjllMSIsICJhc3luY19leGVjdXRpb24/
|
||||
IjogZmFsc2UsICJhZ2VudF9yb2xlIjogIlNjb3JlciIsICJ0b29sc19uYW1lcyI6IFtdfV1KKAoI
|
||||
cGxhdGZvcm0SHAoabWFjT1MtMTQuMy1hcm02NC1hcm0tNjRiaXRKHAoQcGxhdGZvcm1fcmVsZWFz
|
||||
ZRIICgYyMy4zLjBKGwoPcGxhdGZvcm1fc3lzdGVtEggKBkRhcndpbkp7ChBwbGF0Zm9ybV92ZXJz
|
||||
aW9uEmcKZURhcndpbiBLZXJuZWwgVmVyc2lvbiAyMy4zLjA6IFdlZCBEZWMgMjAgMjE6MzA6NTkg
|
||||
UFNUIDIwMjM7IHJvb3Q6eG51LTEwMDAyLjgxLjV+Ny9SRUxFQVNFX0FSTTY0X1Q2MDMwSgoKBGNw
|
||||
dXMSAhgMegIYARLVCAoQhIazhXGDdJlB1HEcA4mNsRIISN/DVD4ZQJoqDENyZXcgQ3JlYXRlZDAB
|
||||
ObjzgpuCaLUXQXB2hJuCaLUXShoKDmNyZXdhaV92ZXJzaW9uEggKBjAuMTEuMUoaCg5weXRob25f
|
||||
dmVyc2lvbhIICgYzLjExLjdKMQoHY3Jld19pZBImCiRmZTQ4YjRlOC0zNWI4LTQzZmEtODBmNS1h
|
||||
MGM5NDM1NmZlMTlKHAoMY3Jld19wcm9jZXNzEgwKCnNlcXVlbnRpYWxKFQoNY3Jld19sYW5ndWFn
|
||||
ZRIECgJlbkoaChRjcmV3X251bWJlcl9vZl90YXNrcxICGAJKGwoVY3Jld19udW1iZXJfb2ZfYWdl
|
||||
bnRzEgIYAUrIAgoLY3Jld19hZ2VudHMSuAIKtQJbeyJpZCI6ICIwYzUwZjU1Ny0xZDdkLTQ2Njct
|
||||
OGQ3OC01ZWI4NjBhY2RhZWUiLCAicm9sZSI6ICJTY29yZXIiLCAibWVtb3J5X2VuYWJsZWQ/Ijog
|
||||
dHJ1ZSwgInZlcmJvc2U/IjogZmFsc2UsICJtYXhfaXRlciI6IDE1LCAibWF4X3JwbSI6IG51bGws
|
||||
ICJpMThuIjogImVuIiwgImxsbSI6ICJ7XCJuYW1lXCI6IG51bGwsIFwibW9kZWxfbmFtZVwiOiBc
|
||||
ImdwdC00XCIsIFwidGVtcGVyYXR1cmVcIjogMC43LCBcImNsYXNzXCI6IFwiQ2hhdE9wZW5BSVwi
|
||||
fSIsICJkZWxlZ2F0aW9uX2VuYWJsZWQ/IjogZmFsc2UsICJ0b29sc19uYW1lcyI6IFtdfV1K/gEK
|
||||
CmNyZXdfdGFza3MS7wEK7AFbeyJpZCI6ICIzOWI4N2JhOS1mNDEyLTRkOWQtOGMxYS00MGZlZWZl
|
||||
Y2NkZDUiLCAiYXN5bmNfZXhlY3V0aW9uPyI6IGZhbHNlLCAiYWdlbnRfcm9sZSI6ICJTY29yZXIi
|
||||
LCAidG9vbHNfbmFtZXMiOiBbXX0sIHsiaWQiOiAiNjNmMjM1OTAtNTc0Ny00ZmZhLWE5ODEtYTcx
|
||||
NmZkYmFjYjQ4IiwgImFzeW5jX2V4ZWN1dGlvbj8iOiBmYWxzZSwgImFnZW50X3JvbGUiOiAiU2Nv
|
||||
cmVyIiwgInRvb2xzX25hbWVzIjogW119XUooCghwbGF0Zm9ybRIcChptYWNPUy0xNC4zLWFybTY0
|
||||
LWFybS02NGJpdEocChBwbGF0Zm9ybV9yZWxlYXNlEggKBjIzLjMuMEobCg9wbGF0Zm9ybV9zeXN0
|
||||
ZW0SCAoGRGFyd2luSnsKEHBsYXRmb3JtX3ZlcnNpb24SZwplRGFyd2luIEtlcm5lbCBWZXJzaW9u
|
||||
IDIzLjMuMDogV2VkIERlYyAyMCAyMTozMDo1OSBQU1QgMjAyMzsgcm9vdDp4bnUtMTAwMDIuODEu
|
||||
NX43L1JFTEVBU0VfQVJNNjRfVDYwMzBKCgoEY3B1cxICGAx6AhgBEt0HChC4GJyNtkFmtfZXC9Uk
|
||||
/eqvEghHGH/dFF+7ISoMQ3JldyBDcmVhdGVkMAE5aKB7oIJotRdBgBN9oIJotRdKGgoOY3Jld2Fp
|
||||
X3ZlcnNpb24SCAoGMC4xMS4xShoKDnB5dGhvbl92ZXJzaW9uEggKBjMuMTEuN0oxCgdjcmV3X2lk
|
||||
EiYKJDlhNzA2ODA2LTlhOTAtNGZmOC1hYTFjLWZmM2UwNTZhZDc3M0ocCgxjcmV3X3Byb2Nlc3MS
|
||||
DAoKc2VxdWVudGlhbEoVCg1jcmV3X2xhbmd1YWdlEgQKAmVuShoKFGNyZXdfbnVtYmVyX29mX3Rh
|
||||
c2tzEgIYAUobChVjcmV3X251bWJlcl9vZl9hZ2VudHMSAhgBSsgCCgtjcmV3X2FnZW50cxK4Agq1
|
||||
Alt7ImlkIjogIjdiNmQ1NmI3LWI4MTgtNDU3YS05ZDRlLTkxNmJkMmEzOWQ0NyIsICJyb2xlIjog
|
||||
IlNjb3JlciIsICJtZW1vcnlfZW5hYmxlZD8iOiB0cnVlLCAidmVyYm9zZT8iOiBmYWxzZSwgIm1h
|
||||
eF9pdGVyIjogMTUsICJtYXhfcnBtIjogbnVsbCwgImkxOG4iOiAiZW4iLCAibGxtIjogIntcIm5h
|
||||
bWVcIjogbnVsbCwgXCJtb2RlbF9uYW1lXCI6IFwiZ3B0LTRcIiwgXCJ0ZW1wZXJhdHVyZVwiOiAw
|
||||
LjcsIFwiY2xhc3NcIjogXCJDaGF0T3BlbkFJXCJ9IiwgImRlbGVnYXRpb25fZW5hYmxlZD8iOiBm
|
||||
YWxzZSwgInRvb2xzX25hbWVzIjogW119XUqGAQoKY3Jld190YXNrcxJ4CnZbeyJpZCI6ICI5MzFh
|
||||
NGI0MS05YTFkLTRkOTAtOWI4MC0zNzAwNTczNGRiYTYiLCAiYXN5bmNfZXhlY3V0aW9uPyI6IGZh
|
||||
bHNlLCAiYWdlbnRfcm9sZSI6ICJTY29yZXIiLCAidG9vbHNfbmFtZXMiOiBbXX1dSigKCHBsYXRm
|
||||
b3JtEhwKGm1hY09TLTE0LjMtYXJtNjQtYXJtLTY0Yml0ShwKEHBsYXRmb3JtX3JlbGVhc2USCAoG
|
||||
MjMuMy4wShsKD3BsYXRmb3JtX3N5c3RlbRIICgZEYXJ3aW5KewoQcGxhdGZvcm1fdmVyc2lvbhJn
|
||||
CmVEYXJ3aW4gS2VybmVsIFZlcnNpb24gMjMuMy4wOiBXZWQgRGVjIDIwIDIxOjMwOjU5IFBTVCAy
|
||||
MDIzOyByb290OnhudS0xMDAwMi44MS41fjcvUkVMRUFTRV9BUk02NF9UNjAzMEoKCgRjcHVzEgIY
|
||||
DHoCGAE=
headers:
Accept:
- '*/*'
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
Content-Length:
- '33179'
Content-Type:
- application/x-protobuf
User-Agent:
- OTel-OTLP-Exporter-Python/1.22.0
method: POST
uri: http://telemetry.crewai.com:4318/v1/traces
response:
body:
string: "\n\0"
headers:
Content-Length:
- '2'
Content-Type:
- application/x-protobuf
Date:
- Mon, 19 Feb 2024 23:49:05 GMT
status:
code: 200
message: OK
- request:
body: '{"messages": [{"role": "user", "content": "Progressively summarize the
lines of conversation provided, adding onto the previous summary returning a
new summary.\n\nEXAMPLE\nCurrent summary:\nThe human asks what the AI thinks
of artificial intelligence. The AI thinks artificial intelligence is a force
for good.\n\nNew lines of conversation:\nHuman: Why do you think artificial
intelligence is a force for good?\nAI: Because artificial intelligence will
help humans reach their full potential.\n\nNew summary:\nThe human asks what
the AI thinks of artificial intelligence. The AI thinks artificial intelligence
is a force for good because it will help humans reach their full potential.\nEND
OF EXAMPLE\n\nCurrent summary:\n\n\nNew lines of conversation:\nHuman: Give
me an integer score between 1-5 for the following title: ''The impact of AI
in the future of work''\nYour final answer must be: The score of the title.\nAI:
4\n\nNew summary:"}], "model": "gpt-4", "n": 1, "stream": false, "temperature":
0.7}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate
connection:
- keep-alive
content-length:
- '1007'
content-type:
- application/json
cookie:
- __cf_bm=7sCpUch4zHBRr3bC0ZrXiZ.hp5JDRz10XR.quEnwOrA-1708386544-1.0-Afdz/ZpiEUiaPexcCrxugPwkeqRkeirb63bcWwb2oQP4BxG9mVHI7ouMoBhxJcQysgMju/x4AOA6ugypjle7VW0=;
_cfuvid=ekJFOKa47vCR_bxQqig9km7tYBNhHULDxrGlY80MRHE-1708386544507-0.0-604800000
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.12.0
x-stainless-arch:
- arm64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- MacOS
x-stainless-package-version:
- 1.12.0
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.11.7
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: !!binary |
H4sIAAAAAAAAA1SRS2vDMBCE7/4Viy65JCFpnEd9aw+F3loSSqGUoMhrW42sFdK6aQn570VyHvQi
0Hw7y2h0zACELkUBQjWSVevMaNUtK1UdDq/79SG3L+8bfizXs6c3y7lBMYwO2n2h4otrrKh1BlmT
7bHyKBnj1ulyspqtFvM8T6ClEk201Y5H+WiymM7Ojoa0wiAK+MgAAI7pjNlsiT+igMnworQYgqxR
FNchAOHJREXIEHRgaVkMb1CRZbQp7qZBaLpWWpBhH4AbhIdnYAIvGdOVNRuEQRzUrZOKgao4o23C
Vcedx6gdyO8HQBYkBCVN0qaj+Rg2/dJaf2MAzYlT78nH4hzrdH2Podp52sW3286Yq15pq0Oz9SgD
2Zg9MLnefsoAPlNvnb8qhPPUOt4y7dGGVP99v0/cvuhGZxfIxNLc9LvpKjsnFOE3MLbbStsavfM6
1RhzZqfsDwAA//8DAE3c61s9AgAA
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 858267800d3200e2-GRU
Cache-Control:
- no-cache, must-revalidate
Connection:
- keep-alive
Content-Encoding:
- gzip
Content-Type:
- application/json
Date:
- Mon, 19 Feb 2024 23:49:06 GMT
Server:
- cloudflare
Transfer-Encoding:
- chunked
access-control-allow-origin:
- '*'
alt-svc:
- h3=":443"; ma=86400
openai-model:
- gpt-4-0613
openai-organization:
- crewai-iuxna1
openai-processing-ms:
- '1626'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=15724800; includeSubDomains
x-ratelimit-limit-requests:
- '10000'
x-ratelimit-limit-tokens:
- '300000'
x-ratelimit-remaining-requests:
- '9999'
x-ratelimit-remaining-tokens:
- '299764'
x-ratelimit-reset-requests:
- 6ms
x-ratelimit-reset-tokens:
- 47ms
x-request-id:
- req_2e0618da30f6aa7a43b2698061939da0
status:
code: 200
message: OK
version: 1
298
tests/cassettes/test_save_task_pydantic_output.yaml
Normal file
298
tests/cassettes/test_save_task_pydantic_output.yaml
Normal file
@@ -0,0 +1,298 @@
interactions:
- request:
body: '{"messages": [{"role": "user", "content": "You are Scorer.\nYou''re an
expert scorer, specialized in scoring titles.\n\nYour personal goal is: Score
the titleTo give my final answer use the exact following format:\n\n```\nFinal
Answer: [my expected final answer, entire content of my most complete final
answer goes here]\n```\nI MUST use these formats, my jobs depends on it!\n\nCurrent
Task: Give me an integer score between 1-5 for the following title: ''The impact
of AI in the future of work''\nYour final answer must be: The score of the title.\n\n
Begin! This is VERY important to you, your job depends on it!\n\n\n"}], "model":
"gpt-4", "n": 1, "stop": ["\nResult"], "stream": true, "temperature": 0.7}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate, br
connection:
- keep-alive
content-length:
- '707'
content-type:
- application/json
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.12.0
x-stainless-arch:
- arm64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- MacOS
x-stainless-package-version:
- 1.12.0
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.11.7
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: 'data: {"id":"chatcmpl-8x6vV33CeGSNI1HTLmNHUVUpnH6v9","object":"chat.completion.chunk","created":1709098669,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}


data: {"id":"chatcmpl-8x6vV33CeGSNI1HTLmNHUVUpnH6v9","object":"chat.completion.chunk","created":1709098669,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]}


data: {"id":"chatcmpl-8x6vV33CeGSNI1HTLmNHUVUpnH6v9","object":"chat.completion.chunk","created":1709098669,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
Answer"},"logprobs":null,"finish_reason":null}]}


data: {"id":"chatcmpl-8x6vV33CeGSNI1HTLmNHUVUpnH6v9","object":"chat.completion.chunk","created":1709098669,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}


data: {"id":"chatcmpl-8x6vV33CeGSNI1HTLmNHUVUpnH6v9","object":"chat.completion.chunk","created":1709098669,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
The"},"logprobs":null,"finish_reason":null}]}


data: {"id":"chatcmpl-8x6vV33CeGSNI1HTLmNHUVUpnH6v9","object":"chat.completion.chunk","created":1709098669,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
score"},"logprobs":null,"finish_reason":null}]}


data: {"id":"chatcmpl-8x6vV33CeGSNI1HTLmNHUVUpnH6v9","object":"chat.completion.chunk","created":1709098669,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
of"},"logprobs":null,"finish_reason":null}]}


data: {"id":"chatcmpl-8x6vV33CeGSNI1HTLmNHUVUpnH6v9","object":"chat.completion.chunk","created":1709098669,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
the"},"logprobs":null,"finish_reason":null}]}


data: {"id":"chatcmpl-8x6vV33CeGSNI1HTLmNHUVUpnH6v9","object":"chat.completion.chunk","created":1709098669,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
title"},"logprobs":null,"finish_reason":null}]}


data: {"id":"chatcmpl-8x6vV33CeGSNI1HTLmNHUVUpnH6v9","object":"chat.completion.chunk","created":1709098669,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
''"},"logprobs":null,"finish_reason":null}]}


data: {"id":"chatcmpl-8x6vV33CeGSNI1HTLmNHUVUpnH6v9","object":"chat.completion.chunk","created":1709098669,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"The"},"logprobs":null,"finish_reason":null}]}


data: {"id":"chatcmpl-8x6vV33CeGSNI1HTLmNHUVUpnH6v9","object":"chat.completion.chunk","created":1709098669,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
impact"},"logprobs":null,"finish_reason":null}]}


data: {"id":"chatcmpl-8x6vV33CeGSNI1HTLmNHUVUpnH6v9","object":"chat.completion.chunk","created":1709098669,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
of"},"logprobs":null,"finish_reason":null}]}


data: {"id":"chatcmpl-8x6vV33CeGSNI1HTLmNHUVUpnH6v9","object":"chat.completion.chunk","created":1709098669,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
AI"},"logprobs":null,"finish_reason":null}]}


data: {"id":"chatcmpl-8x6vV33CeGSNI1HTLmNHUVUpnH6v9","object":"chat.completion.chunk","created":1709098669,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
in"},"logprobs":null,"finish_reason":null}]}


data: {"id":"chatcmpl-8x6vV33CeGSNI1HTLmNHUVUpnH6v9","object":"chat.completion.chunk","created":1709098669,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
the"},"logprobs":null,"finish_reason":null}]}


data: {"id":"chatcmpl-8x6vV33CeGSNI1HTLmNHUVUpnH6v9","object":"chat.completion.chunk","created":1709098669,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
future"},"logprobs":null,"finish_reason":null}]}


data: {"id":"chatcmpl-8x6vV33CeGSNI1HTLmNHUVUpnH6v9","object":"chat.completion.chunk","created":1709098669,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
of"},"logprobs":null,"finish_reason":null}]}


data: {"id":"chatcmpl-8x6vV33CeGSNI1HTLmNHUVUpnH6v9","object":"chat.completion.chunk","created":1709098669,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
work"},"logprobs":null,"finish_reason":null}]}


data: {"id":"chatcmpl-8x6vV33CeGSNI1HTLmNHUVUpnH6v9","object":"chat.completion.chunk","created":1709098669,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"''"},"logprobs":null,"finish_reason":null}]}


data: {"id":"chatcmpl-8x6vV33CeGSNI1HTLmNHUVUpnH6v9","object":"chat.completion.chunk","created":1709098669,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
is"},"logprobs":null,"finish_reason":null}]}


data: {"id":"chatcmpl-8x6vV33CeGSNI1HTLmNHUVUpnH6v9","object":"chat.completion.chunk","created":1709098669,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
"},"logprobs":null,"finish_reason":null}]}


data: {"id":"chatcmpl-8x6vV33CeGSNI1HTLmNHUVUpnH6v9","object":"chat.completion.chunk","created":1709098669,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"4"},"logprobs":null,"finish_reason":null}]}


data: {"id":"chatcmpl-8x6vV33CeGSNI1HTLmNHUVUpnH6v9","object":"chat.completion.chunk","created":1709098669,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]}


data: {"id":"chatcmpl-8x6vV33CeGSNI1HTLmNHUVUpnH6v9","object":"chat.completion.chunk","created":1709098669,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}


data: [DONE]


'
    headers:
      CF-Cache-Status:
      - DYNAMIC
      CF-RAY:
      - 85c6515c5f1d010f-GRU
      Cache-Control:
      - no-cache, must-revalidate
      Connection:
      - keep-alive
      Content-Type:
      - text/event-stream
      Date:
      - Wed, 28 Feb 2024 05:37:50 GMT
      Server:
      - cloudflare
      Set-Cookie:
      - __cf_bm=yJJOvK.mptdBnSOedMJ87OggCnwj18oNe7zJy.U.wLg-1709098670-1.0-ARWeX/RQGrEmYpidT4d8JhgrjaWyqwMX0lE4/NuXV6wQOwk42JIR5NEmoP9Ne95YgKLnPHrlShLb4NLnDwZ1/6E=;
        path=/; expires=Wed, 28-Feb-24 06:07:50 GMT; domain=.api.openai.com; HttpOnly;
        Secure; SameSite=None
      - _cfuvid=jQ_UFtYKPdfJP2SPmfRNzj8gnrGG.JmYddiPcNvV0jU-1709098670103-0.0-604800000;
        path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
      Transfer-Encoding:
      - chunked
      access-control-allow-origin:
      - '*'
      alt-svc:
      - h3=":443"; ma=86400
      openai-model:
      - gpt-4-0613
      openai-organization:
      - crewai-iuxna1
      openai-processing-ms:
      - '320'
      openai-version:
      - '2020-10-01'
      strict-transport-security:
      - max-age=15724800; includeSubDomains
      x-ratelimit-limit-requests:
      - '10000'
      x-ratelimit-limit-tokens:
      - '300000'
      x-ratelimit-remaining-requests:
      - '9999'
      x-ratelimit-remaining-tokens:
      - '299842'
      x-ratelimit-reset-requests:
      - 6ms
      x-ratelimit-reset-tokens:
      - 31ms
      x-request-id:
      - req_b38a6691b43288fba573ff6677ae6769
    status:
      code: 200
      message: OK
- request:
    body: '{"messages": [{"role": "user", "content": "The score of the title ''The
      impact of AI in the future of work'' is 4."}, {"role": "system", "content":
      "I''m gonna convert this raw text into valid JSON."}], "model": "gpt-4", "tool_choice":
      {"type": "function", "function": {"name": "ScoreOutput"}}, "tools": [{"type":
      "function", "function": {"name": "ScoreOutput", "description": "Correctly extracted
      `ScoreOutput` with all the required parameters with correct types", "parameters":
      {"properties": {"score": {"title": "Score", "type": "integer"}}, "required":
      ["score"], "type": "object"}}}]}'
    headers:
      accept:
      - application/json
      accept-encoding:
      - gzip, deflate, br
      connection:
      - keep-alive
      content-length:
      - '586'
      content-type:
      - application/json
      cookie:
      - __cf_bm=yJJOvK.mptdBnSOedMJ87OggCnwj18oNe7zJy.U.wLg-1709098670-1.0-ARWeX/RQGrEmYpidT4d8JhgrjaWyqwMX0lE4/NuXV6wQOwk42JIR5NEmoP9Ne95YgKLnPHrlShLb4NLnDwZ1/6E=;
        _cfuvid=jQ_UFtYKPdfJP2SPmfRNzj8gnrGG.JmYddiPcNvV0jU-1709098670103-0.0-604800000
      host:
      - api.openai.com
      user-agent:
      - OpenAI/Python 1.12.0
      x-stainless-arch:
      - arm64
      x-stainless-async:
      - 'false'
      x-stainless-lang:
      - python
      x-stainless-os:
      - MacOS
      x-stainless-package-version:
      - 1.12.0
      x-stainless-runtime:
      - CPython
      x-stainless-runtime-version:
      - 3.11.7
    method: POST
    uri: https://api.openai.com/v1/chat/completions
  response:
    body:
      string: !!binary |
        ISALACBGs1lPU+1sHVPKF/OzKPbAJT51Krhk0B4UGlOz6RPdgsFcdLfVVrQCBWqtapcf93doJOi+
        1W3v3QqA05wSzCZJzLRT1cGqt3jrnr+U2WN3Es+z6JPLcbf75EynbLMiANp0VmRxJl4LMVCn1kCQ
        m8sfFTklmv3GsDEc9PpN4wva5oWiBMcuVjvVRq9Jit6JvxYo8SkAYNsaAfzufZcSjcooEoUhUAK/
        BNBbVVCCSQjTEBMTWQHk0tZNSphSKeNctFb9ZolS4lANbvu5Q/protRvS+X/p/Gk/7G5vemGx+uD
        h7OHcnjb0ITombVbFBqBUYpBbXVSMALQJHpDn3rMrC9uy+jKSGEAdPHvKcHtlwG+GDLriy9KdL7M
        ntSNe9HHvuXxvcQyKjt23qbhwHBmrQN+3xA2RgmGaJ2MtRfAtyerpXOeofNWu/gb7bwwgRLDHiXA
        ZyYPgEvuDw/kTGaz0RVrJJhG4OR3NDXjwjs/dass9sIAAw==
    headers:
      CF-Cache-Status:
      - DYNAMIC
      CF-RAY:
      - 85c65165bd32010f-GRU
      Cache-Control:
      - no-cache, must-revalidate
      Connection:
      - keep-alive
      Content-Encoding:
      - br
      Content-Type:
      - application/json
      Date:
      - Wed, 28 Feb 2024 05:37:51 GMT
      Server:
      - cloudflare
      Transfer-Encoding:
      - chunked
      access-control-allow-origin:
      - '*'
      alt-svc:
      - h3=":443"; ma=86400
      openai-model:
      - gpt-4-0613
      openai-organization:
      - crewai-iuxna1
      openai-processing-ms:
      - '561'
      openai-version:
      - '2020-10-01'
      strict-transport-security:
      - max-age=15724800; includeSubDomains
      x-ratelimit-limit-requests:
      - '10000'
      x-ratelimit-limit-tokens:
      - '300000'
      x-ratelimit-remaining-requests:
      - '9999'
      x-ratelimit-remaining-tokens:
      - '299951'
      x-ratelimit-reset-requests:
      - 6ms
      x-ratelimit-reset-tokens:
      - 9ms
      x-request-id:
      - req_c8695af2a256e18f279f7a64f4ed3b1c
    status:
      code: 200
      message: OK
version: 1
1331 tests/cassettes/test_task_with_no_arguments.yaml Normal file
File diff suppressed because it is too large
@@ -131,7 +131,7 @@ def test_crew_creation():

    assert (
        crew.kickoff()
== '1. "The Role of AI in Predicting and Managing Pandemics"\nHighlight: \nIn an era where global health crises can emerge from any corner of the world, the role of AI in predicting and managing pandemics has never been more critical. Through intelligent data gathering and predictive analytics, AI can potentially identify the onset of pandemics before they reach critical mass, offering a proactive solution to a reactive problem. This article explores the intersection of AI and epidemiology, delving into how this cutting-edge technology is revolutionizing our approach to global health crises.\n\n2. "AI and the Future of Work: Will Robots Take Our Jobs?"\nHighlight: \nThe rise of AI has sparked both excitement and apprehension about the future of work. Will robots replace us, or will they augment our capabilities? This article delves into the heart of this controversial issue, examining the potential of AI to disrupt job markets, transform industries, and redefine the concept of work. It\'s not just a question of job security—it\'s a discussion about the kind of world we want to live in.\n\n3. "AI in Art and Creativity: A New Frontier in Innovation"\nHighlight: \nArt and creativity, once seen as the exclusive domain of human expression, are being redefined by the advent of AI. From algorithmic compositions to AI-assisted design, this article explores the burgeoning field of AI in art and creativity. It\'s a journey into a new frontier of innovation, one where the lines between human creativity and artificial intelligence blur into an exciting, uncharted territory.\n\n4. "Ethics in AI: Balancing Innovation with Responsibility"\nHighlight: \nAs AI continues to permeate every facet of our lives, questions about its ethical implications grow louder. This article invites readers into a thoughtful exploration of the moral landscape of AI. It challenges us to balance the relentless pursuit of innovation with the weighty responsibilities that come with it, asking: How can we harness the power of AI without losing sight of our human values?\n\n5. "AI in Education: Personalizing Learning for the Next Generation"\nHighlight: \nEducation is poised for a transformation as AI enters the classroom, promising a future where learning is personalized, not generalized. This article delves into how AI can tailor educational experiences to individual learning styles, making education more effective and accessible. It\'s a glimpse into a future where AI is not just a tool for learning, but an active participant in shaping the educational journey of the next generation.'
== '1. "The Role of AI in Predictive Analysis"\nHighlight: AI is revolutionizing the way we understand and utilize data through predictive analysis. Complex algorithms can sift through vast amounts of information, predict future trends and assist businesses in making informed decisions. The article will delve into the intricate workings of AI in predictive analysis and how it is shaping industries from healthcare to finance.\n\nNotes: This topic will focus on the business aspect of AI and its transformative role in data analysis. Case studies from different industries can be used to illustrate the impact of AI in predictive analysis.\n\n2. "The Intersection of AI and Quantum Computing"\nHighlight: As we stand at the crossroads of AI and quantum computing, there’s an unprecedented potential for breakthroughs in processing speed and problem-solving capabilities. This article will explore this exciting intersection, revealing how the fusion of these two technologies can push the boundaries of what\'s possible.\n\nNotes: The article will provide a detailed overview of quantum computing and how its integration with AI can revolutionize various sectors. Real-world applications and future predictions will be included.\n\n3. "AI for Sustainable Development"\nHighlight: In an era where sustainability is a global priority, AI is emerging as a powerful tool in progressing towards this goal. From optimizing resource use to monitoring environmental changes, AI\'s role in sustainable development is multifaceted and transformative. This article will shed light on how AI is being utilized to promote a more sustainable future.\n\nNotes: This topic will delve into the environmental aspect of AI and its potential in promoting sustainable development. Examples of AI applications in different environmental contexts will be provided.\n\n4. "Ethical Implications of AI"\nHighlight: As AI permeates our society, it brings along a host of ethical dilemmas. From privacy concerns to accountability, the ethical implications of AI are as complex as they are critical. This article will take a deep dive into the ethical landscape of AI, exploring the pressing issues and potential solutions.\n\nNotes: This topic will take a philosophical and ethical approach, discussing the moral implications of AI use and how they can be mitigated. It will include a wide range of perspectives from experts in the field.\n\n5. "AI in Art and Creativity"\nHighlight: The world of art is no stranger to the transformative power of AI. From creating original artworks to enhancing creative processes, AI is redefining the boundaries of art and creativity. This article will take you on a journey through the fascinating intersection of AI and creativity, showcasing the revolutionary impact of this technology in the art world.\n\nNotes: This article will explore the artistic side of AI, discussing how it\'s being used in various creative fields. It will feature interviews with artists and creators who are harnessing the power of AI in their work.'
    )

@@ -152,7 +152,7 @@ def test_hierarchical_process():

    assert (
        crew.kickoff()
== """Here are the five interesting ideas for articles with their respective highlights:\n\n1. The Role of AI in Climate Change: As the world grapples with the existential threat of climate change, artificial intelligence (AI) has emerged as a powerful ally in our battle against it. The article will explore how AI is being used to predict weather patterns, optimize renewable energy sources, and even capture and reduce greenhouse emissions. This novel intersection of technology and environment could hold the key to a sustainable future, making this a must-read for anyone interested in the potential of AI to transform our world.\n\n2. AI and Mental Health: With the increasing prevalence of mental health issues worldwide, innovative solutions are needed more than ever. This article will delve into the cutting-edge domain of AI and mental health, exploring how machine learning algorithms are helping to diagnose conditions, personalize treatments, and even predict the onset of mental disorders. This exploration of AI's potential in mental health not only sheds light on the future of healthcare but also opens a dialogue on the ethical considerations involved.\n\n3. The Ethical Implications of AI: As AI continues to permeate our lives, it brings with it a host of ethical considerations. This article will unravel the complex ethical terrain of AI, from issues of privacy and consent to its potential for bias and discrimination. By diving into the philosophical underpinnings of AI and its societal implications, this article will provoke thought and stimulate discussion on how we can ensure a fair and equitable AI-enabled future.\n\n4. How AI is Revolutionizing E-commerce: In the fiercely competitive world of e-commerce, AI is proving to be a game-changer. This article will take you on a journey through the world of AI-enhanced e-commerce, showcasing how machine learning algorithms are optimizing logistics, personalizing shopping experiences, and even predicting consumer behavior. This deep dive into AI's transformative impact on e-commerce is a must-read for anyone interested in the future of business and technology.\n\n5. AI in Space Exploration: The final frontier of space exploration is being redefined by the advent of AI. This article will take you on an interstellar journey through the role of AI in space exploration, from autonomous spacecraft navigation to the search for extraterrestrial life. By peering into the cosmos through the lens of AI, this article offers a glimpse into the future of space exploration and the infinite possibilities that AI holds."""
== "Here are the five interesting ideas for our next article along with a captivating paragraph for each:\n\n1. 'AI and Climate Change: A New Hope for Sustainability':\nIn a world where climate change is a pressing concern, Artificial Intelligence (AI) offers a glimmer of hope. This article will delve into how AI's predictive capabilities and data analysis can aid in sustainability efforts, from optimizing energy consumption to predicting extreme weather patterns. Through real-world examples and expert insights, we'll explore the innovative solutions AI is bringing to the fight against climate change.\n\n2. 'AI in Art: How Neural Networks are Revolutionizing the Artistic Landscape':\nArtificial Intelligence is not just for the tech-savvy; it's making waves in the art world too. This article will unveil how AI and Neural Networks are transforming the artistic landscape, creating a new genre of AI-art. From AI that can replicate the style of famous artists to AI that creates entirely original pieces, we will delve into this fascinating intersection of technology and creativity.\n\n3. 'The Role of AI in the Post-Covid World':\nThe global pandemic has drastically altered our world, and AI has played a pivotal role in this transformation. In this article, we'll explore how AI has been instrumental in everything from predicting the virus's spread to accelerating vaccine development. We'll also look ahead to the post-Covid world, investigating the lasting changes that AI will bring about in our societies.\n\n4. 'Demystifying AI: Breaking Down Complex AI Concepts for the Everyday Reader':\nArtificial Intelligence can seem like a complex and intimidating subject, but it doesn't have to be. This article aims to demystify AI, breaking down complex concepts into understandable nuggets of information. Whether you're an AI novice or a tech enthusiast, this article will enrich your understanding of AI and its impact on our lives.\n\n5. 'The Ethical Dilemmas of AI: Balancing Innovation and Humanity':\nAs AI continues to advance, it brings along a host of ethical dilemmas. This article will delve into the heart of these issues, discussing the balance between innovation and humanity. From the potential for bias in AI algorithms to the implications of autonomous machines, we'll explore the ethical implications of AI in our society."
    )

@@ -187,7 +187,7 @@ def test_crew_with_delegating_agents():

    assert (
        crew.kickoff()
== "The Senior Writer has produced a fantastic 4 paragraph article on AI:\n\n\"Artificial Intelligence, or AI, is often considered the stuff of science fiction, but it is very much a reality in today's world. In simplest terms, AI is a branch of computer science that aims to create machines that mimic human intelligence - think self-driving cars, voice assistants like Siri or Alexa, even your Netflix recommendations. These are all examples of AI in action, silently making our lives easier and more efficient.\n\nThe applications of AI are as vast as our imagination. In healthcare, AI is used to predict diseases and personalize patient care. In finance, algorithms can analyze market trends and make investment decisions. The education sector uses AI to customize learning and identify areas where students need help. Even in creative fields like music and art, AI is making its mark by creating new pieces that are hard to distinguish from those made by humans.\n\nAI's potential for the future is staggering. As technology advances, so too does the complexity and capabilities of AI. It's predicted that AI will play a significant role in tackling some of humanity's biggest challenges, such as climate change and global health crises. Imagine AI systems predicting natural disasters with enough time for us to take preventative measures, or developing new, effective treatments for diseases through data analysis.\n\nHowever, this brave new world does not come without its challenges. Ethical issues are at the forefront, with concerns over privacy and the potential misuse of AI. There's also the question of job displacement due to automation, and the need for laws and regulations to keep pace with this rapidly advancing technology. Despite these hurdles, the promise of AI and its ability to transform our world is an exciting prospect, one that we are only just beginning to explore.\""
== "In today's technological landscape, Artificial Intelligence (AI) agents have emerged as key players in shaping the future of various industries. These agents, which are essentially computer programs that can learn, adapt, and operate autonomously, are a testament to the rapidly evolving capabilities of AI. They are the harbingers of a new era, where machines can mimic human intelligence, and in some cases, even surpass it.\n\nAI agents are transforming the way we engage with technology, enabling a more personalized and efficient user experience. They are extensively used in areas like customer service, where chatbots can handle customer inquiries without human intervention. They have revolutionized sectors like healthcare, where AI agents can analyze patient data to predict health trends and provide personalized treatment recommendations. \n\nHowever, as AI agents continue to evolve, they also pose significant ethical and regulatory challenges. There are concerns about privacy, bias, and the potential misuse of these technologies. As a society, it's crucial to establish norms and regulations that ensure the responsible use of AI agents, balancing their benefits with potential risks.\n\nIn conclusion, AI agents are a transformative technology that is reshaping our world. The challenges they present are complex, but the opportunities they offer are immense. As we continue to explore and understand this technology, we can harness its potential to create a more efficient, personalized, and intelligent future."
    )

@@ -299,9 +299,10 @@ def test_api_calls_throttling(capsys):
    from unittest.mock import patch

    from langchain.tools import tool
    from langchain_openai import ChatOpenAI

    @tool
    def get_final_answer(numbers) -> float:
    def get_final_answer(anything) -> float:
        """Get the final answer but don't give it yet, just re-use this
        tool non-stop."""
        return 42
@@ -313,6 +314,7 @@ def test_api_calls_throttling(capsys):
        max_iter=5,
        allow_delegation=False,
        verbose=True,
        llm=ChatOpenAI(model="gpt-4-0125-preview"),
    )

    task = Task(
@@ -416,10 +418,10 @@ def test_async_task_execution():
        start.return_value = thread
        with patch.object(threading.Thread, "join", wraps=thread.join()) as join:
            list_ideas.output = TaskOutput(
                description="A 4 paragraph article about AI.", result="ok"
                description="A 4 paragraph article about AI.", raw_output="ok"
            )
            list_important_history.output = TaskOutput(
                description="A 4 paragraph article about AI.", result="ok"
                description="A 4 paragraph article about AI.", raw_output="ok"
            )
            crew.kickoff()
            start.assert_called()
@@ -501,7 +503,7 @@ def test_crew_function_calling_llm():
    from langchain.tools import tool
    from langchain_openai import ChatOpenAI

    llm = ChatOpenAI(model="gpt-3.5")
    llm = ChatOpenAI(model="gpt-3.5-turbo-0125")

    with patch.object(llm.client, "create", wraps=llm.client.create) as private_mock:

@@ -514,6 +516,7 @@ def test_crew_function_calling_llm():
        role="test role",
        goal="test goal",
        backstory="test backstory",
        llm=ChatOpenAI(model="gpt-4-0125-preview"),
        tools=[learn_about_AI],
    )

@@ -522,8 +525,149 @@ def test_crew_function_calling_llm():
        agent=agent1,
    )
    tasks = [essay]
    print(agent1.function_calling_llm)
    crew = Crew(agents=[agent1], tasks=tasks, function_calling_llm=llm)
    print(agent1.function_calling_llm)
    crew.kickoff()
    private_mock.assert_called()


@pytest.mark.vcr(filter_headers=["authorization"])
def test_task_with_no_arguments():
    from langchain.tools import tool

    @tool
    def return_data() -> str:
        "Useful to get the sales related data"
        return "January: 5, February: 10, March: 15, April: 20, May: 25"

    researcher = Agent(
        role="Researcher",
        goal="Make the best research and analysis on content about AI and AI agents",
        backstory="You're an expert researcher, specialized in technology, software engineering, AI and startups. You work as a freelancer and is now working on doing research and analysis for a new customer.",
        tools=[return_data],
        allow_delegation=False,
    )

    task = Task(
        description="Look at the available data nd give me a sense on the total number of sales.",
        agent=researcher,
    )

    crew = Crew(agents=[researcher], tasks=[task])

    result = crew.kickoff()
    assert result == "The total number of sales from January to May is 75."


def test_delegation_is_not_enabled_if_there_are_only_one_agent():
    from unittest.mock import patch

    researcher = Agent(
        role="Researcher",
        goal="Make the best research and analysis on content about AI and AI agents",
        backstory="You're an expert researcher, specialized in technology, software engineering, AI and startups. You work as a freelancer and is now working on doing research and analysis for a new customer.",
        allow_delegation=True,
    )

    task = Task(
        description="Look at the available data nd give me a sense on the total number of sales.",
        agent=researcher,
    )

    crew = Crew(agents=[researcher], tasks=[task])

    with patch.object(Task, "execute") as execute:
        execute.return_value = "ok"
        crew.kickoff()
        assert task.tools == []


@pytest.mark.vcr(filter_headers=["authorization"])
def test_agents_do_not_get_delegation_tools_with_there_is_only_one_agent():
    agent = Agent(
        role="Researcher",
        goal="Be super empathetic.",
        backstory="You're love to sey howdy.",
        allow_delegation=False,
    )

    task = Task(description="say howdy", expected_output="Howdy!", agent=agent)

    crew = Crew(agents=[agent], tasks=[task])

    result = crew.kickoff()
    assert result == "Howdy!"
    assert len(agent.tools) == 0


@pytest.mark.vcr(filter_headers=["authorization"])
def test_agent_usage_metrics_are_captured_for_sequential_process():
    agent = Agent(
        role="Researcher",
        goal="Be super empathetic.",
        backstory="You're love to sey howdy.",
        allow_delegation=False,
    )

    task = Task(description="say howdy", expected_output="Howdy!", agent=agent)

    crew = Crew(agents=[agent], tasks=[task])

    result = crew.kickoff()
    assert result == "Howdy!"
    assert crew.usage_metrics == {
        "completion_tokens": 8,
        "prompt_tokens": 103,
        "successful_requests": 1,
        "total_tokens": 111,
    }


@pytest.mark.vcr(filter_headers=["authorization"])
def test_agent_usage_metrics_are_captured_for_hierarchical_process():
    from langchain_openai import ChatOpenAI

    agent = Agent(
        role="Researcher",
        goal="Be super empathetic.",
        backstory="You're love to sey howdy.",
        allow_delegation=False,
    )

    task = Task(description="say howdy", expected_output="Howdy!")

    crew = Crew(
        agents=[agent],
        tasks=[task],
        process=Process.hierarchical,
        manager_llm=ChatOpenAI(temperature=0, model="gpt-4"),
    )

    result = crew.kickoff()
    assert result == "Howdy!"
    assert crew.usage_metrics == {
        "total_tokens": 1365,
        "prompt_tokens": 1256,
        "completion_tokens": 109,
        "successful_requests": 3,
    }


def test_crew_inputs_interpolate_both_agents_and_tasks():
    agent = Agent(
        role="{topic} Researcher",
        goal="Express hot takes on {topic}.",
        backstory="You have a lot of experience with {topic}.",
    )

    task = Task(
        description="Give me an analysis around {topic}.",
        expected_output="{points} bullet points about {topic}.",
    )

    crew = Crew(agents=[agent], tasks=[task], inputs={"topic": "AI", "points": 5})

    assert crew.tasks[0].description == "Give me an analysis around AI."
    assert crew.tasks[0].expected_output == "5 bullet points about AI."
    assert crew.agents[0].role == "AI Researcher"
    assert crew.agents[0].goal == "Express hot takes on AI."
    assert crew.agents[0].backstory == "You have a lot of experience with AI."
@@ -2,8 +2,11 @@

from unittest.mock import MagicMock, patch

from crewai.agent import Agent
from crewai.task import Task
import pytest
from pydantic import BaseModel
from pydantic_core import ValidationError

from crewai import Agent, Crew, Process, Task


def test_task_tool_reflect_agent_tools():
@@ -136,3 +139,311 @@ def test_async_execution():
    with patch.object(Agent, "execute_task", return_value="ok") as execute:
        task.execute(agent=researcher)
        execute.assert_called_once_with(task=task, context=None, tools=[])


def test_multiple_output_type_error():
    class Output(BaseModel):
        field: str

    with pytest.raises(ValidationError):
        Task(
            description="Give me a list of 5 interesting ideas to explore for na article, what makes them unique and interesting.",
            expected_output="Bullet point list of 5 interesting ideas.",
            output_json=Output,
            output_pydantic=Output,
        )


@pytest.mark.vcr(filter_headers=["authorization"])
def test_output_pydantic():
    class ScoreOutput(BaseModel):
        score: int

    scorer = Agent(
        role="Scorer",
        goal="Score the title",
        backstory="You're an expert scorer, specialized in scoring titles.",
        allow_delegation=False,
    )

    task = Task(
        description="Give me an integer score between 1-5 for the following title: 'The impact of AI in the future of work'",
        expected_output="The score of the title.",
        output_pydantic=ScoreOutput,
        agent=scorer,
    )

    crew = Crew(agents=[scorer], tasks=[task])
    result = crew.kickoff()
    assert isinstance(result, ScoreOutput)


@pytest.mark.vcr(filter_headers=["authorization"])
def test_output_json():
    class ScoreOutput(BaseModel):
        score: int

    scorer = Agent(
        role="Scorer",
        goal="Score the title",
        backstory="You're an expert scorer, specialized in scoring titles.",
        allow_delegation=False,
    )

    task = Task(
        description="Give me an integer score between 1-5 for the following title: 'The impact of AI in the future of work'",
        expected_output="The score of the title.",
        output_json=ScoreOutput,
        agent=scorer,
    )

    crew = Crew(agents=[scorer], tasks=[task])
    result = crew.kickoff()
    assert '{\n "score": 4\n}' == result


@pytest.mark.vcr(filter_headers=["authorization"])
def test_output_pydantic_to_another_task():
    from langchain_openai import ChatOpenAI

    class ScoreOutput(BaseModel):
        score: int

    scorer = Agent(
        role="Scorer",
        goal="Score the title",
        backstory="You're an expert scorer, specialized in scoring titles.",
        allow_delegation=False,
        llm=ChatOpenAI(model="gpt-4-0125-preview"),
        function_calling_llm=ChatOpenAI(model="gpt-3.5-turbo-0125"),
        verbose=True,
    )

    task1 = Task(
        description="Give me an integer score between 1-5 for the following title: 'The impact of AI in the future of work'",
        expected_output="The score of the title.",
        output_pydantic=ScoreOutput,
        agent=scorer,
    )

    task2 = Task(
        description="Given the score the title 'The impact of AI in the future of work' got, give me an integer score between 1-5 for the following title: 'Return of the Jedi', you MUST give it a score, use your best judgment",
        expected_output="The score of the title.",
        output_pydantic=ScoreOutput,
        agent=scorer,
    )

    crew = Crew(agents=[scorer], tasks=[task1, task2], verbose=2)
    result = crew.kickoff()
    assert 4 == result.score


@pytest.mark.vcr(filter_headers=["authorization"])
def test_output_json_to_another_task():
    class ScoreOutput(BaseModel):
        score: int

    scorer = Agent(
        role="Scorer",
        goal="Score the title",
        backstory="You're an expert scorer, specialized in scoring titles.",
        allow_delegation=False,
    )

    task1 = Task(
        description="Give me an integer score between 1-5 for the following title: 'The impact of AI in the future of work'",
        expected_output="The score of the title.",
        output_json=ScoreOutput,
        agent=scorer,
    )

    task2 = Task(
        description="Given the score the title 'The impact of AI in the future of work' got, give me an integer score between 1-5 for the following title: 'Return of the Jedi'",
        expected_output="The score of the title.",
        output_json=ScoreOutput,
        agent=scorer,
    )

    crew = Crew(agents=[scorer], tasks=[task1, task2])
    result = crew.kickoff()
    assert '{\n "score": 5\n}' == result


@pytest.mark.vcr(filter_headers=["authorization"])
def test_save_task_output():
    scorer = Agent(
        role="Scorer",
        goal="Score the title",
        backstory="You're an expert scorer, specialized in scoring titles.",
        allow_delegation=False,
    )

    task = Task(
        description="Give me an integer score between 1-5 for the following title: 'The impact of AI in the future of work'",
        expected_output="The score of the title.",
        output_file="score.json",
        agent=scorer,
    )

    crew = Crew(agents=[scorer], tasks=[task])

    with patch.object(Task, "_save_file") as save_file:
        save_file.return_value = None
        crew.kickoff()
        save_file.assert_called_once()


@pytest.mark.vcr(filter_headers=["authorization"])
def test_save_task_json_output():
    class ScoreOutput(BaseModel):
        score: int

    scorer = Agent(
        role="Scorer",
        goal="Score the title",
        backstory="You're an expert scorer, specialized in scoring titles.",
        allow_delegation=False,
    )

    task = Task(
        description="Give me an integer score between 1-5 for the following title: 'The impact of AI in the future of work'",
        expected_output="The score of the title.",
        output_file="score.json",
        output_json=ScoreOutput,
        agent=scorer,
    )

    crew = Crew(agents=[scorer], tasks=[task])

    with patch.object(Task, "_save_file") as save_file:
        save_file.return_value = None
        crew.kickoff()
        save_file.assert_called_once_with('{\n "score": 4\n}')


@pytest.mark.vcr(filter_headers=["authorization"])
def test_save_task_pydantic_output():
    class ScoreOutput(BaseModel):
        score: int

    scorer = Agent(
        role="Scorer",
        goal="Score the title",
        backstory="You're an expert scorer, specialized in scoring titles.",
        allow_delegation=False,
    )

    task = Task(
        description="Give me an integer score between 1-5 for the following title: 'The impact of AI in the future of work'",
        expected_output="The score of the title.",
        output_file="score.json",
        output_pydantic=ScoreOutput,
        agent=scorer,
    )

    crew = Crew(agents=[scorer], tasks=[task])

    with patch.object(Task, "_save_file") as save_file:
        save_file.return_value = None
        crew.kickoff()
        save_file.assert_called_once_with('{"score":4}')


@pytest.mark.vcr(filter_headers=["authorization"])
def test_increment_delegations_for_hierarchical_process():
    from langchain_openai import ChatOpenAI

    scorer = Agent(
        role="Scorer",
        goal="Score the title",
        backstory="You're an expert scorer, specialized in scoring titles.",
        allow_delegation=False,
    )

    task = Task(
        description="Give me an integer score between 1-5 for the following title: 'The impact of AI in the future of work'",
        expected_output="The score of the title.",
    )

    crew = Crew(
        agents=[scorer],
        tasks=[task],
        process=Process.hierarchical,
        manager_llm=ChatOpenAI(model="gpt-4-0125-preview"),
    )

    with patch.object(Task, "increment_delegations") as increment_delegations:
        increment_delegations.return_value = None
        crew.kickoff()
        increment_delegations.assert_called_once


@pytest.mark.vcr(filter_headers=["authorization"])
def test_increment_delegations_for_sequential_process():
    pass

    manager = Agent(
        role="Manager",
        goal="Coordinate scoring processes",
        backstory="You're great at delegating work about scoring.",
        allow_delegation=False,
    )

    scorer = Agent(
        role="Scorer",
        goal="Score the title",
        backstory="You're an expert scorer, specialized in scoring titles.",
        allow_delegation=False,
    )

    task = Task(
        description="Give me an integer score between 1-5 for the following title: 'The impact of AI in the future of work'",
        expected_output="The score of the title.",
        agent=manager,
    )

    crew = Crew(
        agents=[manager, scorer],
        tasks=[task],
        process=Process.sequential,
    )

    with patch.object(Task, "increment_delegations") as increment_delegations:
        increment_delegations.return_value = None
        crew.kickoff()
        increment_delegations.assert_called_once


@pytest.mark.vcr(filter_headers=["authorization"])
def test_increment_tool_errors():
    from crewai_tools import tool
    from langchain_openai import ChatOpenAI

    @tool
    def scoring_examples() -> None:
        "Useful examples for scoring titles."
        raise Exception("Error")

    scorer = Agent(
        role="Scorer",
        goal="Score the title",
        backstory="You're an expert scorer, specialized in scoring titles.",
        tools=[scoring_examples],
    )

    task = Task(
        description="Give me an integer score between 1-5 for the following title: 'The impact of AI in the future of work', check examples to based your evaluation.",
        expected_output="The score of the title.",
    )

    crew = Crew(
        agents=[scorer],
        tasks=[task],
        process=Process.hierarchical,
        manager_llm=ChatOpenAI(model="gpt-4-0125-preview"),
    )

    with patch.object(Task, "increment_tools_errors") as increment_tools_errors:
        increment_tools_errors.return_value = None
        crew.kickoff()
        increment_tools_errors.assert_called_once