Clean up chat

Brandon Hancock
2025-01-27 12:00:41 -05:00
parent f8f3b10588
commit 45efae8ebb


@@ -1,6 +1,9 @@
 import json
+import platform
 import re
 import sys
+import threading
+import time
 from pathlib import Path
 from typing import Any, Dict, List, Optional, Set, Tuple
@@ -25,11 +28,11 @@ def check_conversational_crews_version(
     Check if the installed crewAI version supports conversational crews.
 
     Args:
-        crewai_version: The current version of crewAI
-        pyproject_data: Dictionary containing pyproject.toml data
+        crewai_version: The current version of crewAI.
+        pyproject_data: Dictionary containing pyproject.toml data.
 
     Returns:
-        bool: True if version check passes, False otherwise
+        bool: True if version check passes, False otherwise.
     """
     try:
         if version.parse(crewai_version) < version.parse(MIN_REQUIRED_VERSION):
@@ -40,7 +43,7 @@ def check_conversational_crews_version(
             )
             return False
     except version.InvalidVersion:
-        click.secho("Invalid crewAI version format detected", fg="red")
+        click.secho("Invalid crewAI version format detected.", fg="red")
         return False
 
     return True
@@ -56,6 +59,7 @@ def run_chat():
     if not check_conversational_crews_version(crewai_version, pyproject_data):
         return
 
     crew, crew_name = load_crew_and_name()
     chat_llm = initialize_chat_llm(crew)
     if not chat_llm:
@@ -63,26 +67,14 @@ def run_chat():
     # Indicate that the crew is being analyzed
     click.secho(
-        "\nAnalyzing crew and required inputs - this may take 3 to 30 seconds depending on the complexity of your crew.",
+        "\nAnalyzing crew and required inputs - this may take 3 to 30 seconds "
+        "depending on the complexity of your crew.",
         fg="white",
     )
 
-    # Function to show loading dots
-    def show_loading():
-        import sys
-        import time
-
-        while not loading_complete.is_set():
-            sys.stdout.write(".")
-            sys.stdout.flush()
-            time.sleep(1)
-        print()
-
     # Start loading indicator
-    import threading
-
     loading_complete = threading.Event()
-    loading_thread = threading.Thread(target=show_loading)
+    loading_thread = threading.Thread(target=show_loading, args=(loading_complete,))
     loading_thread.start()
 
     try:
@@ -116,6 +108,14 @@ def run_chat():
     chat_loop(chat_llm, messages, crew_tool_schema, available_functions)
 
 
+def show_loading(event: threading.Event):
+    """Display animated loading dots while processing."""
+    while not event.is_set():
+        print(".", end="", flush=True)
+        time.sleep(1)
+    print()
+
+
 def initialize_chat_llm(crew: Crew) -> Optional[LLM]:
     """Initializes the chat LLM and handles exceptions."""
     try:
@@ -149,7 +149,7 @@ def build_system_message(crew_chat_inputs: ChatInputs) -> str:
         "Please keep your responses concise and friendly. "
         "If a user asks a question outside the crew's scope, provide a brief answer and remind them of the crew's purpose. "
         "After calling the tool, be prepared to take user feedback and make adjustments as needed. "
-        "If you are ever unsure about a user's request or need clarification, ask the user for more information."
+        "If you are ever unsure about a user's request or need clarification, ask the user for more information. "
         "Before doing anything else, introduce yourself with a friendly message like: 'Hey! I'm here to help you with [crew's purpose]. Could you please provide me with [inputs] so we can get started?' "
         "For example: 'Hey! I'm here to help you with uncovering and reporting cutting-edge developments through thorough research and detailed analysis. Could you please provide me with a topic you're interested in? This will help us generate a comprehensive research report and detailed analysis.'"
         f"\nCrew Name: {crew_chat_inputs.crew_name}"
@@ -168,9 +168,6 @@ def create_tool_function(crew: Crew, messages: List[Dict[str, str]]) -> Any:
 def flush_input():
     """Flush any pending input from the user."""
-    import platform
-    import sys
-
     if platform.system() == "Windows":
         # Windows platform
         import msvcrt
@@ -191,44 +188,10 @@ def chat_loop(chat_llm, messages, crew_tool_schema, available_functions):
             # Flush any pending input before accepting new input
             flush_input()
 
-            click.secho(
-                "\nYou (type your message below. Press 'Enter' twice when you're done):",
-                fg="blue",
-            )
-            user_input_lines = []
-            while True:
-                line = input()
-                if line.strip().lower() == "exit":
-                    click.echo("Exiting chat. Goodbye!")
-                    return
-                if line == "":
-                    break
-                user_input_lines.append(line)
-            user_input = "\n".join(user_input_lines)
-
-            if not user_input.strip():
-                click.echo(
-                    "Empty message. Please provide input or type 'exit' to quit."
-                )
-                continue
-
-            messages.append({"role": "user", "content": user_input})
-
-            # Indicate that assistant is processing
-            click.echo()
-            click.secho(
-                "Assistant is processing your input. Please wait...", fg="green"
-            )
-
-            # Process assistant's response
-            final_response = chat_llm.call(
-                messages=messages,
-                tools=[crew_tool_schema],
-                available_functions=available_functions,
-            )
-
-            messages.append({"role": "assistant", "content": final_response})
-            click.secho(f"\nAssistant: {final_response}\n", fg="green")
+            user_input = get_user_input()
+            handle_user_input(
+                user_input, chat_llm, messages, crew_tool_schema, available_functions
+            )
 
         except KeyboardInterrupt:
             click.echo("\nExiting chat. Goodbye!")
@@ -238,6 +201,55 @@ def chat_loop(chat_llm, messages, crew_tool_schema, available_functions):
             break
 
 
+def get_user_input() -> str:
+    """Collect multi-line user input with exit handling."""
+    click.secho(
+        "\nYou (type your message below. Press 'Enter' twice when you're done):",
+        fg="blue",
+    )
+    user_input_lines = []
+    while True:
+        line = input()
+        if line.strip().lower() == "exit":
+            return "exit"
+        if line == "":
+            break
+        user_input_lines.append(line)
+    return "\n".join(user_input_lines)
+
+
+def handle_user_input(
+    user_input: str,
+    chat_llm: LLM,
+    messages: List[Dict[str, str]],
+    crew_tool_schema: Dict[str, Any],
+    available_functions: Dict[str, Any],
+) -> None:
+    if user_input.strip().lower() == "exit":
+        click.echo("Exiting chat. Goodbye!")
+        return
+
+    if not user_input.strip():
+        click.echo("Empty message. Please provide input or type 'exit' to quit.")
+        return
+
+    messages.append({"role": "user", "content": user_input})
+
+    # Indicate that assistant is processing
+    click.echo()
+    click.secho("Assistant is processing your input. Please wait...", fg="green")
+
+    # Process assistant's response
+    final_response = chat_llm.call(
+        messages=messages,
+        tools=[crew_tool_schema],
+        available_functions=available_functions,
+    )
+
+    messages.append({"role": "assistant", "content": final_response})
+    click.secho(f"\nAssistant: {final_response}\n", fg="green")
+
+
 def generate_crew_tool_schema(crew_inputs: ChatInputs) -> dict:
     """
     Dynamically build a Littellm 'function' schema for the given crew.
@@ -432,10 +444,10 @@ def generate_input_description_with_ai(input_name: str, crew: Crew, chat_llm) ->
         ):
             # Replace placeholders with input names
             task_description = placeholder_pattern.sub(
-                lambda m: m.group(1), task.description
+                lambda m: m.group(1), task.description or ""
             )
             expected_output = placeholder_pattern.sub(
-                lambda m: m.group(1), task.expected_output
+                lambda m: m.group(1), task.expected_output or ""
             )
             context_texts.append(f"Task Description: {task_description}")
             context_texts.append(f"Expected Output: {expected_output}")
@@ -446,10 +458,10 @@ def generate_input_description_with_ai(input_name: str, crew: Crew, chat_llm) ->
             or f"{{{input_name}}}" in agent.backstory
         ):
             # Replace placeholders with input names
-            agent_role = placeholder_pattern.sub(lambda m: m.group(1), agent.role)
-            agent_goal = placeholder_pattern.sub(lambda m: m.group(1), agent.goal)
+            agent_role = placeholder_pattern.sub(lambda m: m.group(1), agent.role or "")
+            agent_goal = placeholder_pattern.sub(lambda m: m.group(1), agent.goal or "")
             agent_backstory = placeholder_pattern.sub(
-                lambda m: m.group(1), agent.backstory
+                lambda m: m.group(1), agent.backstory or ""
             )
             context_texts.append(f"Agent Role: {agent_role}")
             context_texts.append(f"Agent Goal: {agent_goal}")
@@ -490,18 +502,20 @@ def generate_crew_description_with_ai(crew: Crew, chat_llm) -> str:
     for task in crew.tasks:
         # Replace placeholders with input names
         task_description = placeholder_pattern.sub(
-            lambda m: m.group(1), task.description
+            lambda m: m.group(1), task.description or ""
         )
         expected_output = placeholder_pattern.sub(
-            lambda m: m.group(1), task.expected_output
+            lambda m: m.group(1), task.expected_output or ""
         )
         context_texts.append(f"Task Description: {task_description}")
         context_texts.append(f"Expected Output: {expected_output}")
     for agent in crew.agents:
         # Replace placeholders with input names
-        agent_role = placeholder_pattern.sub(lambda m: m.group(1), agent.role)
-        agent_goal = placeholder_pattern.sub(lambda m: m.group(1), agent.goal)
-        agent_backstory = placeholder_pattern.sub(lambda m: m.group(1), agent.backstory)
+        agent_role = placeholder_pattern.sub(lambda m: m.group(1), agent.role or "")
+        agent_goal = placeholder_pattern.sub(lambda m: m.group(1), agent.goal or "")
+        agent_backstory = placeholder_pattern.sub(
+            lambda m: m.group(1), agent.backstory or ""
+        )
         context_texts.append(f"Agent Role: {agent_role}")
         context_texts.append(f"Agent Goal: {agent_goal}")
         context_texts.append(f"Agent Backstory: {agent_backstory}")
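For reference, a minimal standalone sketch of the loading-indicator pattern used in the diff above: the dot-printing loop takes a threading.Event instead of closing over one, so it can live at module level and be stopped from the caller. The names mirror the diff; the time.sleep(3) stand-in for the crew-analysis step is illustrative only.

import threading
import time


def show_loading(event: threading.Event):
    """Print a dot every second until the event is set, then end the line."""
    while not event.is_set():
        print(".", end="", flush=True)
        time.sleep(1)
    print()


if __name__ == "__main__":
    loading_complete = threading.Event()
    loading_thread = threading.Thread(target=show_loading, args=(loading_complete,))
    loading_thread.start()

    time.sleep(3)  # stand-in for the long-running work being indicated

    loading_complete.set()
    loading_thread.join()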