diff --git a/docs/concepts/llms.mdx b/docs/concepts/llms.mdx
index 851e93085..cf564f2e7 100644
--- a/docs/concepts/llms.mdx
+++ b/docs/concepts/llms.mdx
@@ -243,6 +243,9 @@ There are three ways to configure LLMs in CrewAI. Choose the method that best fi
       # llm: bedrock/amazon.titan-text-express-v1
       # llm: bedrock/meta.llama2-70b-chat-v1
 
+      # Amazon SageMaker Models - Enterprise-grade
+      # llm: sagemaker/<my-endpoint-name>
+
       # Mistral Models - Open source alternative
       # llm: mistral/mistral-large-latest
       # llm: mistral/mistral-medium-latest
@@ -506,6 +509,21 @@ Learn how to get the most out of your LLM configuration:
        )
     ```
   </Accordion>
+  <Accordion title="Amazon SageMaker">
+    ```python Code
+    AWS_ACCESS_KEY_ID=<your-access-key-id>
+    AWS_SECRET_ACCESS_KEY=<your-secret-access-key>
+    AWS_DEFAULT_REGION=<your-region-name>
+    ```
+
+    Example usage:
+    ```python Code
+    llm = LLM(
+        model="sagemaker/<my-endpoint-name>"
+    )
+    ```
+  </Accordion>
+
   <Accordion title="Mistral">
     ```python Code
diff --git a/src/crewai/cli/constants.py b/src/crewai/cli/constants.py
index b97b4f208..1d70d0921 100644
--- a/src/crewai/cli/constants.py
+++ b/src/crewai/cli/constants.py
@@ -63,6 +63,20 @@ ENV_VARS = {
             "key_name": "AWS_REGION_NAME",
         },
     ],
+    "sagemaker": [
+        {
+            "prompt": "Enter your AWS Access Key ID (press Enter to skip)",
+            "key_name": "AWS_ACCESS_KEY_ID",
+        },
+        {
+            "prompt": "Enter your AWS Secret Access Key (press Enter to skip)",
+            "key_name": "AWS_SECRET_ACCESS_KEY",
+        },
+        {
+            "prompt": "Enter your AWS Region Name (press Enter to skip)",
+            "key_name": "AWS_REGION_NAME",
+        },
+    ],
     "azure": [
         {
             "prompt": "Enter your Azure deployment name (must start with 'azure/')",
@@ -109,6 +123,7 @@ PROVIDERS = [
     "ollama",
     "watson",
     "bedrock",
+    "sagemaker",
     "azure",
     "cerebras",
     "sambanova",
@@ -239,6 +254,7 @@ MODELS = {
         "bedrock/mistral.mistral-7b-instruct-v0:2",
         "bedrock/mistral.mixtral-8x7b-instruct-v0:1",
     ],
+    "sagemaker": ["sagemaker/<my-endpoint-name>"],
     "sambanova": [
         "sambanova/Meta-Llama-3.3-70B-Instruct",
         "sambanova/QwQ-32B-Preview",