import os

from anthropic import AnthropicBedrock
from langchain_aws.chat_models import ChatBedrockConverse
from langchain_aws.llms.bedrock import BedrockLLM

# Streaming Bedrock chat model, configured entirely from environment variables
# so deployments can switch region/provider/model without a code change.
# NOTE(review): when AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY are unset,
# .get() passes None and the underlying boto3 client falls back to its default
# credential chain (instance profile, ~/.aws, etc.) — confirm that is intended.
bedrock_llm = ChatBedrockConverse(
    aws_access_key_id=os.environ.get("AWS_ACCESS_KEY_ID"),
    aws_secret_access_key=os.environ.get("AWS_SECRET_ACCESS_KEY"),
    region_name=os.environ.get("AWS_DEFAULT_REGION", "eu-west-1"),
    provider=os.environ.get("PROVIDER", "mistral"),
    # Any Bedrock Converse-compatible model id may be supplied via MODEL_ID.
    model_id=os.environ.get("MODEL_ID", "mistral.mistral-large-2402-v1:0"),
    temperature=0.7,
)