from openai import OpenAI

from params import OPENAI_MODEL, OPENAI_API_KEY

import llamanet

# Start the LlamaNet runtime, pointing it at a local GGUF model checkpoint.
# NOTE(review): this runs at import time — importing this module has the side
# effect of launching LlamaNet.
llamanet.run(
    "start",
    "https://huggingface.co/arcee-ai/Arcee-Spark-GGUF/blob/main/Arcee-Spark-IQ4_XS.gguf",
)

# Shared client for the OpenAI provider; the LlamaNet path builds its own
# key-less client per call (see send_to_llamanet).
client = OpenAI(api_key=OPENAI_API_KEY)


def _collect_stream(completion):
    """Accumulate the text content of a streamed chat completion.

    Args:
        completion: An iterable of streaming chunks, each exposing
            ``choices[0].delta.content`` (``None`` for non-text deltas).

    Returns:
        str: The concatenated assistant response.
    """
    parts = []
    for chunk in completion:
        delta = chunk.choices[0].delta.content
        if delta is not None:
            parts.append(delta)
    return "".join(parts)


def send_to_chatgpt(msg_list):
    """Send a chat request to OpenAI and stream back the full response.

    Args:
        msg_list: List of chat message dicts (``role``/``content``).

    Returns:
        tuple[str, None]: The assistant's response text and ``None`` for
        usage (usage information is not available when streaming through
        LlamaNet). On failure, returns an ``"Error: ..."`` string instead
        of the response.
    """
    try:
        completion = client.chat.completions.create(
            model=OPENAI_MODEL,
            messages=msg_list,
            temperature=0.6,
            stream=True,
        )
        chatgpt_response = _collect_stream(completion)
        # Note: Usage information might not be available with LlamaNet
        chatgpt_usage = None
        return chatgpt_response, chatgpt_usage
    except Exception as e:
        # Best-effort API: report the failure in-band rather than raising,
        # so callers always get a (text, usage) pair.
        print(f"Error in send_to_chatgpt: {str(e)}")
        return f"Error: {str(e)}", None


def send_to_llamanet(msg_list):
    """Send a chat request to the local LlamaNet server and stream the reply.

    Args:
        msg_list: List of chat message dicts (``role``/``content``).

    Returns:
        tuple[str, None]: The assistant's response text and ``None`` for
        usage (LlamaNet doesn't provide usage information). On failure,
        returns an ``"Error: ..."`` string instead of the response.
    """
    try:
        # Create a new OpenAI client for LlamaNet (no API key needed)
        llamanet_client = OpenAI()
        completion = llamanet_client.chat.completions.create(
            model="gpt-3.5-turbo",  # LlamaNet uses this as a placeholder
            messages=msg_list,
            stream=True,
        )
        llamanet_response = _collect_stream(completion)
        # LlamaNet doesn't provide usage information
        llamanet_usage = None
        return llamanet_response, llamanet_usage
    except Exception as e:
        print(f"Error in send_to_llamanet: {str(e)}")
        return f"Error: {str(e)}", None


def send_to_llm(provider, msg_list):
    """Dispatch a chat request to the named provider.

    Args:
        provider: Either ``"llamanet"`` or ``"openai"``.
        msg_list: List of chat message dicts passed through unchanged.

    Returns:
        tuple[str, None]: ``(response_text, usage)`` from the provider.

    Raises:
        ValueError: If *provider* is not a recognized name.
    """
    if provider == "llamanet":
        return send_to_llamanet(msg_list)
    elif provider == "openai":
        return send_to_chatgpt(msg_list)
    else:
        raise ValueError(f"Unknown provider: {provider}")