import os

from openai import OpenAI

# Set the AI/ML API key and base URL
aiml_api_key = os.getenv("AIML_API_KEY")
base_url = "https://api.aimlapi.com/"

# Initialize the OpenAI client for the AI/ML API
client = OpenAI(
    api_key=aiml_api_key,
    base_url=base_url,
)

# Function to call the AI/ML API
def call_aiml_api(prompt, max_tokens=2000):
    try:
        # Call the AI/ML API with the given prompt and max tokens
        chat_completion = client.chat.completions.create(
            model="o1-mini",
            messages=[
                {"role": "user", "content": prompt},
            ],
            max_tokens=max_tokens,
        )
        # Extract and return the response content
        response = chat_completion.choices[0].message.content
        return response
    except Exception as e:
        return f"Error while calling the AI/ML API: {str(e)}"

# Example usage (for testing purposes)
if __name__ == "__main__":
    test_prompt = "Explain how backpropagation works in simple terms."
    response = call_aiml_api(test_prompt)
    print("API Response:", response)