import os
import sys
import asyncio
import logging
from typing import Any, Dict

import gradio as gr
from azure.identity import ClientSecretCredential, get_bearer_token_provider
from azure.ai.ml import MLClient
from cryptography.fernet import Fernet
from openai import AzureOpenAI


class EnvironmentManager:
    """Handles loading and validation of environment variables."""

    @staticmethod
    def load_env_variables() -> Dict[str, str]:
        required_vars = [
            "AZURE_CLIENT_ID",
            "AZURE_CLIENT_SECRET",
            "AZURE_TENANT_ID",
            "AZURE_SUBSCRIPTION_ID",
            "AZURE_RESOURCE_GROUP",
            "AZURE_WORKSPACE_NAME",
            "AZURE_MODEL_ID",
            "ENCRYPTION_KEY",
            "AZURE_OPENAI_ENDPOINT",
        ]
        env_vars = {var: os.getenv(var) for var in required_vars}
        missing_vars = [var for var, value in env_vars.items() if not value]
        if missing_vars:
            raise ValueError(
                f"Missing required environment variables: {', '.join(missing_vars)}"
            )
        return env_vars


class EncryptionManager:
    """Encrypts and decrypts payloads with a Fernet symmetric key."""

    def __init__(self, key: str):
        # ENCRYPTION_KEY must be a URL-safe base64-encoded 32-byte Fernet key.
        self.fernet = Fernet(key)

    def encrypt(self, plaintext: str) -> str:
        return self.fernet.encrypt(plaintext.encode()).decode()

    def decrypt(self, token: str) -> str:
        return self.fernet.decrypt(token.encode()).decode()


def build_openai_client(
    env_vars: Dict[str, str], credential: ClientSecretCredential
) -> AzureOpenAI:
    """Creates an Azure OpenAI client authenticated via an Entra ID token provider."""
    token_provider = get_bearer_token_provider(
        credential, "https://cognitiveservices.azure.com/.default"
    )
    return AzureOpenAI(
        azure_endpoint=env_vars["AZURE_OPENAI_ENDPOINT"],
        azure_ad_token_provider=token_provider,
        api_version="2024-02-01",
    )


class AzureClient:
    """Sets up the Azure ML and Azure OpenAI clients with the required credentials."""

    def __init__(self, env_vars: Dict[str, str]):
        self.env_vars = env_vars
        self.ml_client = None
        self.openai_client = None

    def authenticate(self) -> None:
        try:
            credential = ClientSecretCredential(
                tenant_id=self.env_vars["AZURE_TENANT_ID"],
                client_id=self.env_vars["AZURE_CLIENT_ID"],
                client_secret=self.env_vars["AZURE_CLIENT_SECRET"],
            )
            self.ml_client = MLClient(
                credential,
                subscription_id=self.env_vars["AZURE_SUBSCRIPTION_ID"],
                resource_group_name=self.env_vars["AZURE_RESOURCE_GROUP"],
                workspace_name=self.env_vars["AZURE_WORKSPACE_NAME"],
            )
            self.openai_client = build_openai_client(self.env_vars, credential)
            logging.info("Azure authentication successful.")
        except Exception as e:
            logging.error(f"Azure authentication failed: {e}")
            sys.exit(1)


class AICore:
    """Main AI Core system integrating Azure OpenAI chat functionality."""

    def __init__(self, env_vars: Dict[str, str]):
        self.env_vars = env_vars
        self.encryption_manager = EncryptionManager(env_vars["ENCRYPTION_KEY"])
        credential = ClientSecretCredential(
            tenant_id=env_vars["AZURE_TENANT_ID"],
            client_id=env_vars["AZURE_CLIENT_ID"],
            client_secret=env_vars["AZURE_CLIENT_SECRET"],
        )
        self.openai_client = build_openai_client(env_vars, credential)
        # AZURE_MODEL_ID names the Azure OpenAI deployment to target.
        self.deployment = env_vars["AZURE_MODEL_ID"]

    async def generate_response(self, query: str) -> Dict[str, Any]:
        try:
            encrypted_query = self.encryption_manager.encrypt(query)
            # The openai client call is synchronous; run it off the event loop thread.
            chat_completion = await asyncio.to_thread(
                self.openai_client.chat.completions.create,
                model=self.deployment,
                messages=[
                    {"role": "system", "content": "You are a helpful AI assistant."},
                    {"role": "user", "content": query},
                ],
            )
            model_response = chat_completion.choices[0].message.content
            return {
                "encrypted_query": encrypted_query,
                "model_response": model_response,
            }
        except Exception as e:
            logging.error(f"Error during response generation: {e}")
            return {"error": "Failed to generate response"}


# Main Application
def main() -> None:
    logging.basicConfig(level=logging.INFO)
    try:
        env_vars = EnvironmentManager.load_env_variables()

        # Validate Azure credentials and workspace access up front.
        azure_client = AzureClient(env_vars)
        azure_client.authenticate()

        ai_core = AICore(env_vars)

        # Example Gradio interface
        async def async_respond(message: str) -> str:
            response_data = await ai_core.generate_response(message)
            return response_data.get("model_response", "Error: Response not available")

        def respond(message: str) -> str:
            return asyncio.run(async_respond(message))

        interface = gr.Interface(
            fn=respond,
            inputs="text",
            outputs="text",
            title="Advanced AI Chat Interface",
        )
        interface.launch(share=True)
    except Exception as e:
        logging.error(f"Application initialization failed: {e}")
        sys.exit(1)


if __name__ == "__main__":
    main()
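
# ---------------------------------------------------------------------------
# Setup note: ENCRYPTION_KEY must be a valid Fernet key, or EncryptionManager
# will raise at startup. A minimal sketch for generating one, using only the
# `cryptography` dependency already imported above; run it once and export the
# printed value as ENCRYPTION_KEY before launching the app:
#
#     python -c "from cryptography.fernet import Fernet; print(Fernet.generate_key().decode())"
# ---------------------------------------------------------------------------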