File size: 4,791 Bytes
3b6941d
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
import os
import sys
import asyncio
import logging
from azure.identity import ClientSecretCredential
from azure.ai.ml import MLClient
from azure.ai.ml.entities import OnlineEndpoint, OnlineDeployment
from azure.ai.openai import AzureOpenAIClient, ChatClient, SystemChatMessage, UserChatMessage, AssistantChatMessage
from transformers import AutoTokenizer
from typing import Dict, Any, List
import aiohttp
from cryptography.fernet import Fernet
import gradio as gr
import json
import torch
import psutil
import random

class EnvironmentManager:
    """Loads and validates the environment variables the application needs."""

    # Variables required for Azure service-principal auth, ML workspace
    # targeting, model selection, and payload encryption.
    DEFAULT_REQUIRED_VARS = [
        "AZURE_CLIENT_ID", "AZURE_CLIENT_SECRET", "AZURE_TENANT_ID",
        "AZURE_SUBSCRIPTION_ID", "AZURE_RESOURCE_GROUP", "AZURE_WORKSPACE_NAME",
        "AZURE_MODEL_ID", "ENCRYPTION_KEY", "AZURE_OPENAI_ENDPOINT",
    ]

    @staticmethod
    def load_env_variables(required_vars: List[str] = None) -> Dict[str, str]:
        """Read required variables from the process environment.

        Args:
            required_vars: Names to load. Defaults to
                ``DEFAULT_REQUIRED_VARS`` (the original hard-coded list),
                so existing callers are unaffected.

        Returns:
            Mapping of variable name to its value for every requested name.

        Raises:
            ValueError: If any requested variable is unset or empty; the
                message lists every missing name so one run surfaces all
                configuration gaps.
        """
        if required_vars is None:
            required_vars = EnvironmentManager.DEFAULT_REQUIRED_VARS

        env_vars = {var: os.getenv(var) for var in required_vars}
        # Empty strings count as missing: a blank credential is never valid.
        missing_vars = [var for var, value in env_vars.items() if not value]

        if missing_vars:
            raise ValueError(
                f"Missing required environment variables: {', '.join(missing_vars)}"
            )

        return env_vars

class AzureClient:
    """Builds authenticated Azure ML and Azure OpenAI clients.

    Configuration is supplied as the dict produced by
    ``EnvironmentManager.load_env_variables``; both client attributes stay
    ``None`` until ``authenticate()`` succeeds.
    """

    def __init__(self, env_vars: Dict[str, str]):
        self.env_vars = env_vars
        self.ml_client = None
        self.openai_client = None

    def authenticate(self):
        """Create a service-principal credential and both Azure clients.

        On any failure the error is logged and the process exits with
        status 1 — this class is only driven from the application entry
        point, so a hard exit is the intended failure mode.
        """
        env = self.env_vars
        try:
            credential = ClientSecretCredential(
                tenant_id=env["AZURE_TENANT_ID"],
                client_id=env["AZURE_CLIENT_ID"],
                client_secret=env["AZURE_CLIENT_SECRET"],
            )
            # One credential is shared by the ML workspace client and the
            # OpenAI endpoint client.
            self.ml_client = MLClient(
                credential,
                subscription_id=env["AZURE_SUBSCRIPTION_ID"],
                resource_group=env["AZURE_RESOURCE_GROUP"],
                workspace_name=env["AZURE_WORKSPACE_NAME"],
            )
            self.openai_client = AzureOpenAIClient(
                endpoint=env["AZURE_OPENAI_ENDPOINT"],
                credential=credential,
            )
        except Exception as exc:
            logging.error(f"Azure authentication failed: {exc}")
            sys.exit(1)
        else:
            logging.info("Azure authentication successful.")

class AICore:
    """Main AI Core system integrating Azure OpenAI chat functionality."""

    def __init__(self, env_vars: Dict[str, str]):
        self.env_vars = env_vars
        # EncryptionManager is defined elsewhere in this module; it is
        # constructed from the validated ENCRYPTION_KEY setting.
        self.encryption_manager = EncryptionManager(env_vars["ENCRYPTION_KEY"])
        credential = ClientSecretCredential(
            client_id=env_vars["AZURE_CLIENT_ID"],
            client_secret=env_vars["AZURE_CLIENT_SECRET"],
            tenant_id=env_vars["AZURE_TENANT_ID"],
        )
        self.openai_client = AzureOpenAIClient(
            endpoint=env_vars["AZURE_OPENAI_ENDPOINT"],
            credential=credential,
        )
        # Chat client bound to the configured model deployment.
        self.chat_client = self.openai_client.get_chat_client(
            env_vars["AZURE_MODEL_ID"]
        )

    async def generate_response(self, query: str) -> Dict[str, Any]:
        """Encrypt the query and request a chat completion for it.

        Returns:
            ``{"encrypted_query": ..., "model_response": ...}`` on success,
            or ``{"error": ...}`` if any step fails (the failure is logged).

        NOTE(review): ``complete_chat`` is invoked synchronously here even
        though this coroutine never awaits — confirm whether the client
        offers an async variant to avoid blocking the event loop.
        """
        try:
            # Encrypt first: if encryption fails, no network call is made.
            encrypted_query = self.encryption_manager.encrypt(query)
            messages = [
                SystemChatMessage("You are a helpful AI assistant."),
                UserChatMessage(query),
            ]
            completion = self.chat_client.complete_chat(messages)
            return {
                "encrypted_query": encrypted_query,
                "model_response": completion.content[0].text,
            }
        except Exception as exc:
            logging.error(f"Error during response generation: {exc}")
            return {"error": "Failed to generate response"}

# Main Application
def main():
    """Application entry point: load config, authenticate, launch the UI.

    Any initialization failure is logged and the process exits with
    status 1.
    """
    logging.basicConfig(level=logging.INFO)
    try:
        env_vars = EnvironmentManager.load_env_variables()

        azure_client = AzureClient(env_vars)
        azure_client.authenticate()

        ai_core = AICore(env_vars)

        async def _respond_async(message: str) -> str:
            result = await ai_core.generate_response(message)
            return result.get("model_response", "Error: Response not available")

        def respond(message: str) -> str:
            # Gradio invokes a synchronous handler; bridge into the async
            # core with a fresh event loop per request.
            return asyncio.run(_respond_async(message))

        # share=True publishes a public Gradio link for the interface.
        gr.Interface(
            fn=respond,
            inputs="text",
            outputs="text",
            title="Advanced AI Chat Interface",
        ).launch(share=True)

    except Exception as exc:
        logging.error(f"Application initialization failed: {exc}")
        sys.exit(1)


if __name__ == "__main__":
    main()