Raiff1982 committed on
Commit
a892226
·
verified ·
1 Parent(s): 09f73f8

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +52 -24
app.py CHANGED
@@ -1,32 +1,25 @@
1
  import os
2
- import sys
3
- import asyncio
4
  import logging
5
- import openai # Correct OpenAI import
6
- from transformers import AutoTokenizer
7
- from typing import Dict, Any, List
8
- import aiohttp
9
- from cryptography.fernet import Fernet
10
  import gradio as gr
11
- import json
12
- import torch
13
- import psutil
14
- import random
 
 
15
 
16
  class EnvironmentManager:
17
- """Handles loading and validation of environment variables."""
18
  @staticmethod
19
  def load_env_variables() -> Dict[str, str]:
20
- required_vars = [
21
- "OPENAI_API_KEY", "ENCRYPTION_KEY"
22
- ]
23
-
24
  env_vars = {var: os.getenv(var) for var in required_vars}
 
25
  missing_vars = [var for var, value in env_vars.items() if not value]
26
-
27
  if missing_vars:
28
- raise ValueError(f"Missing required environment variables: {', '.join(missing_vars)}")
29
-
30
  return env_vars
31
 
32
  class EncryptionManager:
@@ -41,7 +34,7 @@ class EncryptionManager:
41
  return self.cipher.decrypt(encrypted_data.encode()).decode()
42
 
43
  class AICore:
44
- """Main AI Core system integrating OpenAI chat functionality."""
45
  def __init__(self, env_vars: Dict[str, str]):
46
  self.env_vars = env_vars
47
  self.encryption_manager = EncryptionManager(env_vars["ENCRYPTION_KEY"])
@@ -51,8 +44,8 @@ class AICore:
51
  try:
52
  encrypted_query = self.encryption_manager.encrypt(query)
53
 
54
- chat_completion = await openai.ChatCompletion.acreate(
55
- model="gpt-4-turbo", # Ensure you use a valid model name
56
  messages=[
57
  {"role": "system", "content": "You are a helpful AI assistant."},
58
  {"role": "user", "content": query}
@@ -60,6 +53,41 @@ class AICore:
60
  api_key=self.openai_api_key
61
  )
62
 
63
- model_response = chat_completion['choices'][0]['message']['content']
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
64
 
65
- return
 
 
1
  import os
2
+ import openai
 
3
  import logging
 
 
 
 
 
4
  import gradio as gr
5
+ import asyncio
6
+ from typing import Dict, Any
7
+ from cryptography.fernet import Fernet
8
+
9
+ # Configure logging
10
+ logging.basicConfig(level=logging.INFO)
11
 
12
class EnvironmentManager:
    """Handles environment variable validation."""

    @staticmethod
    def load_env_variables() -> Dict[str, str]:
        """Read the required environment variables.

        Returns:
            Mapping of variable name to its (non-empty) value.

        Raises:
            ValueError: if any required variable is unset or empty.
        """
        names = ("OPENAI_API_KEY", "ENCRYPTION_KEY")
        loaded: Dict[str, str] = {}
        absent = []
        for name in names:
            value = os.getenv(name)
            loaded[name] = value
            if not value:
                absent.append(name)
        if absent:
            raise ValueError(f"Missing environment variables: {', '.join(absent)}")
        return loaded
24
 
25
  class EncryptionManager:
 
34
  return self.cipher.decrypt(encrypted_data.encode()).decode()
35
 
36
  class AICore:
37
+ """AI Core system integrating OpenAI's GPT API."""
38
  def __init__(self, env_vars: Dict[str, str]):
39
  self.env_vars = env_vars
40
  self.encryption_manager = EncryptionManager(env_vars["ENCRYPTION_KEY"])
 
44
  try:
45
  encrypted_query = self.encryption_manager.encrypt(query)
46
 
47
+ response = await openai.ChatCompletion.acreate(
48
+ model="ft:gpt-4o-2024-08-06:raiffs-bits:codettev7", # Ensure this model is supported
49
  messages=[
50
  {"role": "system", "content": "You are a helpful AI assistant."},
51
  {"role": "user", "content": query}
 
53
  api_key=self.openai_api_key
54
  )
55
 
56
+ model_response = response["choices"][0]["message"]["content"]
57
+
58
+ return {
59
+ "encrypted_query": encrypted_query,
60
+ "model_response": model_response
61
+ }
62
+ except Exception as e:
63
+ logging.error(f"Error generating response: {e}")
64
+ return {"error": "Failed to generate response"}
65
+
66
# Hugging Face Gradio App
def main():
    """Build and launch the Gradio chat interface backed by AICore.

    Loads the required environment variables, constructs the AI core, and
    serves a simple text-in/text-out interface on port 7860.

    Raises:
        Exception: re-raises any startup failure after logging it, so the
        process exits non-zero instead of terminating silently.
    """
    try:
        env_vars = EnvironmentManager.load_env_variables()
        ai_core = AICore(env_vars)

        async def async_respond(message: str) -> str:
            # Await the core's coroutine; fall back to a readable error string
            # when the response dict carries no "model_response" key.
            response_data = await ai_core.generate_response(message)
            return response_data.get("model_response", "Error: Response not available")

        def respond(message: str) -> str:
            # Bridge Gradio's synchronous callback to the async core
            # (a fresh event loop per request via asyncio.run).
            return asyncio.run(async_respond(message))

        interface = gr.Interface(
            fn=respond,
            inputs="text",
            outputs="text",
            title="AI Chatbot - Hugging Face Space"
        )

        # Bind to 0.0.0.0 so the app is reachable inside the Spaces container.
        interface.launch(server_name="0.0.0.0", server_port=7860)

    except Exception as e:
        # Fix: previously this only logged and returned, so a startup failure
        # exited with status 0 and the platform showed no error. Log, then
        # re-raise so the failure is surfaced.
        logging.error(f"Application failed to start: {e}")
        raise

if __name__ == "__main__":
    main()