# truesyncai / app.py
from flask import Flask, request, jsonify, Response
import hmac, hashlib, secrets, time
import requests
import os
import json

app = Flask(__name__)

# Secret key for API authentication (load from the environment in production)
SECRET_KEY = os.getenv("SECRET_KEY")
if not SECRET_KEY:
    # Fail fast with a clear message instead of raising AttributeError on the first request
    raise RuntimeError("SECRET_KEY environment variable must be set")

# Track API statistics
request_count = 0
start_time = time.time()

@app.route('/status', methods=['GET'])
def status():
    uptime_seconds = int(time.time() - start_time)

    # Convert uptime to days, hours, minutes, and seconds
    days = uptime_seconds // 86400
    hours = (uptime_seconds % 86400) // 3600
    minutes = (uptime_seconds % 3600) // 60
    seconds = uptime_seconds % 60
    uptime_str = f"{days} days, {hours} hours, {minutes} minutes and {seconds} seconds"

    return jsonify({
        "status": "API is running",
        "total_requests": request_count,
        "uptime": uptime_str
    })
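
# Example response from GET /status (values shown are illustrative):
#     {"status": "API is running", "total_requests": 42,
#      "uptime": "0 days, 3 hours, 15 minutes and 9 seconds"}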

# Generate API Key
@app.route("/generate_api_key", methods=["POST"])
def generate_api_key():
    random_part = secrets.token_hex(16)
    signature = hmac.new(SECRET_KEY.encode(), random_part.encode(), hashlib.sha256).hexdigest()[:16]
    api_key = f"TrueSyncAI-{random_part}-{signature}"
    return jsonify({"api_key": api_key})
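
# Illustrative usage (host and port assumed from app.run below, not part of this code):
#     curl -X POST http://localhost:7860/generate_api_key
#     -> {"api_key": "TrueSyncAI-<32 hex chars>-<16 hex chars of the HMAC-SHA256 signature>"}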

# Validate API Key
def validate_api_key(api_key):
    parts = api_key.split("-")
    if len(parts) != 3 or parts[0] != "TrueSyncAI":
        return False
    random_part, received_signature = parts[1], parts[2]
    expected_signature = hmac.new(SECRET_KEY.encode(), random_part.encode(), hashlib.sha256).hexdigest()[:16]
    # Constant-time comparison avoids leaking the signature through timing differences
    return hmac.compare_digest(expected_signature, received_signature)
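
# A minimal sketch of the round trip, using the same primitives as above
# (names are illustrative; kept as a comment so it never runs at import time):
#     random_part = secrets.token_hex(16)
#     sig = hmac.new(SECRET_KEY.encode(), random_part.encode(), hashlib.sha256).hexdigest()[:16]
#     assert validate_api_key(f"TrueSyncAI-{random_part}-{sig}")          # valid key
#     assert not validate_api_key(f"TrueSyncAI-{random_part}-deadbeef")   # wrong signature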

# Stream response from DeepInfra
def stream_response(query: str):
    json_data = {
        'model': 'meta-llama/Llama-3.3-70B-Instruct-Turbo',
        'messages': [
            {'role': 'system', 'content': 'You are TrueSyncAI, a pioneering AI startup founded by Sujal Rajpoot and Anuj Rajpoot. Your mission is to bring virtual versions of individuals into the real world, redefining human-AI interaction.'},
            {'role': 'user', 'content': query}
        ],
        'stream': True, 'max_tokens': 2048, 'temperature': 0.2, 'top_p': 1
    }
    try:
        response = requests.post(
            'https://api.deepinfra.com/v1/openai/chat/completions',
            json=json_data, stream=True, timeout=None
        )
        response.raise_for_status()

        def generate():
            for value in response.iter_lines(decode_unicode=True, chunk_size=1000):
                # Skip blank keep-alive lines; payload lines are prefixed with "data:"
                if not value or not value.startswith("data:"):
                    continue
                payload = value[len("data:"):].strip()
                if payload == "[DONE]":
                    break
                try:
                    content = json.loads(payload)['choices'][0]['delta']['content']
                    if content:
                        yield content
                except (json.JSONDecodeError, KeyError, IndexError, TypeError):
                    # Skip chunks without text content
                    continue

        return Response(generate(), content_type='text/plain')
    except requests.RequestException:
        return jsonify({"error": "API Server is under maintenance. Please try again later."}), 503
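
# Illustrative stream handling, assuming DeepInfra's OpenAI-compatible SSE format:
#     data: {"choices": [{"delta": {"content": "Hello"}}]}   ->  yields "Hello"
#     data: {"choices": [{"delta": {}}]}                     ->  skipped (no content)
#     data: [DONE]                                           ->  stream ends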

# Chat Endpoint with streaming response
@app.route("/v1/chat/completions", methods=["POST"])
def chat():
    global request_count
    # silent=True returns None instead of raising on a missing/malformed JSON body
    data = request.get_json(silent=True) or {}
    api_key = data.get("api_key")
    message = data.get("message", "").strip()

    if not api_key or not validate_api_key(api_key):
        return jsonify({"error": "Invalid API Key"}), 401
    if not message:
        return jsonify({"error": "Message cannot be empty"}), 400

    request_count += 1
    return stream_response(message)
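
# Illustrative client call (host and port assumed from app.run below):
#     curl -X POST http://localhost:7860/v1/chat/completions \
#          -H "Content-Type: application/json" \
#          -d '{"api_key": "TrueSyncAI-...", "message": "Hello"}'
# The reply streams back as plain text chunks rather than a single JSON object.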

if __name__ == "__main__":
    app.run(host="0.0.0.0", port=7860, threaded=True)  # Enable threaded mode for better concurrency