Update app.py
app.py CHANGED
@@ -1,3 +1,5 @@
+# -*- coding: utf-8 -*-
+
 import gradio as gr
 from huggingface_hub import InferenceClient
 import os
@@ -5,16 +7,16 @@ import requests
 
 # Inference API client setup
 hf_client = InferenceClient("CohereForAI/c4ai-command-r-plus-08-2024", token=os.getenv("HF_TOKEN"))
-
+
 
 def load_code(filename):
     try:
         with open(filename, 'r', encoding='utf-8') as file:
             return file.read()
     except FileNotFoundError:
-        return f"{filename}
+        return f"{filename} not found"
     except Exception as e:
-        return f"
+        return f"file reading error: {str(e)}"
 
 fashion_code = load_code('fashion.cod')
 uhdimage_code = load_code('uhdimage.cod')
@@ -22,10 +24,10 @@ uhdimage_code = load_code('uhdimage.cod')
 def respond(
     message,
     history: list[tuple[str, str]],
-    system_message="",
-    max_tokens=7860,
-    temperature=0.8,
-    top_p=0.9,
+    system_message="",
+    max_tokens=7860,
+    temperature=0.8,
+    top_p=0.9,
 ):
     global fashion_code, uhdimage_code
     system_prefix = """