import gradio as gr
from huggingface_hub import InferenceClient
import os
import requests
from typing import List, Tuple

# Inference API client setup
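# Requires an HF_TOKEN environment variable with access to the model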
hf_client = InferenceClient("CohereForAI/c4ai-command-r-plus-08-2024", token=os.getenv("HF_TOKEN"))
#hf_client = InferenceClient("CohereForAI/aya-23-35B", token=os.getenv("HF_TOKEN"))

def load_code(filename):
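    """Read a local source file and return its contents (or an error message on failure)."""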
    try:
        with open(filename, 'r', encoding='utf-8') as file:
            return file.read()
    except FileNotFoundError:
        return f"{filename} ํŒŒ์ผ์„ ์ฐพ์„ ์ˆ˜ ์—†์Šต๋‹ˆ๋‹ค."
    except Exception as e:
        return f"ํŒŒ์ผ์„ ์ฝ๋Š” ์ค‘ ์˜ค๋ฅ˜๊ฐ€ ๋ฐœ์ƒํ–ˆ์Šต๋‹ˆ๋‹ค: {str(e)}"

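# Reference source files that the assistant can explain on request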
fashion_code = load_code('fashion.cod')
uhdimage_code = load_code('uhdimage.cod')
MixGEN_code = load_code('mgen.cod')

def respond(
    message,
    history: List[Tuple[str, str]],
    system_message="",  # ๊ธฐ๋ณธ๊ฐ’ ์ถ”๊ฐ€
    max_tokens=1024,  # ๊ธฐ๋ณธ๊ฐ’ ์ถ”๊ฐ€
    temperature=0.7,  # ๊ธฐ๋ณธ๊ฐ’ ์ถ”๊ฐ€
    top_p=0.9,  # ๊ธฐ๋ณธ๊ฐ’ ์ถ”๊ฐ€
):
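    """Stream a chat completion that explains the loaded service code.

    Special commands ("ํŒจ์…˜ ์ฝ”๋“œ ์‹คํ–‰", "UHD ์ด๋ฏธ์ง€ ์ฝ”๋“œ ์‹คํ–‰", "MixGEN ์ฝ”๋“œ ์‹คํ–‰")
    inject the corresponding source file into the system message before answering.
    """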
    global fashion_code, uhdimage_code, MixGEN_code
    system_message = system_message or ""
    system_prefix = """๋ฐ˜๋“œ์‹œ ํ•œ๊ธ€๋กœ ๋‹ต๋ณ€ํ• ๊ฒƒ. ๋„ˆ๋Š” ์ฃผ์–ด์ง„ ์†Œ์Šค์ฝ”๋“œ๋ฅผ ๊ธฐ๋ฐ˜์œผ๋กœ \"์„œ๋น„์Šค ์‚ฌ์šฉ ์„ค๋ช… ๋ฐ ์•ˆ๋‚ด, qna๋ฅผ ํ•˜๋Š” ์—ญํ• ์ด๋‹ค\". ์•„์ฃผ ์นœ์ ˆํ•˜๊ณ  ์ž์„ธํ•˜๊ฒŒ 4000ํ† ํฐ ์ด์ƒ ์ž‘์„ฑํ•˜๋ผ. ๋„ˆ๋Š” ์ฝ”๋“œ๋ฅผ ๊ธฐ๋ฐ˜์œผ๋กœ ์‚ฌ์šฉ ์„ค๋ช… ๋ฐ ์งˆ์˜ ์‘๋‹ต์„ ์ง„ํ–‰ํ•˜๋ฉฐ, ์ด์šฉ์ž์—๊ฒŒ ๋„์›€์„ ์ฃผ์–ด์•ผ ํ•œ๋‹ค. ์ด์šฉ์ž๊ฐ€ ๊ถ๊ธˆํ•ด ํ•  ๋งŒ ํ•œ ๋‚ด์šฉ์— ์นœ์ ˆํ•˜๊ฒŒ ์•Œ๋ ค์ฃผ๋„๋ก ํ•˜๋ผ. ์ฝ”๋“œ ์ „์ฒด ๋‚ด์šฉ์— ๋Œ€ํ•ด์„œ๋Š” ๋ณด์•ˆ์„ ์œ ์ง€ํ•˜๊ณ , ํ‚ค ๊ฐ’ ๋ฐ ์—”๋“œํฌ์ธํŠธ์™€ ๊ตฌ์ฒด์ ์ธ ๋ชจ๋ธ์€ ๊ณต๊ฐœํ•˜์ง€ ๋งˆ๋ผ. """

    if message.lower() == "ํŒจ์…˜ ์ฝ”๋“œ ์‹คํ–‰":
        system_message += f"\n\nํŒจ์…˜ ์ฝ”๋“œ ๋‚ด์šฉ:\n{fashion_code}"
        message = "ํŒจ์…˜ ๊ฐ€์ƒํ”ผํŒ…์— ๋Œ€ํ•œ ๋‚ด์šฉ์„ ํ•™์Šตํ•˜์˜€๊ณ , ์„ค๋ช…ํ•  ์ค€๋น„๊ฐ€ ๋˜์–ด์žˆ๋‹ค๊ณ  ์•Œ๋ฆฌ๊ณ  ์„œ๋น„์Šค URL(https://aiqcamp-fash.hf.space)์„ ํ†ตํ•ด ํ…Œ์ŠคํŠธ ํ•ด๋ณด๋ผ๊ณ  ์ถœ๋ ฅํ•˜๋ผ."
    elif message.lower() == "uhd ์ด๋ฏธ์ง€ ์ฝ”๋“œ ์‹คํ–‰":
        system_message += f"\n\nUHD ์ด๋ฏธ์ง€ ์ฝ”๋“œ ๋‚ด์šฉ:\n{uhdimage_code}"
        message = "UHD ์ด๋ฏธ์ง€ ์ƒ์„ฑ์— ๋Œ€ํ•œ ๋‚ด์šฉ์„ ํ•™์Šตํ•˜์˜€๊ณ , ์„ค๋ช…ํ•  ์ค€๋น„๊ฐ€ ๋˜์–ด์žˆ๋‹ค๊ณ  ์•Œ๋ฆฌ๊ณ  ์„œ๋น„์Šค URL(https://openfree-ultpixgen.hf.space)์„ ํ†ตํ•ด ํ…Œ์ŠคํŠธ ํ•ด๋ณด๋ผ๊ณ  ์ถœ๋ ฅํ•˜๋ผ."
    elif message.lower() == "mixgen ์ฝ”๋“œ ์‹คํ–‰":
        system_message += f"\n\nMixGEN ์ฝ”๋“œ ๋‚ด์šฉ:\n{MixGEN_code}"
        message = "MixGEN3 ์ด๋ฏธ์ง€ ์ƒ์„ฑ์— ๋Œ€ํ•œ ๋‚ด์šฉ์„ ํ•™์Šตํ•˜์˜€๊ณ , ์„ค๋ช…ํ•  ์ค€๋น„๊ฐ€ ๋˜์–ด์žˆ๋‹ค๊ณ  ์•Œ๋ฆฌ๊ณ  ์„œ๋น„์Šค URL(https://openfree-mixgen3.hf.space)์„ ํ†ตํ•ด ํ…Œ์ŠคํŠธ ํ•ด๋ณด๋ผ๊ณ  ์ถœ๋ ฅํ•˜๋ผ."

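    # Assemble the conversation as OpenAI-style role/content messages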
    messages = [{"role": "system", "content": f"{system_prefix} {system_message}"}]
    for val in history:
        if val[0]:
            messages.append({"role": "user", "content": val[0]})
        if val[1]:
            messages.append({"role": "assistant", "content": val[1]})
    messages.append({"role": "user", "content": message})

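    # Stream the completion, appending each delta and yielding the accumulated text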
    response = ""
    for chunk in hf_client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        token = chunk.choices[0].delta.content
        if token:
            response += token
        yield response



# Gradio interface setup
demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        gr.Textbox(label="System Message", value=""),
        gr.Slider(minimum=1, maximum=8000, value=4000, label="Max Tokens"),
        gr.Slider(minimum=0, maximum=1, value=0.7, label="Temperature"),
        gr.Slider(minimum=0, maximum=1, value=0.9, label="Top P"),
    ],
    examples=[
        ["ํŒจ์…˜ ์ฝ”๋“œ ์‹คํ–‰"],
        ["UHD ์ด๋ฏธ์ง€ ์ฝ”๋“œ ์‹คํ–‰"],
        ["MixGEN ์ฝ”๋“œ ์‹คํ–‰"],        
        ["์ƒ์„ธํ•œ ์‚ฌ์šฉ ๋ฐฉ๋ฒ•์„ ๋งˆ์น˜ ํ™”๋ฉด์„ ๋ณด๋ฉด์„œ ์„ค๋ช…ํ•˜๋“ฏ์ด 4000 ํ† ํฐ ์ด์ƒ ์ž์„ธํžˆ ์„ค๋ช…ํ•˜๋ผ"],
        ["FAQ 20๊ฑด์„ ์ƒ์„ธํ•˜๊ฒŒ ์ž‘์„ฑํ•˜๋ผ. 4000ํ† ํฐ ์ด์ƒ ์‚ฌ์šฉํ•˜๋ผ."],                
        ["์‚ฌ์šฉ ๋ฐฉ๋ฒ•๊ณผ ์ฐจ๋ณ„์ , ํŠน์ง•, ๊ฐ•์ ์„ ์ค‘์‹ฌ์œผ๋กœ 4000 ํ† ํฐ ์ด์ƒ ์œ ํŠœ๋ธŒ ์˜์ƒ ์Šคํฌ๋ฆฝํŠธ ํ˜•ํƒœ๋กœ ์ž‘์„ฑํ•˜๋ผ"],
        ["๋ณธ ์„œ๋น„์Šค๋ฅผ SEO ์ตœ์ ํ™”ํ•˜์—ฌ ๋ธ”๋กœ๊ทธ ํฌ์ŠคํŠธ(๋ฐฐ๊ฒฝ ๋ฐ ํ•„์š”์„ฑ, ๊ธฐ์กด ์œ ์‚ฌ ์„œ๋น„์Šค์™€ ๋น„๊ตํ•˜์—ฌ ํŠน์žฅ์ , ํ™œ์šฉ์ฒ˜, ๊ฐ€์น˜, ๊ธฐ๋Œ€ํšจ๊ณผ, ๊ฒฐ๋ก ์„ ํฌํ•จ)๋กœ 4000 ํ† ํฐ ์ด์ƒ ์ž‘์„ฑํ•˜๋ผ"],
        ["ํŠนํ—ˆ ์ถœ์›์— ํ™œ์šฉํ•  ๊ธฐ์ˆ  ๋ฐ ๋น„์ฆˆ๋‹ˆ์Šค๋ชจ๋ธ ์ธก๋ฉด์„ ํฌํ•จํ•˜์—ฌ ํŠนํ—ˆ ์ถœ์›์„œ ๊ตฌ์„ฑ์— ๋งž๊ฒŒ ํ˜์‹ ์ ์ธ ์ฐฝ์˜ ๋ฐœ๋ช… ๋‚ด์šฉ์„ ์ค‘์‹ฌ์œผ๋กœ 4000ํ† ํฐ ์ด์ƒ ์ž‘์„ฑํ•˜๋ผ."],        
        ["๊ณ„์† ์ด์–ด์„œ ๋‹ต๋ณ€ํ•˜๋ผ"],
    ],

    theme="Nymbo/Nymbo_Theme", 
    cache_examples=False,  # disable example caching
)

if __name__ == "__main__":
    demo.launch()