import gradio as gr
from flores import code_mapping
from functools import lru_cache
import openai  # used to call an OpenAI-compatible external API

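# Sort the language names alphabetically so the dropdown menus list them in a stable order.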
code_mapping = dict(sorted(code_mapping.items(), key=lambda item: item[0]))
flores_codes = list(code_mapping.keys())
target_languages = flores_codes  # simplified list: the same choices are offered for source and target

# The OpenAI-compatible client is constructed inside _translate() below.


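# Cache repeated (text, src_lang, tgt_lang) requests so identical translations are not re-sent to the API.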
@lru_cache(maxsize=100)
def translate(text: str, src_lang: str, tgt_lang: str):
    if not src_lang:
        raise gr.Error("The source language is empty! Please choose it in the dropdown list.")
    if not tgt_lang:
        raise gr.Error("The target language is empty! Please choose it in the dropdown list.")
    return _translate(text, src_lang, tgt_lang)

def _translate(text: str, src_lang: str, tgt_lang: str):
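    # Build a plain translation prompt and send it to an OpenAI-compatible chat endpoint.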
    prompt = f"Translate the following text from {src_lang} to {tgt_lang}: {text}"
    openai_client = openai.OpenAI(
        base_url="https://ssapi.cppbear.site/v1",
        api_key="sk-5VFSx79t3fLQk4BtN68WTeeRckZ12CONElR7RsK39zrQysji",
    )
    response = openai_client.chat.completions.create(
        model="gemini-2.5-flash",  # e.g. gpt-3.5-turbo or any other compatible model
        messages=[{"role": "user", "content": prompt}],
        max_tokens=30240,
        temperature=0.0
    )
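    # Debug: log the raw API response before extracting the translated text.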
    print(response)
    return response.choices[0].message.content.strip()

description = """
<div style="text-align: center;">
    <img src="https://github.com/user-attachments/assets/c42e675e-497c-4508-8bb9-093ad4d1f216" alt="UNESCO Meta Hugging Face Banner" style="max-width: 800px; width: 100%; margin: 0 auto;">
    <h1 style="color: #0077be;">Seed-X, powered by ByteDance</h1>
</div>
We are excited to introduce Seed-X, a powerful series of open-source multilingual translation language models, including an instruction model, a reinforcement learning model, and a reward model. It pushes the boundaries of translation capabilities within 7 billion parameters. We develop Seed-X as an accessible, off-the-shelf tool to support the community in advancing translation research and applications.
"""
disclaimer = """
We are excited to introduce Seed-X, a powerful series of open-source multilingual translation language models, including an instruction model, a reinforcement learning model, and a reward model. It pushes the boundaries of translation capabilities within 7 billion parameters. We develop Seed-X as an accessible, off-the-shelf tool to support the community in advancing translation research and applications.
"""

examples_inputs = [["Seed-X is indeed a good translation model", "English", "Chinese"]]

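# Build the Gradio UI: language dropdowns, input/output textboxes, and a translate button.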
with gr.Blocks() as demo:
    gr.Markdown(description)
    with gr.Row():
        src_lang = gr.Dropdown(label="Source Language", choices=flores_codes)
        target_lang = gr.Dropdown(label="Target Language", choices=target_languages)
    with gr.Row():
        input_text = gr.Textbox(label="Input Text", lines=6)
    with gr.Row():
        btn = gr.Button("Translate text")
    with gr.Row():
        output = gr.Textbox(label="Output Text", lines=6)
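    # Wire the button to translate(); the result appears in the output textbox.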
    btn.click(
        translate,
        inputs=[input_text, src_lang, target_lang],
        outputs=output,
    )
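    # Example row shown below the demo; cache_examples=True pre-computes its output at startup.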
    examples = gr.Examples(
        examples=examples_inputs,
        inputs=[input_text, src_lang, target_lang],
        fn=translate,
        outputs=output,
        cache_examples=True,
    )
    with gr.Row():
        gr.Markdown(disclaimer)
demo.launch()