Update app.py
app.py CHANGED
@@ -12,13 +12,14 @@ all_input = st.text_area('模型输入', value="""
 top_p = st.slider('top_p', 0.0, 1.0, 0.95)
 temperature = st.slider('temperature', 0.0, 1.0, 0.85)
 max_tokens = st.slider('max tokens', 4, 512, 64)
+model_type = st.selectbox('model', ('medium', 'large', 'xl'))
 
 
 def completion(prompt):
     start = time.monotonic()
     resp = requests.post('https://welm.weixin.qq.com/v1/completions', json={
         'prompt': prompt,
-        'model':
+        'model': model_type,
         'max_tokens': max_tokens,
         'temperature': temperature,
         'top_p': top_p,
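Note: the commit adds a model selector widget and passes its value through to the request body, replacing the previously incomplete 'model': entry. Below is a minimal standalone sketch of the resulting call, assuming only what is visible in this diff; the prompt value is a placeholder, and any authentication headers the full app.py may set are omitted here.

import requests

# Defaults mirror the Streamlit slider defaults shown in the diff.
top_p = 0.95
temperature = 0.85
max_tokens = 64
model_type = 'medium'  # now chosen via st.selectbox: 'medium', 'large', or 'xl'

prompt = 'example prompt'  # placeholder; the app builds this from its text area input

resp = requests.post('https://welm.weixin.qq.com/v1/completions', json={
    'prompt': prompt,
    'model': model_type,        # the newly added, user-selectable field
    'max_tokens': max_tokens,
    'temperature': temperature,
    'top_p': top_p,
})
print(resp.status_code)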