Update app.py
app.py CHANGED
@@ -1,21 +1,24 @@
+import os
 from huggingface_hub import InferenceClient
 
-
-
-
-
+# Set your Hugging Face API key as an environment variable for security
+os.environ["HUGGINGFACE_API_KEY"] = "hf_xxxxxxxxxxxxxxxxxxxxxxxx"
+
+# Initialize the client without the 'provider' argument
+client = InferenceClient()
 
 messages = [
-
-
-
-
+    {
+        "role": "user",
+        "content": "What is the capital of France?"
+    }
 ]
 
+# Use the chat API with the specified model and messages
 completion = client.chat.completions.create(
     model="meta-llama/Llama-2-7b-chat-hf",
-
-
+    messages=messages,
+    max_tokens=500
 )
 
-print(completion.choices[0].message)
+print(completion.choices[0].message)
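
Note that huggingface_hub does not read a HUGGINGFACE_API_KEY variable on its own (when no token is passed it looks for HF_TOKEN), so the hardcoded assignment in this commit only works if the key is also handed to the client. As a minimal sketch outside this commit, assuming the key is stored as a Space secret exposed to the container as HF_TOKEN, the client could pick it up explicitly instead of embedding the key in app.py:

import os

from huggingface_hub import InferenceClient

# Assumption: the API key is stored as a Space secret and exposed as the
# HF_TOKEN environment variable, so no key is hardcoded in the source file.
client = InferenceClient(token=os.environ.get("HF_TOKEN"))

messages = [
    {"role": "user", "content": "What is the capital of France?"}
]

# Same OpenAI-style chat call as in the committed app.py
completion = client.chat.completions.create(
    model="meta-llama/Llama-2-7b-chat-hf",
    messages=messages,
    max_tokens=500,
)

print(completion.choices[0].message)

Reading the token from a secret also keeps the hf_... key out of the repository history, which matters because Space files are publicly browsable.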