Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -3,7 +3,7 @@ import logging
|
|
3 |
import gradio as gr
|
4 |
from transformers import pipeline
|
5 |
|
6 |
-
from llama_cpp_agent.providers import
|
7 |
from llama_cpp_agent import LlamaCppAgent, MessagesFormatterType
|
8 |
from llama_cpp_agent.chat_history import BasicChatHistory
|
9 |
from llama_cpp_agent.chat_history.messages import Roles
|
@@ -46,13 +46,13 @@ def get_messages_formatter_type(model_name):
|
|
46 |
else:
|
47 |
return MessagesFormatterType.CHATML
|
48 |
|
49 |
-
class HuggingFaceHubProvider(
|
50 |
def __init__(self, model):
|
51 |
self.model = model
|
52 |
|
53 |
def create_completion(self, prompt, **kwargs):
|
54 |
-
|
55 |
-
|
56 |
|
57 |
def get_provider_default_settings(self):
|
58 |
return self.model.model_kwargs
|
|
|
3 |
import gradio as gr
|
4 |
from transformers import pipeline
|
5 |
|
6 |
+
from llama_cpp_agent.providers.llama_cpp_endpoint_provider import LlamaCppEndpointSettings
|
7 |
from llama_cpp_agent import LlamaCppAgent, MessagesFormatterType
|
8 |
from llama_cpp_agent.chat_history import BasicChatHistory
|
9 |
from llama_cpp_agent.chat_history.messages import Roles
|
|
|
46 |
else:
|
47 |
return MessagesFormatterType.CHATML
|
48 |
|
49 |
+
class HuggingFaceHubProvider(LlamaCppEndpointSettings):
    """Adapter exposing a transformers pipeline through the
    llama-cpp-agent provider interface.

    Wraps a callable model (a transformers ``pipeline``) so that
    ``LlamaCppAgent`` can request completions from it and read its
    default generation settings.
    """

    def __init__(self, model):
        # `model` is expected to be a callable transformers pipeline
        # (text-generation): model(prompt) -> [{'generated_text': ...}]
        # — TODO confirm against the caller that constructs it.
        self.model = model

    def create_completion(self, prompt, **kwargs):
        """Generate a completion for *prompt*.

        Returns an llama.cpp/OpenAI-style payload:
        ``{'choices': [{'text': <generated text>}]}``.
        """
        # NOTE(review): extra generation kwargs are currently ignored;
        # if provider settings are meant to take effect they should be
        # forwarded to the pipeline — confirm against llama_cpp_agent
        # before changing, as unknown kwargs can raise.
        response = self.model(prompt)
        # A transformers text-generation pipeline returns a LIST of
        # candidate dicts; the original `response['generated_text']`
        # would raise TypeError on a list. Take the first candidate.
        if isinstance(response, list):
            response = response[0]
        return {'choices': [{'text': response['generated_text']}]}

    def get_provider_default_settings(self):
        # Defaults come straight from the wrapped pipeline's kwargs.
        return self.model.model_kwargs