Spaces:
Sleeping
Sleeping
Create app.py
Browse files
app.py
ADDED
|
@@ -0,0 +1,18 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Stream a short LLM-generated article from a locally served Ollama llama3 model."""

from langchain_community.chat_models import ChatOllama
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate

# Local Llama3 served by Ollama.
llm = ChatOllama(
    model="llama3",
    keep_alive=-1,    # -1 keeps the model resident in memory between requests
    temperature=0,    # deterministic sampling
    # ChatOllama has no `max_new_tokens` parameter; Ollama's option that caps
    # the number of generated tokens is `num_predict`.
    num_predict=512,
)

# Edit this template to change the generated text.
prompt = ChatPromptTemplate.from_template("Write me a 100 word article on {topic} from the perspective of a {profession}. ")

# prompt -> model -> plain-string output.
chain = prompt | llm | StrOutputParser()

# Stream tokens to stdout as they arrive (flush so partial output is visible).
for chunk in chain.stream({"topic": "LLMs", "profession": "labor"}):
    print(chunk, end="", flush=True)
|