File size: 401 Bytes
add165b
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
from huggingface_hub import InferenceClient

# Hugging Face model ID used for chat completion (Llama 3, 70B, instruct-tuned).
MODEL_NAME = "meta-llama/Meta-Llama-3-70b-Instruct"

def clean_transcript(prompt, transcript):
    """Stream a chat completion that cleans *transcript* per *prompt*.

    Sends a single user message combining the cleaning instructions and the
    raw transcript, prints each streamed token to stdout as it arrives, and
    returns the full generated text.

    Parameters
    ----------
    prompt : str
        Instructions describing how the transcript should be cleaned.
    transcript : str
        The raw transcript text to clean.

    Returns
    -------
    str
        The complete generated response (concatenation of streamed tokens).
    """
    # Bug fix: the original referenced an undefined global ``PROMPT`` and
    # silently ignored both arguments, raising NameError at call time.
    messages = [
        {"role": "user", "content": f"{prompt}\n\n{transcript}"}
    ]
    client = InferenceClient(model=MODEL_NAME)
    pieces = []
    for chunk in client.chat_completion(messages, max_tokens=200, stream=True):
        token = chunk.choices[0].delta.content
        # The final stream chunk may carry no content; the original would
        # have printed the literal string "None" in that case.
        if token:
            print(token, end="")
            pieces.append(token)
    return "".join(pieces)