"""Gradio chat demo for Meta-Llama-3-8B-Instruct, served from a local
`transformers` text-generation pipeline.

For information on how to customize the ChatInterface, peruse the gradio docs:
https://www.gradio.app/docs/chatinterface
"""
import gradio as gr
import torch  # fix: used for torch.bfloat16 below but was never imported
import transformers  # fix: used for transformers.pipeline below but was never imported
from huggingface_hub import InferenceClient  # currently unused; kept for the hosted-inference variant

model_id = "meta-llama/Meta-Llama-3-8B-Instruct"

# Load the model once at module import. bfloat16 halves memory vs. float32;
# device="cuda" assumes a GPU is available — TODO confirm for the target host.
pipeline = transformers.pipeline(
    "text-generation",
    model=model_id,
    model_kwargs={"torch_dtype": torch.bfloat16},
    device="cuda",
)


def chat_function(message, history, system_prompt, max_new_tokens, temperature):
    """Generate one assistant reply for the Gradio ChatInterface.

    Args:
        message: The user's current message.
        history: Prior (user, assistant) turns. NOTE(review): accepted because
            gr.ChatInterface requires the parameter, but not used — every turn
            is answered from the system prompt + current message only.
        system_prompt: System message prepended to the conversation.
        max_new_tokens: Generation length cap.
        temperature: Sampling temperature from the UI slider; 0 selects
            greedy decoding.

    Returns:
        The generated completion text, with the prompt prefix stripped.
    """
    messages = [
        {"role": "system", "content": system_prompt},
        {"role": "user", "content": message},
    ]
    prompt = pipeline.tokenizer.apply_chat_template(
        messages,
        tokenize=False,
        add_generation_prompt=True,
    )
    # Llama-3 ends assistant turns with <|eot_id|>; stop on either that or
    # the tokenizer's regular EOS token.
    terminators = [
        pipeline.tokenizer.eos_token_id,
        pipeline.tokenizer.convert_tokens_to_ids("<|eot_id|>"),
    ]
    gen_kwargs = {
        "max_new_tokens": max_new_tokens,
        "eos_token_id": terminators,
        "top_p": 0.9,
    }
    # Bug fix: the original passed `temperature + 0.1`, so the model silently
    # received 0.1 more than the slider showed (presumably a workaround for
    # temperature=0 with do_sample=True, which errors). Use the slider value
    # as-is and fall back to greedy decoding when it is 0.
    if temperature > 0:
        gen_kwargs["do_sample"] = True
        gen_kwargs["temperature"] = temperature
    else:
        gen_kwargs["do_sample"] = False
    outputs = pipeline(prompt, **gen_kwargs)
    # The pipeline returns prompt + completion; return only the completion.
    return outputs[0]["generated_text"][len(prompt):]


demo = gr.ChatInterface(
    chat_function,
    textbox=gr.Textbox(placeholder="Enter message here", container=False, scale=7),
    chatbot=gr.Chatbot(height=400),
    additional_inputs=[
        gr.Textbox("You are helpful AI", label="System Prompt"),
        gr.Slider(500, 4000, label="Max New Tokens"),
        gr.Slider(0, 1, label="Temperature", value=0.7),
    ],
)

if __name__ == "__main__":
    demo.launch()