from transformers import AutoTokenizer
import transformers
import os
import sys
import fire
import torch
import gradio as gr
def main(
    base_model="ise-uiuc/Magicoder-S-DS-6.7B",
):
    # Load the model into a text-generation pipeline (bfloat16, automatic device placement).
    pipeline = transformers.pipeline(
        "text-generation", model=base_model, torch_dtype=torch.bfloat16, device_map="auto"
    )
    def evaluate_magicoder(
        instruction,
        temperature=1,
        max_length=2048,
    ):
        # Magicoder's instruction/response prompt template.
        MAGICODER_PROMPT = """You are an exceptionally intelligent coding assistant that consistently delivers accurate and reliable responses to user instructions.

@@ Instruction
{instruction}

@@ Response
"""
        prompt = MAGICODER_PROMPT.format(instruction=instruction)
        # Sample when temperature > 0; otherwise fall back to greedy decoding.
        if temperature > 0:
            sequences = pipeline(
                prompt,
                do_sample=True,
                temperature=temperature,
                max_length=max_length,
            )
        else:
            sequences = pipeline(
                prompt,
                max_length=max_length,
            )
        # Strip the prompt from the output and return the first completion.
        for seq in sequences:
            generated_text = seq["generated_text"].replace(prompt, "")
            return generated_text
    # Build the Gradio UI: instruction box plus decoding controls in, generated text out.
    gr.Interface(
        fn=evaluate_magicoder,
        inputs=[
            gr.components.Textbox(
                lines=3,
                label="Instruction",
                placeholder="Anything you want to ask Magicoder?",
            ),
            gr.components.Slider(minimum=0, maximum=1, value=0, label="Temperature"),
            gr.components.Slider(
                minimum=1, maximum=2048, step=1, value=1024, label="Max tokens"
            ),
        ],
        outputs=[
            gr.components.Textbox(
                lines=30,
                label="Output",
            )
        ],
        title="Magicoder",
        description="This is a playground for Magicoder-S-DS-6.7B! Follow us on GitHub: https://github.com/ise-uiuc/magicoder and Hugging Face: https://huggingface.co/ise-uiuc.",
    ).queue().launch()
if __name__ == "__main__":
fire.Fire(main)
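
# Usage sketch (assumes this file is saved as app.py; python-fire maps main()'s
# keyword arguments onto CLI flags):
#   python app.py
#   python app.py --base_model="ise-uiuc/Magicoder-S-DS-6.7B"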