mrm8488 committed
Commit d8728e8 · 1 Parent(s): 6e1e776

Update app.py

Files changed (1)
  1. app.py +2 -4
app.py CHANGED
@@ -6,11 +6,9 @@ device = 'cuda' if torch.cuda.is_available() else 'cpu'
 
 title = "Santacoder bash/shell Completion 🎅"
 description = "This is a subspace to make code generation with [SantaCoder fine-tuned on The Stack bash/shell](https://huggingface.co/mrm8488/santacoder-finetuned-the-stack-bash-4)"
+EXAMPLE_0 = "#!/bin/bash\n# This script read the files in the current dir and remove it if greater than 2MB"
 example = [
-["def print_hello_world():", 8, 0.6, 42],
-["def get_file_size(filepath):", 40, 0.6, 42],
-["def count_lines(filename):", 40, 0.6, 42],
-["def count_words(filename):", 40, 0.6, 42]]
+[EXAMPLE_0, 128, 0.6, 42]]
 tokenizer = AutoTokenizer.from_pretrained("mrm8488/santacoder-finetuned-the-stack-bash-4")
 model = AutoModelForCausalLM.from_pretrained("mrm8488/santacoder-finetuned-the-stack-bash-4", trust_remote_code=True).to(device)
 
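
For context, each example row pairs a prompt with generation settings (here 128 tokens, temperature 0.6, seed 42). Below is a minimal sketch of how such a row could drive generation with the tokenizer and model loaded above; the generate() helper and its (prompt, max_new_tokens, temperature, seed) signature are assumptions for illustration and are not taken from app.py.

import torch
from transformers import AutoTokenizer, AutoModelForCausalLM

device = 'cuda' if torch.cuda.is_available() else 'cpu'

tokenizer = AutoTokenizer.from_pretrained("mrm8488/santacoder-finetuned-the-stack-bash-4")
model = AutoModelForCausalLM.from_pretrained(
    "mrm8488/santacoder-finetuned-the-stack-bash-4", trust_remote_code=True
).to(device)

def generate(prompt, max_new_tokens=128, temperature=0.6, seed=42):
    # Hypothetical helper: mirrors the (prompt, tokens, temperature, seed) shape of the example row.
    torch.manual_seed(seed)
    inputs = tokenizer(prompt, return_tensors="pt").to(device)
    outputs = model.generate(
        **inputs,
        max_new_tokens=max_new_tokens,
        do_sample=True,
        temperature=temperature,
        pad_token_id=tokenizer.eos_token_id,
    )
    return tokenizer.decode(outputs[0], skip_special_tokens=True)

EXAMPLE_0 = "#!/bin/bash\n# This script read the files in the current dir and remove it if greater than 2MB"
print(generate(EXAMPLE_0, 128, 0.6, 42))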