mwitiderrick committed on
Commit cc4c8b3 · 1 Parent(s): e857200

Update README.md

Files changed (1)
  1. README.md +8 -6
README.md CHANGED
@@ -3,9 +3,9 @@ base_model: mediocredev/open-llama-3b-v2-instruct
 inference: false
 model_type: llama
 prompt_template: |
-  Q:
+  ### User:\n
   {prompt}
-  \nA
+  ### Assistant:\n
 quantized_by: mwitiderrick
 tags:
 - deepsparse
@@ -24,8 +24,10 @@ Run in a [Python pipeline](https://github.com/neuralmagic/deepsparse/blob/main/d
 ```python
 from deepsparse import TextGeneration
 
-prompt = 'Q: What is the largest animal?\nA:'
-formatted_prompt = f"Q: {prompt}\nA:"
+system_message = 'You are a helpful assistant, who always provide explanation.'
+user_message = 'How many days are there in a leap year?'
+
+formatted_prompt = f'### System:\n{system_message}<|endoftext|>\n### User:\n{user_message}<|endoftext|>\n### Assistant:\n'
 
 model = TextGeneration(model_path="hf:nm-testing/open_llama_3b-pruned-quant-50")
 print(model(formatted_prompt, max_new_tokens=200).generations[0].text)
@@ -37,9 +39,9 @@ print(model(formatted_prompt, max_new_tokens=200).generations[0].text)
 ## Prompt template
 
 ```
-Q:
+### User:\n
 {prompt}
-\nA:
+### Assistant:\n
 
 ```
 ## Sparsification
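
For reference, a minimal sketch of how the updated prompt template could be applied end to end without a system message. The `TextGeneration` call and model path are taken from the diff above; reading the template as a plain Python format string (with the escaped `\n` rendered as real newlines) is an assumption made here for illustration:

```python
from deepsparse import TextGeneration

# Chat-style template introduced by this commit (see the diff above).
# Rendering the YAML's escaped "\n" as literal newlines is an assumption.
PROMPT_TEMPLATE = "### User:\n{prompt}\n### Assistant:\n"

# Model path is the same one used in the README example.
model = TextGeneration(model_path="hf:nm-testing/open_llama_3b-pruned-quant-50")

formatted_prompt = PROMPT_TEMPLATE.format(prompt="How many days are there in a leap year?")
print(model(formatted_prompt, max_new_tokens=200).generations[0].text)
```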