MatanP committed on
Commit 1dc3d50 · verified · 1 Parent(s): 74b3118

End of training

README.md CHANGED
@@ -1,7 +1,7 @@
 ---
-base_model: deepseek-ai/deepseek-llm-7b-base
+base_model: deepseek-ai/deepseek-llm-7b-chat
 library_name: transformers
-model_name: SFT_FineTuned_DeepSeek-7B-v0.1
+model_name: SFT_FineTuned_DeepSeek-7B-chat-v0.1
 tags:
 - generated_from_trainer
 - trl
@@ -9,9 +9,9 @@ tags:
 licence: license
 ---
 
-# Model Card for SFT_FineTuned_DeepSeek-7B-v0.1
+# Model Card for SFT_FineTuned_DeepSeek-7B-chat-v0.1
 
-This model is a fine-tuned version of [deepseek-ai/deepseek-llm-7b-base](https://huggingface.co/deepseek-ai/deepseek-llm-7b-base).
+This model is a fine-tuned version of [deepseek-ai/deepseek-llm-7b-chat](https://huggingface.co/deepseek-ai/deepseek-llm-7b-chat).
 It has been trained using [TRL](https://github.com/huggingface/trl).
 
 ## Quick start
@@ -27,7 +27,7 @@ print(output["generated_text"])
 
 ## Training procedure
 
-[<img src="https://raw.githubusercontent.com/wandb/assets/main/wandb-github-badge-28.svg" alt="Visualize in Weights & Biases" width="150" height="24"/>](https://wandb.ai/matansiva-anna-university/SFT_DEEPSEEKv1.1/runs/oyqljj01)
+[<img src="https://raw.githubusercontent.com/wandb/assets/main/wandb-github-badge-28.svg" alt="Visualize in Weights & Biases" width="150" height="24"/>](https://wandb.ai/matansiva-anna-university/SFT_DEEPSEEKv1.1/runs/oaihj0ih)
 
 
 This model was trained with SFT.
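The `## Quick start` section referenced by the last hunk header loads the fine-tuned adapter through the `transformers` text-generation pipeline. A minimal sketch of that usage, assuming the adapter is published under the hypothetical repo id `MatanP/SFT_FineTuned_DeepSeek-7B-chat-v0.1` and that `peft` is installed so the weights resolve against the new `deepseek-llm-7b-chat` base:

```python
# Minimal inference sketch; the repo id below is an assumption, not taken from this commit.
from transformers import pipeline

question = "If you had a time machine, which year would you visit first?"
generator = pipeline(
    "text-generation",
    model="MatanP/SFT_FineTuned_DeepSeek-7B-chat-v0.1",  # hypothetical adapter repo id
    device_map="auto",
)

# Passing role-tagged messages lets the pipeline apply the tokenizer's
# chat_template (added in this commit) before generation.
output = generator(
    [{"role": "user", "content": question}],
    max_new_tokens=128,
    return_full_text=False,
)[0]
print(output["generated_text"])
```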
adapter_config.json CHANGED
@@ -1,7 +1,7 @@
 {
   "alpha_pattern": {},
   "auto_mapping": null,
-  "base_model_name_or_path": "deepseek-ai/deepseek-llm-7b-base",
+  "base_model_name_or_path": "deepseek-ai/deepseek-llm-7b-chat",
   "bias": "none",
   "eva_config": null,
   "exclude_modules": null,
@@ -26,12 +26,12 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
+    "up_proj",
     "down_proj",
     "k_proj",
     "o_proj",
-    "up_proj",
-    "q_proj",
-    "v_proj"
+    "v_proj",
+    "q_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:32a61dc335bbcb885fcdb29afa597118c696a39ccc504b7b0a06ee3112e7d617
+oid sha256:1ef9953aa89a5e9432230d292318a48f483b5916a5b4283491e0ae749da4b621
 size 10073752920
tokenizer_config.json CHANGED
@@ -125,6 +125,7 @@
     }
   },
   "bos_token": "<|begin▁of▁sentence|>",
+  "chat_template": "{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{{ bos_token }}{% for message in messages %}{% if message['role'] == 'user' %}{{ 'User: ' + message['content'] + '\n\n' }}{% elif message['role'] == 'assistant' %}{{ 'Assistant: ' + message['content'] + eos_token }}{% elif message['role'] == 'system' %}{{ message['content'] + '\n\n' }}{% endif %}{% endfor %}{% if add_generation_prompt %}{{ 'Assistant:' }}{% endif %}",
   "clean_up_tokenization_spaces": false,
   "eos_token": "<|end▁of▁sentence|>",
   "legacy": true,
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:c1b9223d1e8f892752b4d46fbef7f79d07b8a72201343939f08392395de8c301
+oid sha256:901a0d085a9a569d944f3538f5538d28d883ae77496fdd4b2c098d6f0b99680e
 size 5560