KaraKaraWitch committed on
Commit
7808a26
·
verified ·
1 Parent(s): 5f38e17

Add files using upload-large-folder tool

Browse files
config.json CHANGED
@@ -1,16 +1,12 @@
1
  {
2
- "_name_or_path": "nbeerbower/Llama-3.1-Nemotron-lorablated-70B",
3
  "architectures": [
4
  "LlamaForCausalLM"
5
  ],
6
  "attention_bias": false,
7
  "attention_dropout": 0.0,
8
  "bos_token_id": 128000,
9
- "eos_token_id": [
10
- 128001,
11
- 128008,
12
- 128009
13
- ],
14
  "head_dim": 128,
15
  "hidden_act": "silu",
16
  "hidden_size": 8192,
@@ -35,6 +31,6 @@
35
  "tie_word_embeddings": false,
36
  "torch_dtype": "bfloat16",
37
  "transformers_version": "4.48.0",
38
- "use_cache": true,
39
  "vocab_size": 128256
40
  }
 
1
  {
2
+ "_name_or_path": "KaraKaraWitch/Llama-3.X-Workout-70B",
3
  "architectures": [
4
  "LlamaForCausalLM"
5
  ],
6
  "attention_bias": false,
7
  "attention_dropout": 0.0,
8
  "bos_token_id": 128000,
9
+ "eos_token_id": 128009,
 
 
 
 
10
  "head_dim": 128,
11
  "hidden_act": "silu",
12
  "hidden_size": 8192,
 
31
  "tie_word_embeddings": false,
32
  "torch_dtype": "bfloat16",
33
  "transformers_version": "4.48.0",
34
+ "use_cache": false,
35
  "vocab_size": 128256
36
  }
mergekit_config.yml CHANGED
@@ -16,5 +16,7 @@ models:
16
  - model: SicariusSicariiStuff/Negative_LLAMA_70B
17
 
18
  merge_method: sce
19
- base_model: nbeerbower/Llama-3.1-Nemotron-lorablated-70B
 
 
20
  dtype: bfloat16
 
16
  - model: SicariusSicariiStuff/Negative_LLAMA_70B
17
 
18
  merge_method: sce
19
+ base_model: KaraKaraWitch/Llama-3.X-Workout-70B
20
+ parameters:
21
+ select_topk: 0.75
22
  dtype: bfloat16
model-00001-of-00015.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:a2968a63494182abab04b6992e06ff59a6691cedad5f182d7782a37f12a59de7
3
  size 9806401040
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4ccd273762057faf02b1ab93709d3147d56c163f1fd309fcf5b00805e6a6c8f0
3
  size 9806401040
model-00002-of-00015.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:e1e0bd96fcdce5d761489e13c7850af54320d18ce85f604dc7b7a4f5abb2f1f3
3
  size 9798096968
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e5f9307415a589b89b8db55e936f797bbdc69e6cea11091aea1ec42873433826
3
  size 9798096968
model-00003-of-00015.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:05ac1f41bc1e369b4cd8a066d7521c493631816dc5f09b5c0c2d9f08a3c35678
3
  size 9982629608
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7118911951e100adab80aa156423d1e53aa7061bb314cca0d56a9d10f95786d3
3
  size 9982629608
model-00004-of-00015.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:bdd7f14dceeaf61603e57bec526c5c6cce164fd805bcd40b5d3cadf43154c4f2
3
  size 9781303128
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3717cfe7cd57bece437d0de6c52e2d05018e77920b990721fe66f290135652de
3
  size 9781303128
model-00005-of-00015.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:eb67ba8cf5f347543ed703d7ba52c380f93bd0e6aa3113fb2d58c0a5367bac13
3
  size 9798096952
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3bc450f4476cdde5c099729a35699929365a9f76c219f70fd55c6dfedbc905d0
3
  size 9798096952
model-00006-of-00015.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:9d677d7036ec5aa7cbe32e4596f308519f87f78ca32d0a8e2c20be6352220d82
3
  size 9798096968
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4e72401bf104f28bf7253b2234048456c2dcf5cdc4367a9ae43a7837bf4059e2
3
  size 9798096968
model-00007-of-00015.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:5554e4f1b8fe2e5080d0524ec9e16e1f6ef2fce0c8bbd33b338febde6b60950e
3
  size 9982629608
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:81b8ba34c0cfdd1cdbcc8dcb9a660f4be73fb8f404ccf70aa10fa1a01ca89167
3
  size 9982629608
model-00008-of-00015.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:4f6973f3d7f576a350579c8a61f8859578b36cc1e80eae85ffb6bc6dfa5f8248
3
  size 9781303128
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:baaa953136712cf805e9151bb000cf8eb53cdbad4e405da336de40ea0f8b0023
3
  size 9781303128
model-00009-of-00015.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:13ddb5258b66a6f48daed021c9b9474797bd9ca39c583493b06f0671042ed60f
3
  size 9798096952
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ff17bc060fad4b210a5c05dd744419e984f8703544f5106bdf89a1228f60fa39
3
  size 9798096952
model-00010-of-00015.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:ca23107dceb7410a98791cd7ace171df9b23401b74ba2b60636a893f083c2936
3
  size 9798096968
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f0a2db70a58c3214a69a2bd62548c77a7168ea25e965d56e4c5378502b683190
3
  size 9798096968
model-00011-of-00015.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:2fdb2fadc8d84eeada1f46db9b87ffac63f3d549abff33a02c1e7abe9780f33d
3
  size 9982629608
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:496fbfcc3d3bff95dd2dfcb36dcf445d4c339606f226e51d65728cf84a0efa1c
3
  size 9982629608
model-00012-of-00015.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:682ac10d241182f43cce09b976fec78e0a6f45693a461e1536398fd30f12e772
3
  size 9781303128
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c5dd10ae961bcb2a0ed2a8a5095000758bbcaa5bb31d5026d4d2fdc6ee05cc3e
3
  size 9781303128
model-00013-of-00015.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:5f5ae94b1d035f9316e22be1b2e0d3a699151990f327d7fc690f56f32f9d04af
3
  size 9798096952
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ab61c38f3cb0cc47f143f1cd75b28ee7557c40e364342e18821c1a3eaddce56b
3
  size 9798096952
model-00014-of-00015.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:8b483ded7a3f3e5d88b87bd2fca4dd09794e748b3e28cf2adde62ed6001c2d53
3
  size 9798096968
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0950ad8e2d8a79d4a0119b5619d289300277ebc1a82fcfc01007967d80efb354
3
  size 9798096968
model-00015-of-00015.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:31a47805dbac6bc1227068978ff5b14befb7653bf3352e768004fe1435369e1f
3
  size 3422619696
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:28b286c9a8a658f0df910cac7474e5f28ce4db99572d218debc6bccf71452f74
3
  size 3422619696
special_tokens_map.json CHANGED
@@ -7,7 +7,7 @@
7
  "single_word": false
8
  },
9
  "eos_token": {
10
- "content": "<|eot_id|>",
11
  "lstrip": false,
12
  "normalized": false,
13
  "rstrip": false,
 
7
  "single_word": false
8
  },
9
  "eos_token": {
10
+ "content": "<|end_of_text|>",
11
  "lstrip": false,
12
  "normalized": false,
13
  "rstrip": false,
tokenizer_config.json CHANGED
@@ -2050,9 +2050,9 @@
2050
  }
2051
  },
2052
  "bos_token": "<|begin_of_text|>",
2053
- "chat_template": "{{- bos_token }}\n{%- if custom_tools is defined %}\n {%- set tools = custom_tools %}\n{%- endif %}\n{%- if not tools_in_user_message is defined %}\n {%- set tools_in_user_message = true %}\n{%- endif %}\n{%- if not date_string is defined %}\n {%- set date_string = \"26 Jul 2024\" %}\n{%- endif %}\n{%- if not tools is defined %}\n {%- set tools = none %}\n{%- endif %}\n\n{#- This block extracts the system message, so we can slot it into the right place. #}\n{%- if messages[0]['role'] == 'system' %}\n {%- set system_message = messages[0]['content']|trim %}\n {%- set messages = messages[1:] %}\n{%- else %}\n {%- set system_message = \"\" %}\n{%- endif %}\n\n{#- System message + builtin tools #}\n{{- \"<|start_header_id|>system<|end_header_id|>\\n\\n\" }}\n{%- if builtin_tools is defined or tools is not none %}\n {{- \"Environment: ipython\\n\" }}\n{%- endif %}\n{%- if builtin_tools is defined %}\n {{- \"Tools: \" + builtin_tools | reject('equalto', 'code_interpreter') | join(\", \") + \"\\n\\n\"}}\n{%- endif %}\n\n{%- if tools is not none and not tools_in_user_message %}\n {{- \"You have access to the following functions. To call a function, please respond with JSON for a function call.\" }}\n {{- 'Respond in the format {\"name\": function name, \"parameters\": dictionary of argument name and its value}.' 
}}\n {{- \"Do not use variables.\\n\\n\" }}\n {%- for t in tools %}\n {{- t | tojson(indent=4) }}\n {{- \"\\n\\n\" }}\n {%- endfor %}\n{%- endif %}\n{{- system_message }}\n{{- \"<|eot_id|>\" }}\n\n{#- Custom tools are passed in a user message with some extra guidance #}\n{%- if tools_in_user_message and not tools is none %}\n {#- Extract the first user message so we can plug it in here #}\n {%- if messages | length != 0 %}\n {%- set first_user_message = messages[0]['content']|trim %}\n {%- set messages = messages[1:] %}\n {%- else %}\n {{- raise_exception(\"Cannot put tools in the first user message when there's no first user message!\") }}\n{%- endif %}\n {{- '<|start_header_id|>user<|end_header_id|>\\n\\n' -}}\n {{- \"Given the following functions, please respond with a JSON for a function call \" }}\n {{- \"with its proper arguments that best answers the given prompt.\\n\\n\" }}\n {{- 'Respond in the format {\"name\": function name, \"parameters\": dictionary of argument name and its value}.' 
}}\n {{- \"Do not use variables.\\n\\n\" }}\n {%- for t in tools %}\n {{- t | tojson(indent=4) }}\n {{- \"\\n\\n\" }}\n {%- endfor %}\n {{- first_user_message + \"<|eot_id|>\"}}\n{%- endif %}\n\n{%- for message in messages %}\n {%- if not (message.role == 'ipython' or message.role == 'tool' or 'tool_calls' in message) %}\n {{- '<|start_header_id|>' + message['role'] + '<|end_header_id|>\\n\\n'+ message['content'] | trim + '<|eot_id|>' }}\n {%- elif 'tool_calls' in message %}\n {%- if not message.tool_calls|length == 1 %}\n {{- raise_exception(\"This model only supports single tool-calls at once!\") }}\n {%- endif %}\n {%- set tool_call = message.tool_calls[0].function %}\n {%- if builtin_tools is defined and tool_call.name in builtin_tools %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' -}}\n {{- \"<|python_tag|>\" + tool_call.name + \".call(\" }}\n {%- for arg_name, arg_val in tool_call.arguments | items %}\n {{- arg_name + '=\"' + arg_val + '\"' }}\n {%- if not loop.last %}\n {{- \", \" }}\n {%- endif %}\n {%- endfor %}\n {{- \")\" }}\n {%- else %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' -}}\n {{- '{\"name\": \"' + tool_call.name + '\", ' }}\n {{- '\"parameters\": ' }}\n {{- tool_call.arguments | tojson }}\n {{- \"}\" }}\n {%- endif %}\n {%- if builtin_tools is defined %}\n {#- This means we're in ipython mode #}\n {{- \"<|eom_id|>\" }}\n {%- else %}\n {{- \"<|eot_id|>\" }}\n {%- endif %}\n {%- elif message.role == \"tool\" or message.role == \"ipython\" %}\n {{- \"<|start_header_id|>ipython<|end_header_id|>\\n\\n\" }}\n {%- if message.content is mapping or message.content is iterable %}\n {{- message.content | tojson }}\n {%- else %}\n {{- message.content }}\n {%- endif %}\n {{- \"<|eot_id|>\" }}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' }}\n{%- endif %}\n",
2054
  "clean_up_tokenization_spaces": true,
2055
- "eos_token": "<|eot_id|>",
2056
  "extra_special_tokens": {},
2057
  "model_input_names": [
2058
  "input_ids",
 
2050
  }
2051
  },
2052
  "bos_token": "<|begin_of_text|>",
2053
+ "chat_template": "{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% set loop_messages = messages %}{% for message in loop_messages %}{% set content = '<|start_header_id|>' + message['role'] + '<|end_header_id|>\n\n'+ message['content'] | trim + '<|eot_id|>' %}{% if loop.index0 == 0 %}{% set content = bos_token + content %}{% endif %}{{ content }}{% endfor %}{% if add_generation_prompt %}{{ '<|start_header_id|>assistant<|end_header_id|>\n\n' }}{% endif %}",
2054
  "clean_up_tokenization_spaces": true,
2055
+ "eos_token": "<|end_of_text|>",
2056
  "extra_special_tokens": {},
2057
  "model_input_names": [
2058
  "input_ids",