selectorseb committed
Commit 37eb2e2 · verified · 1 Parent(s): d15df6a

(Trained with Unsloth)

model-00001-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:f6ebba4af666095386cb6808214c906d2b051bc92df89d3476c121e28746194c
+oid sha256:23d42b282e0f0a27462ad420f1eb0ee511819be1cba99f148a6527202e11f91b
 size 4976698672
model-00002-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:7d25e212a2f0a52f51de08c0c51274b3b90144e203df40ff6f197bccfa0d3204
+oid sha256:250505cb371df8ad94f7293bc5783a7db4ce633276cbb6ee82b3b71f4e643f24
 size 4999802720
model-00003-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:f38145934300a66dfe33f374330eaae43d702598c98a00c655597d28be15a4e4
+oid sha256:c4339110e3ad1684829be2b1c6f136aba8379ceba715d982f7ef8bd07e5bfb73
 size 4915916176
model-00004-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:3e49523dcc74f5e5b54db530a82fc59ebae0d5d0367affd9b8c1831fb37cb76f
+oid sha256:bbb9737f914184effd5016843f023d84afb312fc94f7a22856606eb19eb07c1f
 size 1168138808
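
The four pointer updates above change only the LFS object IDs (sha256 digests); shard sizes are unchanged. A minimal sketch, not part of the commit, for checking locally downloaded shards against the new digests — only the file names and expected digests come from this diff, and the checkpoint directory path is a placeholder:

# Minimal verification sketch (assumption: shards already downloaded to a local directory).
import hashlib
from pathlib import Path

EXPECTED = {
    "model-00001-of-00004.safetensors": "23d42b282e0f0a27462ad420f1eb0ee511819be1cba99f148a6527202e11f91b",
    "model-00002-of-00004.safetensors": "250505cb371df8ad94f7293bc5783a7db4ce633276cbb6ee82b3b71f4e643f24",
    "model-00003-of-00004.safetensors": "c4339110e3ad1684829be2b1c6f136aba8379ceba715d982f7ef8bd07e5bfb73",
    "model-00004-of-00004.safetensors": "bbb9737f914184effd5016843f023d84afb312fc94f7a22856606eb19eb07c1f",
}

checkpoint_dir = Path("path/to/this/checkpoint")  # placeholder: local copy of the repo
for name, expected in EXPECTED.items():
    h = hashlib.sha256()
    with open(checkpoint_dir / name, "rb") as f:  # shards are several GB, so hash in chunks
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    print(name, "OK" if h.hexdigest() == expected else "MISMATCH")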
tokenizer_config.json CHANGED
@@ -2050,7 +2050,7 @@
     }
   },
   "bos_token": "<|begin_of_text|>",
-  "chat_template": "{{ bos_token + '\n'}}{{ 'Cutting Knowledge Date: December 2023' + '\n' 'Today Date: 23 July 2024'+ '\n'}}{{ 'When you receive a tool call response, use the output to format an answer to the orginal user question'+ '\n'}}{{ 'You are an intelligent AI that controls a drone. Given a command or request from the user'+ '\n'}}{{ 'call one of your functions to complete the request. If the request cannot be completed by your available functions, respond to the question with the best of you knowledge.'+ '\n'}}{{ 'If the request is ambiguous or unclear, reject the request.'+ '\n'}}{% for message in messages %}{% if message['role'] == 'user' %}{{ 'Given the following functions, please respond with a JSON for a function call with its proper arguments that best answers the given prompt.'+'\n'}}{{ 'Respond in the format {name: function name, parameters: dictionary of argument name and its value}. Do not use variables.'+ '\n'}}{{ '>>> User: ' + message['content'] + '\n' }}{% elif message['role'] == 'function' %}{% for function in message['content'] %}{{function | tojson(indent=2)}}{{'\n'}}{% endfor %}{% elif message['role'] == 'assistant' %}{{'>>> Assistant: ' + message['content'] + eos_token + '\n' }}{% endif %}{% endfor %}{% if add_generation_prompt %}{{ '>>> Assistant: ' }}{% endif %}",
+  "chat_template": "{{ bos_token + '\n'}}{{ 'Cutting Knowledge Date: December 2023' + '\n' 'Today Date: 23 July 2024'+ '\n'}}{{ 'When you receive a tool call response, use the output to format an answer to the orginal user question'+ '\n'}}{{ 'You are a helpful assistant with tool calling capabilities'+'\n'}}{{ 'You are an intelligent AI that controls a drone. Given a command or request from the user'+ '\n'}}{{ 'call one of your functions to complete the request. If the request cannot be completed by your available functions, respond to the question with the best of you knowledge.'+ '\n'}}{{ 'If the request is ambiguous or unclear, say so to ther User.'+ '\n'}}{% for message in messages %}{% if message['role'] == 'user' %}{{ '>>> User: ' + message['content'] + '\n'}}{% elif message['role'] == 'function' %}{{ 'Given the following functions, please respond with a JSON for a function call with its proper arguments that best answers the given prompt.'+'\n'}}{{'Respond in the format {name: function name, parameters: dictionary of argument name and its value}. Do not use variables.'+ '\n'}}{% for function in message['content'] %}{{function | tojson(indent=2)}}{{'\n'}}{% endfor %}{% elif message['role'] == 'assistant' %}{{'>>> Assistant: ' + message['content'] + eos_token + '\n'}}{% endif %}{% endfor %}{% if add_generation_prompt %}{{ '>>> Assistant: ' }}{% endif %}",
   "clean_up_tokenization_spaces": true,
   "eos_token": "<|eot_id|>",
   "model_input_names": [