NohTow committed · verified
Commit 8da62b1 · 1 Parent(s): aa9530c

Adding the dummy pooling layer back

config.json CHANGED
@@ -19,7 +19,7 @@
   "pad_token_id": 0,
   "position_embedding_type": "absolute",
   "torch_dtype": "float32",
-  "transformers_version": "4.45.0.dev0",
+  "transformers_version": "4.48.2",
   "type_vocab_size": 2,
   "use_cache": true,
   "vocab_size": 30522
config_sentence_transformers.json CHANGED
@@ -1,12 +1,12 @@
 {
   "__version__": {
-    "sentence_transformers": "3.0.1",
-    "transformers": "4.45.0.dev0",
-    "pytorch": "2.4.0+cu121"
+    "sentence_transformers": "3.4.1",
+    "transformers": "4.48.2",
+    "pytorch": "2.5.1+cu124"
   },
   "prompts": {},
   "default_prompt_name": null,
-  "similarity_fn_name": null,
+  "similarity_fn_name": "MaxSim",
   "query_prefix": "[unused0]",
   "document_prefix": "[unused1]",
   "query_length": 32,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:dc0663e33ba672f20177adba1fb0b4a471ca8d208436a8f37b0474795402164d
-size 435588776
+oid sha256:fc984a3dfbe2a0d8939e0ee4db45aa071da2d9e9ef9817a86e52f5f55a274305
+size 437951328
modules.json CHANGED
@@ -9,6 +9,6 @@
     "idx": 1,
     "name": "1",
     "path": "1_Dense",
-    "type": "pylate.models.Dense"
+    "type": "pylate.models.Dense.Dense"
   }
 ]
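
The module type now uses the fully qualified path pylate.models.Dense.Dense, so the 1_Dense layer (the "dummy pooling layer" from the commit title) resolves to PyLate's Dense class when the checkpoint is loaded. A quick sanity check, assuming PyLate's documented ColBERT interface (the repository id below is a placeholder, not part of this commit):

```python
from pylate import models

# Loading reads modules.json and instantiates the 1_Dense module
# via pylate.models.Dense.Dense.
model = models.ColBERT(model_name_or_path="<this-model-repo>")

# Token-level (multi-vector) embeddings for a query and a document.
query_embeddings = model.encode(["what is late interaction?"], is_query=True)
doc_embeddings = model.encode(["ColBERT scores documents with MaxSim."], is_query=False)
```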
tokenizer_config.json CHANGED
@@ -57,9 +57,10 @@
       "special": true
     }
   },
-  "clean_up_tokenization_spaces": true,
+  "clean_up_tokenization_spaces": false,
   "cls_token": "[CLS]",
   "do_lower_case": true,
+  "extra_special_tokens": {},
   "mask_token": "[MASK]",
   "model_max_length": 512,
   "pad_token": "[MASK]",