Liyulingyue committed
Commit 1e2ed50 · 1 Parent(s): 37ce2cf

Delete dream_outputs

dream_outputs/feature_extractor/preprocessor_config.json DELETED
@@ -1,28 +0,0 @@
-{
-  "crop_size": {
-    "height": 224,
-    "width": 224
-  },
-  "do_center_crop": true,
-  "do_convert_rgb": true,
-  "do_normalize": true,
-  "do_rescale": true,
-  "do_resize": true,
-  "feature_extractor_type": "CLIPFeatureExtractor",
-  "image_mean": [
-    0.48145466,
-    0.4578275,
-    0.40821073
-  ],
-  "image_processor_type": "CLIPFeatureExtractor",
-  "image_std": [
-    0.26862954,
-    0.26130258,
-    0.27577711
-  ],
-  "resample": 3,
-  "rescale_factor": 0.00392156862745098,
-  "size": {
-    "shortest_edge": 224
-  }
-}
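
For reference, the deleted preprocessor_config.json follows the CLIP image-processor schema: resize so the shortest edge is 224, center-crop to 224x224, rescale by 1/255, and normalize with the CLIP mean/std. Below is a minimal sketch of how such a config is typically consumed, assuming the Hugging Face transformers API and a local copy of the (now deleted) directory; the input image path is hypothetical.

```python
# Minimal sketch, assuming the Hugging Face `transformers` API; the
# `dream_outputs/feature_extractor` directory is the one deleted in this
# commit, so a local copy (or an equivalent config) is assumed to exist.
from PIL import Image
from transformers import CLIPFeatureExtractor

feature_extractor = CLIPFeatureExtractor.from_pretrained(
    "dream_outputs/feature_extractor"
)

# Per the config above: resize shortest edge to 224, center-crop 224x224,
# rescale by 1/255, then normalize with the CLIP mean/std values.
image = Image.open("example.png").convert("RGB")  # hypothetical input image
inputs = feature_extractor(images=image, return_tensors="np")
print(inputs["pixel_values"].shape)  # expected: (1, 3, 224, 224)
```
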
dream_outputs/safety_checker/config.json DELETED
@@ -1,23 +0,0 @@
-{
-  "architectures": [
-    "StableDiffusionSafetyChecker"
-  ],
-  "attention_dropout": 0.0,
-  "dropout": 0.0,
-  "dtype": "float32",
-  "hidden_act": "quick_gelu",
-  "hidden_size": 1024,
-  "image_size": 224,
-  "initializer_factor": 1.0,
-  "initializer_range": 0.02,
-  "intermediate_size": 4096,
-  "layer_norm_eps": 1e-05,
-  "model_type": "clip_vision_model",
-  "num_attention_heads": 16,
-  "num_channels": 3,
-  "num_hidden_layers": 24,
-  "paddlenlp_version": null,
-  "patch_size": 14,
-  "projection_dim": 768,
-  "return_dict": true
-}
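
The deleted safety_checker config describes the CLIP vision backbone used by StableDiffusionSafetyChecker: 24 layers, hidden size 1024, 14-pixel patches on 224-pixel images, and a 768-dimensional projection; the `paddlenlp_version` field suggests it was exported from a PaddleNLP/ppdiffusers pipeline. As a sketch only, the same hyperparameters expressed with the Hugging Face CLIPVisionConfig class (an assumed equivalent, purely for illustration):

```python
# Minimal sketch: restate the deleted safety-checker vision config with the
# Hugging Face `transformers` CLIPVisionConfig class (assumed equivalent;
# the original file was written for a PaddleNLP-based pipeline).
from transformers import CLIPVisionConfig

vision_config = CLIPVisionConfig(
    hidden_size=1024,
    intermediate_size=4096,
    num_hidden_layers=24,
    num_attention_heads=16,
    image_size=224,
    patch_size=14,
    projection_dim=768,
    hidden_act="quick_gelu",
    layer_norm_eps=1e-05,
    attention_dropout=0.0,
)

# 224 / 14 = 16 patches per side -> 256 patch tokens + 1 class token.
num_tokens = (vision_config.image_size // vision_config.patch_size) ** 2 + 1
print(num_tokens)  # 257
```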