---
dataset_info:
  features:
  - name: evaluate
    dtype: int64
  - name: tokenizers
    dtype: int64
  - name: optimum
    dtype: int64
  - name: huggingface_hub
    dtype: int64
  - name: diffusers
    dtype: int64
  - name: datasets
    dtype: int64
  - name: accelerate
    dtype: int64
  - name: gradio
    dtype: int64
  - name: transformers
    dtype: int64
  - name: peft
    dtype: int64
  - name: pytorch_image_models
    dtype: int64
  - name: hub_docs
    dtype: int64
  - name: stable_diffusion_webui
    dtype: int64
  - name: pytorch
    dtype: int64
  - name: tensorflow
    dtype: int64
  - name: day
    dtype: string
  splits:
  - name: raw
    num_bytes: 14726
    num_examples: 101
  - name: wow
    num_bytes: 14870
    num_examples: 102
  - name: eom
    num_bytes: 14726
    num_examples: 101
  - name: eom_wow
    num_bytes: 14870
    num_examples: 102
  download_size: 57633
  dataset_size: 59192
---
# Dataset Card for "preprocessed_issues"

[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
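
The `dataset_info` metadata above lists the dataset's features (per-library issue counts stored as `int64` columns plus a `day` string column) and its four splits (`raw`, `wow`, `eom`, `eom_wow`). As a minimal usage sketch, assuming the dataset is hosted on the Hugging Face Hub (the repository namespace below is a placeholder, not part of this card), a split can be loaded with the `datasets` library:

```python
from datasets import load_dataset

# Placeholder repository id -- replace "your-namespace" with the Hub
# namespace that actually hosts "preprocessed_issues".
issues = load_dataset("your-namespace/preprocessed_issues", split="raw")

print(issues.column_names)  # per-library count columns plus "day"
print(issues[0])            # first row of the "raw" split
```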