---
dataset_info:
  features:
  - name: huggingface_hub
    dtype: int64
  - name: text_generation_inference
    dtype: int64
  - name: safetensors
    dtype: int64
  - name: tokenizers
    dtype: int64
  - name: transformers
    dtype: int64
  - name: diffusers
    dtype: int64
  - name: accelerate
    dtype: int64
  - name: chat_ui
    dtype: int64
  - name: candle
    dtype: int64
  - name: gradio
    dtype: int64
  - name: evaluate
    dtype: int64
  - name: pytorch_image_models
    dtype: int64
  - name: peft
    dtype: int64
  - name: optimum
    dtype: int64
  - name: datasets
    dtype: int64
  - name: hub_docs
    dtype: int64
  - name: langchain
    dtype: int64
  - name: stable_diffusion_webui
    dtype: int64
  - name: tensorflow
    dtype: int64
  - name: pytorch
    dtype: int64
  - name: openai_python
    dtype: int64
  - name: day
    dtype: string
  splits:
  - name: raw
    num_bytes: 19652
    num_examples: 101
  - name: wow
    num_bytes: 19844
    num_examples: 102
  - name: eom
    num_bytes: 19652
    num_examples: 101
  - name: eom_wow
    num_bytes: 19844
    num_examples: 102
  download_size: 76401
  dataset_size: 78992
configs:
- config_name: default
  data_files:
  - split: raw
    path: data/raw-*
  - split: wow
    path: data/wow-*
  - split: eom
    path: data/eom-*
  - split: eom_wow
    path: data/eom_wow-*
---
# Dataset Card for "preprocessed_issues"

[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
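
## Usage

A minimal loading sketch based on the splits and features declared in the YAML header above. The repo ID `your-username/preprocessed_issues` is an assumption; substitute the namespace that actually hosts this dataset.

```python
from datasets import load_dataset

# Repo ID is a placeholder -- replace with the actual namespace/dataset name.
ds = load_dataset("your-username/preprocessed_issues", split="raw")

# Each row carries one int64 column per tracked repository
# (transformers, datasets, peft, ...) plus a "day" string column.
print(ds.features["day"])       # Value(dtype='string')
print(ds[0]["transformers"])    # int64 value for that day

# The other splits declared in the header load the same way:
wow = load_dataset("your-username/preprocessed_issues", split="wow")
```

The same pattern applies to the remaining splits (`eom`, `eom_wow`); per the header, `raw`/`eom` hold 101 examples each and `wow`/`eom_wow` hold 102.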