Update app.py
app.py CHANGED
@@ -60,9 +60,9 @@ print(tokenizer.apply_chat_template([{"role": "user", "content": "Why is the sky
 
 config = LlamaConfig(
     vocab_size=tokenizer.vocab_size,
-    hidden_size=int(512
+    hidden_size=int(512 * 2),
     intermediate_size=int(1024 / 1),
-    num_hidden_layers=int(8
+    num_hidden_layers=int(8 * 2),
     num_attention_heads=int(8 / 1),
     max_position_embeddings=int(512 / 1),
     rms_norm_eps=1e-6,
@@ -99,7 +99,7 @@ print(dataset['text'][2])
 args = TrainingArguments(
     output_dir="mayo",
     num_train_epochs=16,
-    per_device_train_batch_size=
+    per_device_train_batch_size=32,
     gradient_accumulation_steps=4,
     learning_rate=1e-5,
     save_steps=100000,
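For reference, a minimal sketch of what the updated section of app.py looks like after this commit, reassembled from the diff above. It assumes the Hugging Face transformers library; the tokenizer object is loaded earlier in the script (the hunk header shows it calling apply_chat_template), and the LlamaForCausalLM(config) line is an assumption about how the script builds the model rather than something shown in the diff.

from transformers import LlamaConfig, LlamaForCausalLM, TrainingArguments

# Model configuration: this commit doubles hidden_size and num_hidden_layers.
config = LlamaConfig(
    vocab_size=tokenizer.vocab_size,      # tokenizer is loaded earlier in app.py
    hidden_size=int(512 * 2),
    intermediate_size=int(1024 / 1),
    num_hidden_layers=int(8 * 2),
    num_attention_heads=int(8 / 1),
    max_position_embeddings=int(512 / 1),
    rms_norm_eps=1e-6,
)
model = LlamaForCausalLM(config)          # assumed: the script instantiates the model from this config

# Training arguments: this commit sets per_device_train_batch_size to 32.
args = TrainingArguments(
    output_dir="mayo",
    num_train_epochs=16,
    per_device_train_batch_size=32,
    gradient_accumulation_steps=4,
    learning_rate=1e-5,
    save_steps=100000,
)

Net effect of the change: the model gets wider and deeper (hidden_size 512 -> 1024, num_hidden_layers 8 -> 16), and with per_device_train_batch_size=32 and gradient_accumulation_steps=4 the effective batch size becomes 128 samples per device per optimizer step.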