-
-
Save manujosephv/e52addcf412d76fbabae11ef826aa362 to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
from transformers import Trainer, TrainingArguments

# Fine-tuning configuration for a causal LM (GPT-2) run.
# NOTE(review): fixed two deprecated-API issues from the original snippet:
#   - `per_gpu_*_batch_size` -> `per_device_*_batch_size` (renamed in transformers 3.x)
#   - `prediction_loss_only` moved from the Trainer constructor (deprecated kwarg)
#     into TrainingArguments, where it now lives.
training_args = TrainingArguments(
    output_dir="./storage/gpt2-motivational_v6",  # where checkpoints and the final model are written
    overwrite_output_dir=True,    # overwrite any existing contents of output_dir
    num_train_epochs=10,          # total number of training epochs
    per_device_train_batch_size=32,  # training batch size per GPU/CPU
    per_device_eval_batch_size=64,   # evaluation batch size per GPU/CPU
    logging_steps=500,            # log/evaluate every 500 update steps
    save_steps=500,               # save a checkpoint every 500 update steps
    warmup_steps=500,             # linear LR warmup over the first 500 steps
    prediction_loss_only=True,    # skip collecting logits during evaluation; report loss only
)

# NOTE(review): `model`, `data_collator`, `train_dataset`, and `test_dataset`
# are assumed to be defined earlier in the file — confirm against the full script.
trainer = Trainer(
    model=model,
    args=training_args,
    data_collator=data_collator,
    train_dataset=train_dataset,
    eval_dataset=test_dataset,
)
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment