mod gpt

parent e588d3af66
commit 01005c52c5

gpt.py | 9
@@ -37,15 +37,18 @@ def main():
     tokenized_dataset = dataset.map(tokenize_function, batched=True)

     # Model
-    model = AutoModelForCausalLM.from_pretrained(MODEL_NAME)
+    model = AutoModelForCausalLM.from_pretrained(
+        MODEL_NAME,
+        mean_resizing=False  # Disable the warning
+    )
     model.resize_token_embeddings(len(tokenizer))

     # Training configuration
     training_args = TrainingArguments(
         output_dir="./results",
-        num_train_pochs=1,
+        num_train_epochs=1,  # Corrected parameter name
         per_device_train_batch_size=2,
-        remove_unused_columns=True,  # Key change
+        remove_unused_columns=True,
         logging_steps=1
     )
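
For context, a minimal runnable sketch of how the changed hunk could sit inside the rest of gpt.py. Everything outside the lines shown in the diff (the dataset source, tokenize_function, the Trainer/data-collator wiring, and the MODEL_NAME value) is an assumption for illustration, not part of the commit.

# Hypothetical surrounding script for the changed hunk; names other than
# those visible in the diff (MODEL_NAME, tokenize_function, training_args)
# are assumptions.
from datasets import load_dataset
from transformers import (
    AutoModelForCausalLM,
    AutoTokenizer,
    DataCollatorForLanguageModeling,
    Trainer,
    TrainingArguments,
)

MODEL_NAME = "gpt2"  # assumed; the actual value is not shown in the diff


def main():
    tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
    if tokenizer.pad_token is None:
        tokenizer.pad_token = tokenizer.eos_token  # GPT-2 has no pad token

    # Assumed dataset and tokenization step feeding the line shown in the diff
    dataset = load_dataset("text", data_files={"train": "train.txt"})

    def tokenize_function(examples):
        return tokenizer(examples["text"], truncation=True, max_length=128)

    tokenized_dataset = dataset.map(tokenize_function, batched=True)

    # Model (simplified loading; see the diff for the commit's exact arguments)
    model = AutoModelForCausalLM.from_pretrained(MODEL_NAME)
    model.resize_token_embeddings(len(tokenizer))

    # Training configuration, matching the values in the diff
    training_args = TrainingArguments(
        output_dir="./results",
        num_train_epochs=1,
        per_device_train_batch_size=2,
        remove_unused_columns=True,
        logging_steps=1,
    )

    # Causal-LM collator pads each batch and copies input_ids into labels
    data_collator = DataCollatorForLanguageModeling(tokenizer, mlm=False)

    trainer = Trainer(
        model=model,
        args=training_args,
        train_dataset=tokenized_dataset["train"],
        data_collator=data_collator,
    )
    trainer.train()


if __name__ == "__main__":
    main()

With remove_unused_columns=True the raw text column is dropped before batching, so only input_ids and attention_mask reach the collator, which then builds the labels for causal-LM training.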