"""Download the allegro/multislav-5lang Marian checkpoint and save a local copy.

This script performs no training: it fetches the pretrained model and its
tokenizer from the Hugging Face Hub and writes both to ./models/ably so they
can be reloaded offline later.
"""

from transformers import MarianMTModel, MarianTokenizer

# Hugging Face Hub identifier of the pretrained multilingual Slavic MT model.
model_name = "allegro/multislav-5lang"

# NOTE(review): the original used MarianForCausalLM, which instantiates only
# the decoder half of this encoder-decoder translation checkpoint.
# MarianMTModel is the full seq2seq class MarianMT checkpoints are published
# for — confirm against the intended use before deploying.
model = MarianMTModel.from_pretrained(model_name)
tokenizer = MarianTokenizer.from_pretrained(model_name)

# Persist model weights/config and tokenizer files side by side, so a single
# directory can be passed back to from_pretrained() for offline loading.
model.save_pretrained("./models/ably")
tokenizer.save_pretrained("./models/ably")

# Original message claimed the model "was trained" — nothing here trains;
# report only what actually happened (saved).
print("✅ Model i tokenizer zostały zapisane!")