From 8ee5f5cbd986b3489a1cf89a11e950a7d65b9258 Mon Sep 17 00:00:00 2001
From: "l.gabrysiak"
Date: Wed, 26 Feb 2025 10:22:13 +0100
Subject: [PATCH] test

---
 test.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/test.py b/test.py
index 2b0102f..5d47e17 100644
--- a/test.py
+++ b/test.py
@@ -6,7 +6,7 @@
 tokenizer = AutoTokenizer.from_pretrained(model_path)
 tokenizer.pad_token = tokenizer.eos_token
 model.config.pad_token_id = tokenizer.eos_token_id
-def generate_response(prompt, max_length=100):
+def generate_response(prompt, max_length=1000):
     inputs = tokenizer(prompt, return_tensors="pt", padding=True, truncation=True)
     outputs = model.generate(
         inputs.input_ids,
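
For context, a minimal sketch of what test.py might look like with this patch applied. The hunk is truncated, so the model loading line, the model_path value, the attention_mask/decode handling, and the final usage line are assumptions, not part of the diff.

# Hypothetical reconstruction of test.py after the max_length change.
# Anything not visible in the hunk above is an assumption.
from transformers import AutoModelForCausalLM, AutoTokenizer

model_path = "gpt2"  # assumption: any causal LM checkpoint path
model = AutoModelForCausalLM.from_pretrained(model_path)  # assumption: not shown in the hunk
tokenizer = AutoTokenizer.from_pretrained(model_path)
tokenizer.pad_token = tokenizer.eos_token
model.config.pad_token_id = tokenizer.eos_token_id

def generate_response(prompt, max_length=1000):
    inputs = tokenizer(prompt, return_tensors="pt", padding=True, truncation=True)
    outputs = model.generate(
        inputs.input_ids,
        attention_mask=inputs.attention_mask,  # assumption: mask passed alongside the ids
        max_length=max_length,
    )
    # assumption: the function returns the decoded text
    return tokenizer.decode(outputs[0], skip_special_tokens=True)

print(generate_response("Hello, how are you?"))  # assumption: example usage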