diff --git a/test.py b/test.py
index 2b0102f..5d47e17 100644
--- a/test.py
+++ b/test.py
@@ -6,7 +6,7 @@
 tokenizer = AutoTokenizer.from_pretrained(model_path)
 tokenizer.pad_token = tokenizer.eos_token
 model.config.pad_token_id = tokenizer.eos_token_id
-def generate_response(prompt, max_length=100):
+def generate_response(prompt, max_length=1000):
     inputs = tokenizer(prompt, return_tensors="pt", padding=True, truncation=True)
     outputs = model.generate(
         inputs.input_ids,
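For context, here is a minimal sketch of what the full `generate_response` function looks like after this change. The hunk cuts off inside the `model.generate(` call, so everything past the lines visible in the diff (the `attention_mask`, the explicit `pad_token_id`, and the decode step) is an assumption, as is the placeholder `model_path`.

```python
# Sketch only: fills in the parts of generate_response the diff truncates.
from transformers import AutoModelForCausalLM, AutoTokenizer

model_path = "gpt2"  # placeholder; the real model_path is not shown in the diff
tokenizer = AutoTokenizer.from_pretrained(model_path)
model = AutoModelForCausalLM.from_pretrained(model_path)
tokenizer.pad_token = tokenizer.eos_token
model.config.pad_token_id = tokenizer.eos_token_id

def generate_response(prompt, max_length=1000):
    inputs = tokenizer(prompt, return_tensors="pt", padding=True, truncation=True)
    outputs = model.generate(
        inputs.input_ids,
        attention_mask=inputs.attention_mask,  # assumed; not visible in the diff
        max_length=max_length,                 # raised from 100 to 1000 by this change
        pad_token_id=tokenizer.eos_token_id,
    )
    # Assumed: decode the first (and only) sequence back to text.
    return tokenizer.decode(outputs[0], skip_special_tokens=True)
```

One caveat on the change itself: `max_length` in `generate()` counts the prompt tokens plus the generated ones, so the bump from 100 to 1000 mostly gives longer prompts room to finish; if the intent is to cap only the generated text, `max_new_tokens` would be the more direct knob.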