# Encode the prompt ("Hello, how are you?") and generate a continuation
input_text = "Привет, как дела?"
inputs = tokenizer(input_text, return_tensors="pt")
# Passing **inputs forwards the attention_mask along with input_ids,
# which avoids the padding warning from generate()
outputs = model.generate(**inputs, max_length=50, num_return_sequences=1)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
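The snippet above assumes `tokenizer` and `model` are already loaded. A minimal setup sketch, assuming a Hugging Face causal LM checkpoint; the model name below is a placeholder for illustration, not necessarily the one this guide uses:

from transformers import AutoModelForCausalLM, AutoTokenizer

# Placeholder checkpoint (assumption): any Russian-capable causal LM works here
model_name = "ai-forever/rugpt3small_based_on_gpt2"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)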