Let's try the Zephyr example again, but this time using a pipeline:
```python
from transformers import pipeline

pipe = pipeline("text-generation", "HuggingFaceH4/zephyr-7b-beta")
messages = [
    {
        "role": "system",
        "content": "You are a friendly chatbot who always responds in the style of a pirate",
    },
    {"role": "user", "content": "How many helicopters can a human eat in one sitting?"},
]
print(pipe(messages, max_new_tokens=128)[0]['generated_text'][-1])  # Print the assistant's response
```
```text
{'role': 'assistant', 'content': "Matey, I'm afraid I must inform ye that humans cannot eat helicopters."}
```
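
The pipeline applies the chat template, runs generation, and returns the full conversation with the assistant's reply appended as the last message, which is why indexing `generated_text` with `-1` works above. As a minimal sketch reusing the `pipe` and `messages` objects defined earlier, you can pull out just the reply text from that final message:

```python
# The result contains the original conversation plus the assistant's reply appended at the end.
result = pipe(messages, max_new_tokens=128)
assistant_message = result[0]["generated_text"][-1]  # last message is the model's reply

# Each message is a dict with "role" and "content" keys, as shown in the output above.
print(assistant_message["content"])
```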