NickyNicky
committed on
Commit
•
e7656e9
1
Parent(s):
878fb8d
Update README.md
Browse files
README.md
CHANGED
@@ -234,17 +234,17 @@ en español responde solo en json:
|
|
234 |
inputs = tokenizer.encode(txt,
|
235 |
return_tensors="pt",
|
236 |
add_special_tokens=False).to("cuda:0")
|
237 |
-
max_new_tokens=
|
238 |
generation_config = GenerationConfig(
|
239 |
max_new_tokens=max_new_tokens,
|
240 |
-
temperature=0.
|
241 |
-
#top_p=0.
|
242 |
-
top_k=
|
243 |
-
repetition_penalty=1
|
244 |
do_sample=True,
|
245 |
)
|
246 |
-
outputs =
|
247 |
input_ids=inputs,
|
248 |
stopping_criteria=stopping_criteria_list,)
|
249 |
-
tokenizer.decode(outputs[0], skip_special_tokens=False)
|
250 |
```
|
|
|
234 |
inputs = tokenizer.encode(txt,
|
235 |
return_tensors="pt",
|
236 |
add_special_tokens=False).to("cuda:0")
|
237 |
+
max_new_tokens=1500
|
238 |
generation_config = GenerationConfig(
|
239 |
max_new_tokens=max_new_tokens,
|
240 |
+
temperature=0.15,
|
241 |
+
# top_p=0.55,
|
242 |
+
top_k=50,
|
243 |
+
repetition_penalty=1.1,
|
244 |
do_sample=True,
|
245 |
)
|
246 |
+
outputs = base_model.generate(generation_config=generation_config,
|
247 |
input_ids=inputs,
|
248 |
stopping_criteria=stopping_criteria_list,)
|
249 |
+
print(tokenizer.decode(outputs[0], skip_special_tokens=False) )
|
250 |
```
|