Update handler.py
Browse files — handler.py (+3 −2)
handler.py
CHANGED
@@ -35,7 +35,8 @@ class EndpointHandler:
|
|
35 |
image_url = inputs.get("url")
|
36 |
image_data = inputs.get("image_data")
|
37 |
prompt = inputs.get("prompt")
|
38 |
-
|
|
|
39 |
if not prompt:
|
40 |
logs.append("S'ha de proporcionar 'prompt' en 'inputs'.")
|
41 |
return {"error": "The 'prompt' must be provided in 'inputs'.", "logs": logs}
|
@@ -70,7 +71,7 @@ class EndpointHandler:
|
|
70 |
try:
|
71 |
logs.append("Processant imatge amb el model.")
|
72 |
inputs = self.processor(prompt, image, return_tensors="pt").to("cuda")
|
73 |
-
output = self.model.generate(**inputs, max_new_tokens=100)
|
74 |
result = self.processor.decode(output[0], skip_special_tokens=True)
|
75 |
logs.append("Processament complet.")
|
76 |
return {"input_prompt": prompt, "model_output": result, "logs": logs}
|
|
|
35 |
image_url = inputs.get("url")
|
36 |
image_data = inputs.get("image_data")
|
37 |
prompt = inputs.get("prompt")
|
38 |
+
max_tokens = inputs.get("max_tokens", 100)
|
39 |
+
|
40 |
if not prompt:
|
41 |
logs.append("S'ha de proporcionar 'prompt' en 'inputs'.")
|
42 |
return {"error": "The 'prompt' must be provided in 'inputs'.", "logs": logs}
|
|
|
71 |
try:
|
72 |
logs.append("Processant imatge amb el model.")
|
73 |
inputs = self.processor(prompt, image, return_tensors="pt").to("cuda")
|
74 |
+
output = self.model.generate(**inputs, max_new_tokens=max_tokens)
|
75 |
result = self.processor.decode(output[0], skip_special_tokens=True)
|
76 |
logs.append("Processament complet.")
|
77 |
return {"input_prompt": prompt, "model_output": result, "logs": logs}
|