machinelearning147 committed on
Commit
bfa01d0
1 Parent(s): 57a7c34

Add application file

Browse files
Files changed (1) hide show
  1. app.py +26 -0
app.py ADDED
@@ -0,0 +1,26 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import gradio as gr
2
+ import torch
3
+ from transformers import AutoModelForCausalLM, AutoTokenizer
4
# Select the compute device: prefer a CUDA GPU when one is present.
if torch.cuda.is_available():
    device = "cuda"
else:
    device = "cpu"

model_path = "ibm-granite/granite-3b-code-base"

# Fetch the tokenizer and model weights from the Hugging Face hub,
# placing the model directly on the chosen device.
tokenizer = AutoTokenizer.from_pretrained(model_path)
model = AutoModelForCausalLM.from_pretrained(model_path, device_map=device)
model.eval()  # inference mode: disables dropout / training-only behavior
def generate_code(input_text):
    """Generate a code continuation for *input_text* with the Granite model.

    Args:
        input_text: Prompt string, e.g. a code snippet to complete.

    Returns:
        str: The decoded output (prompt plus up to 200 newly generated
        tokens), with special tokens stripped.
    """
    # BatchEncoding.to() moves every tensor field (input_ids,
    # attention_mask, ...) to the target device in one call — no need
    # to loop over the keys by hand.
    input_tokens = tokenizer(input_text, return_tensors="pt").to(device)
    # Inference only: disable autograd bookkeeping to save memory/time.
    with torch.no_grad():
        output = model.generate(**input_tokens, max_new_tokens=200)
    # batch_decode returns one string per sequence; we generate a single
    # sequence, so take element [0].
    return tokenizer.batch_decode(output, skip_special_tokens=True)[0]
# Gradio UI: one textbox in, generated text out.
# FIX: gr.inputs.Textbox is the Gradio 2.x namespace, removed in 3.x/4.x —
# components now live at the top level (gr.Textbox).
iface = gr.Interface(
    fn=generate_code,
    inputs=gr.Textbox(lines=2, placeholder="Enter code snippet here..."),
    outputs="text",
)

# Start the Gradio web server (blocks until shut down).
iface.launch()