import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
import numpy as np
import gradio as gr
import spaces

tokenizer = AutoTokenizer.from_pretrained("gpt2")
model = AutoModelForCausalLM.from_pretrained("gpt2")
tokenizer.pad_token_id = tokenizer.eos_token_id
print("Loading finished.")

print(f"Is CUDA available: {torch.cuda.is_available()}")  # True
if torch.cuda.is_available():
    print(f"CUDA device: {torch.cuda.get_device_name(torch.cuda.current_device())}")

STYLE = """
.custom-container {
    width: 100%;
    display: grid;
    align-items: center;
    margin: 0!important;
    overflow: scroll;
}
.prose ul ul {
    margin: 0!important;
    font-size: 10px!important;
}
.prose td, th {
    padding-left: 2px;
    padding-right: 2px;
    padding-top: 0;
    padding-bottom: 0;
}
.tree {
    padding: 0px;
    margin: 0!important;
    box-sizing: border-box;
    font-size: 10px;
    width: 100%;
    min-width: 2000px;
    height: auto;
    text-align: center;
}
.tree ul {
    padding-top: 20px;
    position: relative;
    transition: .5s;
    margin: 0!important;
    display: flex;
    flex-direction: row;
    justify-content: center;
    gap: 10px;
}
.tree li {
    display: inline-table;
    text-align: center;
    list-style-type: none;
    position: relative;
    padding-top: 10px;
    transition: .5s;
}
.tree li::before, .tree li::after {
    content: '';
    position: absolute;
    top: 0;
    right: 50%;
    border-top: 1px solid #ccc;
    width: 51%;
    height: 10px;
}
.tree li::after {
    right: auto;
    left: 50%;
    border-left: 1px solid #ccc;
}
.tree li:only-child::after, .tree li:only-child::before {
    display: none;
}
.tree li:only-child {
    padding-top: 0;
}
.tree li:first-child::before, .tree li:last-child::after {
    border: 0 none;
}
.tree li:last-child::before {
    border-right: 1px solid #ccc;
    border-radius: 0 5px 0 0;
    -webkit-border-radius: 0 5px 0 0;
    -moz-border-radius: 0 5px 0 0;
}
.tree li:first-child::after {
    border-radius: 5px 0 0 0;
    -webkit-border-radius: 5px 0 0 0;
    -moz-border-radius: 5px 0 0 0;
}
.tree ul ul::before {
    content: '';
    position: absolute;
    top: 0;
    left: 50%;
    border-left: 1px solid #ccc;
    width: 0;
    height: 20px;
}
.tree li a {
    border: 1px solid #ccc;
    padding: 5px;
    display: inline-grid;
    border-radius: 5px;
    text-decoration-line: none;
    transition: .5s;
}
.tree li a span {
    color: #666;
    padding: 5px;
    font-size: 12px;
    text-transform: uppercase;
    letter-spacing: 1px;
    font-weight: 500;
}
/* Hover section */
.tree li a:hover, .tree li a:hover i, .tree li a:hover span, .tree li a:hover+ul li a {
    background: #c8e4f8;
    color: #000;
}
.tree li a:hover+ul li::after, .tree li a:hover+ul li::before, .tree li a:hover+ul::before, .tree li a:hover+ul ul::before {
    border-color: #94a0b4;
}
.chosen {
    background-color: red;
}
"""

def generate_nodes(token, node):
    """Recursively generate HTML for the tree nodes."""
    # Assumed node attributes (not all visible in this snippet): `scores` holds the
    # per-token step scores at this node, `cumulative_score` the score of the
    # sequence generated so far, and `children` maps child tokens to child nodes.
    scores, sequence_prob = node.scores, node.cumulative_score
    html_content = f"<li> <a href='#'> <span> <b>{token}</b> </span>"
    html_content += "<table><tr><th>Token</th><th>Step score</th><th>Total score</th></tr>"
    for token_idx in np.argsort(scores)[::-1][:4]:  # top-4 candidate continuations
        html_content += (
            f"<tr><td>{tokenizer.decode([int(token_idx)])}</td>"
            f"<td>{scores[token_idx]:.4f}</td>"
            f"<td>{scores[token_idx] + sequence_prob:.4f}</td></tr>"
        )
    html_content += "</table> </a>"
    if node.children:  # recurse to render the subtree below this node
        html_content += "<ul>" + "".join(generate_nodes(t, child) for t, child in node.children.items()) + "</ul>"
    return html_content + "</li>"
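

# A minimal, hedged usage sketch (not part of the original app): it shows how the
# "Step score" and "Total score" columns above can be computed with GPT-2 for one
# beam-search step. The function name and arguments are illustrative assumptions.
def _demo_step_scores(prompt: str, sequence_prob: float = 0.0, top_k: int = 4):
    inputs = tokenizer(prompt, return_tensors="pt")
    with torch.no_grad():
        next_token_logits = model(**inputs).logits[0, -1]
    # Step scores are the log-probabilities of each candidate next token; the total
    # score adds them to the cumulative score of the sequence generated so far.
    scores = torch.log_softmax(next_token_logits, dim=-1).numpy()
    for token_idx in np.argsort(scores)[::-1][:top_k]:
        print(
            repr(tokenizer.decode([int(token_idx)])),
            f"step={scores[token_idx]:.4f}",
            f"total={scores[token_idx] + sequence_prob:.4f}",
        )

# Example (illustrative): _demo_step_scores("The quick brown fox", sequence_prob=-3.21)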