import base64
import json
import os

import streamlit as st
import timm
import torch
import torch.nn as nn
import torch.optim.lr_scheduler as lr_scheduler
import torchvision
import lightning.pytorch as pl
from PIL import Image
from torchmetrics import F1Score, Recall, Accuracy
from transformers import BioGptTokenizer, BioGptForCausalLM

# BioGPT generates a short natural-language description of the predicted class.
text_model = BioGptForCausalLM.from_pretrained("microsoft/biogpt")
tokenizer = BioGptTokenizer.from_pretrained("microsoft/biogpt")

# Class-index -> label mapping saved at training time.
labels_path = 'labels.json'
with open(labels_path) as json_data:
    idx_to_labels = json.load(json_data)


class FineTuneModel(pl.LightningModule):
    """Image classifier fine-tuned from a pretrained timm backbone."""

    def __init__(self, model_name, num_classes, learning_rate, dropout_rate,
                 beta1, beta2, eps):
        super().__init__()
        self.model_name = model_name
        self.num_classes = num_classes
        self.learning_rate = learning_rate
        self.beta1 = beta1
        self.beta2 = beta2
        self.eps = eps
        self.dropout_rate = dropout_rate
        self.model = timm.create_model(self.model_name, pretrained=True,
                                       num_classes=self.num_classes)
        self.loss_fn = nn.CrossEntropyLoss()
        self.f1 = F1Score(task='multiclass', num_classes=self.num_classes)
        self.recall = Recall(task='multiclass', num_classes=self.num_classes)
        self.accuracy = Accuracy(task='multiclass', num_classes=self.num_classes)
        # Alternative head (disabled): unfreeze the backbone and replace the
        # classifier with a dropout + linear layer.
        # for param in self.model.parameters():
        #     param.requires_grad = True
        # self.model.classifier = nn.Sequential(
        #     nn.Dropout(p=self.dropout_rate),
        #     nn.Linear(self.model.classifier.in_features, self.num_classes))

    def forward(self, x):
        return self.model(x)

    def training_step(self, batch, batch_idx):
        x, y = batch
        y_hat = self.model(x)
        loss = self.loss_fn(y_hat, y)
        acc = self.accuracy(y_hat.argmax(dim=1), y)
        f1 = self.f1(y_hat.argmax(dim=1), y)
        recall = self.recall(y_hat.argmax(dim=1), y)
        self.log('train_loss', loss, on_step=False, on_epoch=True)
        self.log('train_acc', acc, on_step=False, on_epoch=True)
        self.log('train_f1', f1, on_step=False, on_epoch=True)
        self.log('train_recall', recall, on_step=False, on_epoch=True)
        return loss

    def validation_step(self, batch, batch_idx):
        x, y = batch
        y_hat = self.model(x)
        loss = self.loss_fn(y_hat, y)
        acc = self.accuracy(y_hat.argmax(dim=1), y)
        f1 = self.f1(y_hat.argmax(dim=1), y)
        recall = self.recall(y_hat.argmax(dim=1), y)
        self.log('val_loss', loss, on_step=False, on_epoch=True)
        self.log('val_acc', acc, on_step=False, on_epoch=True)
        self.log('val_f1', f1, on_step=False, on_epoch=True)
        self.log('val_recall', recall, on_step=False, on_epoch=True)

    def configure_optimizers(self):
        # Adam with step decay: the learning rate is multiplied by 0.1
        # every 10 epochs.
        optimizer = torch.optim.Adam(self.model.parameters(),
                                     lr=self.learning_rate,
                                     betas=(self.beta1, self.beta2),
                                     eps=self.eps)
        scheduler = lr_scheduler.StepLR(optimizer, step_size=10, gamma=0.1)
        return {'optimizer': optimizer, 'lr_scheduler': scheduler}


# Load the fine-tuned model for inference.
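# A minimal sketch of the loading step: "model.ckpt" and the hyperparameter
# values below are hypothetical placeholders, not values from the original app.
model = FineTuneModel.load_from_checkpoint(
    "model.ckpt",                     # hypothetical checkpoint path
    model_name="resnet50",            # hypothetical backbone
    num_classes=len(idx_to_labels),
    learning_rate=1e-4,
    dropout_rate=0.2,
    beta1=0.9,
    beta2=0.999,
    eps=1e-8,
)
model.eval()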
# Build the page header: embed the logo as a base64 data URI so the header
# does not depend on a static file server.
current_dir = os.getcwd()
logo_path = os.path.join(current_dir, "logo.png")
with open(logo_path, "rb") as f:
    image_data = f.read()
image_base64 = base64.b64encode(image_data).decode("utf-8")

# Add custom CSS for the header.
header_css = """
"""
st.markdown(header_css, unsafe_allow_html=True)

# Render the header with the embedded logo and the disclaimer.
header_html = f"""
<div class="header">
    <img src="data:image/png;base64,{image_base64}" alt="logo">
    <p>Disclaimer: This web app is for demonstration purposes only and not
    intended for commercial use. Contact: contact@1001epochs.co.uk for the
    full solution.</p>
</div>
"""
st.markdown(header_html, unsafe_allow_html=True)
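# ---------------------------------------------------------------------------
# Inference UI: upload an image, classify it with the fine-tuned model,
# explain the prediction with Captum, and ask BioGPT for a short description
# of the predicted label. This is a minimal sketch of how the pieces above
# fit together; the 224x224 input size, the string-keyed labels.json layout,
# and the BioGPT prompt format are assumptions, not taken from the original.
# ---------------------------------------------------------------------------
import numpy as np
from captum.attr import IntegratedGradients
from captum.attr import visualization as viz

preprocess = torchvision.transforms.Compose([
    torchvision.transforms.Resize((224, 224)),  # assumed training resolution
    torchvision.transforms.ToTensor(),
])

uploaded_file = st.file_uploader("Upload an image", type=["png", "jpg", "jpeg"])
if uploaded_file is not None:
    image = Image.open(uploaded_file).convert("RGB")
    st.image(image, caption="Uploaded image")

    # Classify the uploaded image.
    input_tensor = preprocess(image).unsqueeze(0)
    with torch.no_grad():
        logits = model(input_tensor)
    pred_idx = logits.argmax(dim=1).item()
    label = idx_to_labels[str(pred_idx)]  # assumes string-keyed JSON mapping
    st.subheader(f"Prediction: {label}")

    # Attribute the prediction to input pixels with Integrated Gradients and
    # overlay the attribution heat map on the resized input.
    ig = IntegratedGradients(model)
    attributions = ig.attribute(input_tensor, target=pred_idx)
    fig, _ = viz.visualize_image_attr(
        attributions.squeeze(0).permute(1, 2, 0).detach().cpu().numpy(),
        original_image=np.array(image.resize((224, 224))),
        method="blended_heat_map",
        use_pyplot=False,
    )
    st.pyplot(fig)

    # Ask BioGPT to elaborate on the predicted condition.
    prompt = f"{label} is"
    inputs = tokenizer(prompt, return_tensors="pt")
    with torch.no_grad():
        generated = text_model.generate(**inputs, max_new_tokens=60)
    st.write(tokenizer.decode(generated[0], skip_special_tokens=True))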