import json

from datasets import load_dataset


class get_files:
    @staticmethod
    def predefined_dataset(dataset_name):
        """Load a named dataset from the Hugging Face Hub (train split only)."""
        global dataset  # bad practice, I know... But just bear with me. Will later update to state dict.
        dataset = load_dataset(dataset_name, split="train")
        return 'Successfully loaded dataset'
    @staticmethod
    def uploaded_dataset(file):
        """Read an uploaded JSON Lines file (one JSON object per line)."""
        global dataset  # bad practice, I know... But just bear with me. Will later update to state dict.
        dataset = []
        if file is None:
            return "File not found. Please upload the file again."
        try:
            with open(file, 'r') as f:  # use a separate name so the `file` argument is not shadowed
                for line in f:
                    dataset.append(json.loads(line.strip()))
            return "File retrieved."
        except FileNotFoundError:
            return "File not found. Please upload the file again."
    @staticmethod
    def load_markdown_file(file_path):
        """Return the contents of a markdown file, or an error message."""
        try:
            with open(file_path, 'r') as f:
                return f.read()
        except FileNotFoundError:
            return "File not found. Please check the file path."
        except Exception as e:
            return f"Error loading file: {str(e)}"

def submit_weights(model, tokenizer, repository, model_out_name, token):
    """Push the fine-tuned model and its tokenizer to a Hugging Face Hub repository."""
    repo = repository + '/' + model_out_name
    model.push_to_hub(repo, token=token)
    tokenizer.push_to_hub(repo, token=token)
    return 0
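

# --- Usage sketch (illustrative only) ---
# A minimal example of how the helpers above might be called. The dataset name,
# file path, repository, and token below are placeholders, not part of this Space;
# the model/tokenizer lines are commented out because they assume a fine-tuned
# checkpoint that may not exist locally.
if __name__ == "__main__":
    # Load a predefined dataset by its Hub name (placeholder name).
    print(get_files.predefined_dataset("imdb"))

    # Or read an uploaded JSON Lines file; a missing file just returns a message.
    print(get_files.uploaded_dataset("uploaded_data.jsonl"))

    # Pushing weights needs a model/tokenizer pair and a write token, e.g.:
    # from transformers import AutoModelForCausalLM, AutoTokenizer
    # model = AutoModelForCausalLM.from_pretrained("path/to/finetuned-model")
    # tokenizer = AutoTokenizer.from_pretrained("path/to/finetuned-model")
    # submit_weights(model, tokenizer, "your-username", "your-model-name", token="hf_xxx")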