# highdeff1 / token creation from custom model.py
from transformers import AutoTokenizer, AutoModelForQuestionAnswering
# Load the pretrained model and its tokenizer from the local results directory
model_path = 'C:\\Users\\money\\OneDrive\\Pictures\\Blank Model\\untrained\\results'
tokenizer = AutoTokenizer.from_pretrained(model_path)
model = AutoModelForQuestionAnswering.from_pretrained(model_path)

# Save the tokenizer to a local 'tokenizer' directory so it can be reloaded later
tokenizer.save_pretrained('tokenizer')
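
# Optional sanity check (a minimal sketch, not part of the original script):
# reload the tokenizer from the 'tokenizer' directory it was just saved to
# and tokenize a sample string to confirm the saved files load correctly.
reloaded_tokenizer = AutoTokenizer.from_pretrained('tokenizer')
sample_encoding = reloaded_tokenizer("What is this model for?")
print(sample_encoding['input_ids'])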