# fb-housing-posts / main.py
from datasets import load_dataset, DatasetDict
# Load your dataset from a local JSONL file
dataset = load_dataset("json", data_files="listing.jsonl")
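# (Note: load_dataset with the "json" builder returns a DatasetDict whose only
# split is "train", hence the dataset["train"] access below.)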
# Split the dataset into train (70%) and a temporary split (30%)
split_dataset = dataset["train"].train_test_split(test_size=0.3)
train_dataset = split_dataset["train"]
temp_dataset = split_dataset["test"]
# Split the temporary split into validation and test
# (80% of it becomes validation, 20% test, i.e. 24% / 6% of the full dataset)
split_temp_dataset = temp_dataset.train_test_split(test_size=0.2)
validation_dataset = split_temp_dataset["train"]
test_dataset = split_temp_dataset["test"]
# Combine all splits into a DatasetDict
final_dataset = DatasetDict(
{"train": train_dataset, "validation": validation_dataset, "test": test_dataset}
)
# Save the split datasets to disk; save_to_disk writes Arrow files into a
# directory, so target a directory path rather than the source .jsonl file
final_dataset.save_to_disk("listing_splits")
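
# A minimal sketch of reloading the saved splits (assumes the "listing_splits"
# directory written by save_to_disk above); useful as a quick sanity check.
from datasets import load_from_disk

reloaded = load_from_disk("listing_splits")
print(reloaded)  # shows the train/validation/test splits and their row counts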