from pathlib import Path | |
import pandas as pd | |
import torch | |
import ujson | |
import webdataset as wds | |
from tqdm import tqdm | |
def load_json(json):
    """Decode a JSON document (str or bytes) into a Python object via ujson."""
    decoded = ujson.loads(json)
    return decoded
# Maps a webdataset sample-key extension to the decoder applied to that field.
load_map = dict(json=load_json)
def get_glob(path):
    """Return the sorted matches of *path* globbed against the current directory."""
    base = Path('.').absolute()
    return sorted(base.glob(path))
def chunker(l, n):
    """Yield successive slices of *l*, each *n* items long (the last may be shorter)."""
    for start in range(0, len(l), n):
        yield l[start:start + n]
def func(glob_path, combine_all=True):
    """Extract the 'json' metadata record from every sample of each WebDataset
    shard matched by *glob_path*, writing one parquet file per shard, and
    optionally a single concatenated parquet of all of them.

    Parameters
    ----------
    glob_path : str
        Glob pattern (relative to the current directory) selecting the
        shard files to process.
    combine_all : bool
        When True, also write 'combined_meta.parquet' containing the
        concatenation of every per-shard parquet just produced.
    """
    glob = get_glob(glob_path)
    for file in tqdm(glob, position=0):
        # Decode only the 'json' member of each sample; to_tuple yields
        # 1-tuples, hence the meta[0] below.
        ds = wds.WebDataset(str(file)).map_dict(**load_map).to_tuple('json')
        metas = pd.DataFrame([meta[0]
                              for meta in tqdm(ds, position=1, leave=False)])
        # NOTE(review): output is keyed by stem only and lands in the CWD, so
        # two shards with the same stem in different directories would collide.
        metas.to_parquet(f'{file.stem}.parquet')
    # Guard the empty-glob case: pd.concat raises ValueError when handed an
    # empty iterable, so skip combining when nothing was matched.
    if combine_all and glob:
        combined = pd.concat(
            pd.read_parquet(f'{file.stem}.parquet') for file in tqdm(glob))
        combined.to_parquet('combined_meta.parquet')