Upload wit_create_meta.py
wit_create_meta.py  +42 -0
wit_create_meta.py
ADDED
@@ -0,0 +1,42 @@
from pathlib import Path

import pandas as pd
import torch
import ujson
import webdataset as wds
from tqdm import tqdm


def load_json(json):
    return ujson.loads(json)


# Decoders applied per sample key by WebDataset's map_dict.
load_map = {
    'json': load_json,
}


def get_glob(path):
    return sorted(Path('.').absolute().glob(path))


def chunker(l, n):
    # Yield successive chunks of size n from list l.
    for i in range(0, len(l), n):
        yield l[i:i + n]


def func(glob_path, combine_all=True):
    # Extract the JSON metadata from every shard matched by glob_path and
    # write one parquet file per shard; optionally combine them afterwards.
    glob = get_glob(glob_path)

    for file in tqdm(glob, position=0):
        ds = wds.WebDataset(str(file)).map_dict(**load_map).to_tuple('json')

        metas = pd.DataFrame([meta[0]
                              for meta in tqdm(ds, position=1, leave=False)])

        metas.to_parquet(f'{file.stem}.parquet')

    if combine_all:
        combined = pd.concat(pd.read_parquet(
            f'{file.stem}.parquet') for file in tqdm(glob))
        combined.to_parquet('combined_meta.parquet')
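
The file defines func but no command-line entry point, so it is presumably imported or run interactively. A minimal usage sketch, assuming the WebDataset shards sit in the working directory as .tar files (the '*.tar' pattern is an assumption, not part of the upload):

# Hypothetical invocation; the shard pattern is an assumption.
# get_glob resolves it relative to the current working directory.
from wit_create_meta import func

func('*.tar', combine_all=True)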