kardosdrur committed
Commit 9c5a8e0
Parent(s): e42e35e
Added source files
- src/align_corpora.py +34 -0
- src/build_corpus.py +23 -0
- src/filter_alignment.py +18 -0
- src/push_corpus.py +11 -0
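The four scripts form a small pipeline: hash-align the two bitexts on their shared English side, drop ambiguous matches, assemble the trilingual corpus, and push it to the Hub. The commit itself ships no runner; the sketch below simply executes the scripts in the order implied by the files each one reads and writes (assuming a python interpreter on PATH and the Europarl raw files under raw/).

import subprocess

# Inferred execution order, based on each script's inputs and outputs.
for script in [
    "src/align_corpora.py",     # raw/*.en -> alignment.parquet
    "src/filter_alignment.py",  # alignment.parquet -> filtered_alignment.parquet
    "src/build_corpus.py",      # filtered_alignment.parquet -> corpus.parquet
    "src/push_corpus.py",       # corpus.parquet -> Hugging Face Hub
]:
    subprocess.run(["python", script], check=True)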
src/align_corpora.py
ADDED
@@ -0,0 +1,34 @@
"""Aligns corpora based on the English text using Murmurhash3"""
import mmh3
import numpy as np
import polars as pl

with open("raw/europarl-v7.da-en.en") as in_file:
    texts = list(in_file)
eng_da = pl.DataFrame(dict(text=texts, da_id=np.arange(len(texts))))
eng_da = eng_da.select(
    pl.col("da_id"), pl.col("text").map_elements(mmh3.hash64).alias("hash")
)
eng_da = eng_da.select(
    pl.col("da_id"),
    # This is necessary as Polars doesn't support 128 bit integers.
    pl.col("hash").list.first().alias("hash1"),
    pl.col("hash").list.last().alias("hash2"),
)

with open("raw/europarl-v7.sv-en.en") as in_file:
    texts = list(in_file)
eng_sv = pl.DataFrame(dict(text=texts, sv_id=np.arange(len(texts))))
eng_sv = eng_sv.select(
    pl.col("sv_id"), pl.col("text").map_elements(mmh3.hash64).alias("hash")
)
eng_sv = eng_sv.select(
    pl.col("sv_id"),
    # This is necessary as Polars doesn't support 128 bit integers.
    pl.col("hash").list.first().alias("hash1"),
    pl.col("hash").list.last().alias("hash2"),
)

alignment = eng_sv.join(eng_da, on=["hash1", "hash2"], how="inner")
alignment = alignment.select(pl.col("da_id", "sv_id"))
alignment.write_parquet("alignment.parquet")
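For context (not part of the commit): mmh3.hash64 returns a pair of signed 64-bit integers, which is why the script splits the hash into hash1 and hash2 columns before joining. A quick sketch of that behaviour:

import mmh3

# hash64 yields the 128-bit MurmurHash3 digest as two signed 64-bit halves.
h1, h2 = mmh3.hash64("An example English line\n")
print(type(h1), type(h2))  # <class 'int'> <class 'int'>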
src/build_corpus.py
ADDED
@@ -0,0 +1,23 @@
"""Builds corpus based on the alignment."""
import polars as pl

with open("raw/europarl-v7.da-en.da") as in_file:
    danish_texts = pl.Series(list(in_file))

with open("raw/europarl-v7.da-en.en") as in_file:
    english_texts = pl.Series(list(in_file))

with open("raw/europarl-v7.sv-en.sv") as in_file:
    swedish_texts = pl.Series(list(in_file))

alignment = pl.read_parquet("filtered_alignment.parquet")

corpus = pl.DataFrame(
    dict(
        da=danish_texts.take(alignment["da_id"]),
        en=english_texts.take(alignment["da_id"]),
        sv=swedish_texts.take(alignment["sv_id"]),
    )
)

corpus.write_parquet("corpus.parquet")
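As a side note, Series.take selects elements by integer position (newer Polars versions call this gather), which is why the English side of the da-en bitext can be indexed with da_id directly: the Danish and English files of that pair share line numbers. A toy illustration, independent of the commit:

import polars as pl

# Pick rows by position, in the order given by the index Series.
letters = pl.Series(["a", "b", "c", "d"])
idx = pl.Series([2, 0, 3])
print(letters.take(idx).to_list())  # ['c', 'a', 'd']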
src/filter_alignment.py
ADDED
@@ -0,0 +1,18 @@
"""Filters the alignment by removing all ids that occur multiple times."""
import polars as pl

alignment = pl.read_parquet("alignment.parquet")

# Selecting entries without hash collisions
good_da_ids = (
    alignment["da_id"].value_counts().filter(pl.col("counts") == 1)["da_id"]
)
good_sv_ids = (
    alignment["sv_id"].value_counts().filter(pl.col("counts") == 1)["sv_id"]
)

alignment = alignment.filter(
    pl.col("da_id").is_in(good_da_ids), pl.col("sv_id").is_in(good_sv_ids)
)

alignment.write_parquet("filtered_alignment.parquet")
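The collision filter relies on value_counts, which returns a small DataFrame of values and their counts; keeping only count == 1 leaves ids that matched exactly one sentence on the other side. A toy version of the same idea (column names follow older Polars, where the count column is named "counts"):

import polars as pl

# Keep only ids that occur exactly once.
ids = pl.Series("da_id", [1, 2, 2, 3])
unique_ids = ids.value_counts().filter(pl.col("counts") == 1)["da_id"]
print(sorted(unique_ids.to_list()))  # [1, 3]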
src/push_corpus.py
ADDED
@@ -0,0 +1,11 @@
"""Pushes corpus to hub."""
from functools import partial

import polars as pl
from datasets import Dataset

corpus = pl.read_parquet("corpus.parquet")

dataset = Dataset.from_generator(partial(corpus.iter_rows, named=True))
dataset = dataset.train_test_split(test_size=0.2, shuffle=True)
dataset.push_to_hub("kardosdrur/europarl-scandinavian")
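Once pushed, the dataset can presumably be loaded back with the standard datasets API; a minimal sketch:

from datasets import load_dataset

# Loads the train/test split created by push_corpus.py from the Hub.
dataset = load_dataset("kardosdrur/europarl-scandinavian")
print(dataset["train"][0])  # {'da': ..., 'en': ..., 'sv': ...}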