gabrielaltay and albertvillanova (HF staff) committed
Commit aa817f0
1 parent: 882ec24

Fix loading and support streaming (#2)


- Add data files (daf3e25da921ef394ca09c4951645041c41d8318)
- Update loading script with hosted data (e0421fe90a9d25b05fe6acca257ac7eccfb69544)
- Support streaming in parse_brat_file (2ee733db8c9f630266b93e7d1ea1ad67cd213518)
- Fix style (054eb3272a28b702827325bc6c099a262eb04b8f)


Co-authored-by: Albert Villanova <[email protected]>
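
As background for the streaming change, here is a minimal usage sketch of the loading path this commit enables. The Hub repository id "bigbio/bionlp_st_2019_bb" and the config name "bionlp_st_2019_bb_bigbio_kb" are assumptions for illustration and are not stated in this commit.

# Minimal usage sketch (assumed repo id and config name; adjust to your setup).
# Recent versions of datasets may also require trust_remote_code=True for scripts.
from datasets import load_dataset

# streaming=True iterates over the hosted archives lazily instead of
# materializing them locally, which is what this commit makes possible.
ds = load_dataset(
    "bigbio/bionlp_st_2019_bb",          # assumed Hub repository id
    name="bionlp_st_2019_bb_bigbio_kb",  # assumed config name
    split="train",
    streaming=True,
)
print(next(iter(ds))["id"])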

bionlp_st_2019_bb.py CHANGED
@@ -31,7 +31,7 @@ _DISPLAYNAME = "BioNLP 2019 BB"
 _SOURCE_VIEW_NAME = "source"
 _UNIFIED_VIEW_NAME = "bigbio"
 
-_LANGUAGES = ['English']
+_LANGUAGES = ["English"]
 _PUBMED = True
 _LOCAL = False
 _CITATION = """\
@@ -76,48 +76,16 @@ and applications in microbiology.
 
 _HOMEPAGE = "https://sites.google.com/view/bb-2019/dataset"
 
-_LICENSE = 'License information unavailable'
+_LICENSE = "License information unavailable"
 
+_SUBTASKS = ["norm", "norm+ner", "rel", "rel+ner", "kb", "kb+ner"]
+_FILENAMES = ["train", "dev", "test"]
 _URLs = {
-    "source": {
-        "norm": {
-            "train": "https://drive.google.com/uc?export=download&id=1aXbshxgytZ1Dhbmw7OULPFarPO1FbcM3",
-            "dev": "https://drive.google.com/uc?export=download&id=14jRZWF8VeluEYrV9EybV3LeGm4q5nH6s",
-            "test": "https://drive.google.com/uc?export=download&id=1BPDCFTVMmIlOowYA-DkeNNFjwTfHYPG6",
-        },
-        "norm+ner": {
-            "train": "https://drive.google.com/uc?export=download&id=1yKxBPMej8EYdVeU4QS1xquFfXM76F-2K",
-            "dev": "https://drive.google.com/uc?export=download&id=1Xk7h9bax533QWclO3Ur7aS07OATBF_bG",
-            "test": "https://drive.google.com/uc?export=download&id=1Cb5hQIPS3LIeUL-UWdqyWfKB52xUz9cp",
-        },
-        "rel": {
-            "train": "https://drive.google.com/uc?export=download&id=1gnc-ScNpssC3qrA7cVox4Iei7i96sYqC",
-            "dev": "https://drive.google.com/uc?export=download&id=1wJM9XOfmvIBcX23t9bzQX5fLZwWQJIwS",
-            "test": "https://drive.google.com/uc?export=download&id=1smhKA4LEPK5UJEyBLseq0mBaT9REUevu",
-        },
-        "rel+ner": {
-            "train": "https://drive.google.com/uc?export=download&id=1CPx9NxTPQbygqMtxw3d0hNFajhecqgss",
-            "dev": "https://drive.google.com/uc?export=download&id=1lVyCCuAJ5TmmTDz4S0dISBNiWGR745_7",
-            "test": "https://drive.google.com/uc?export=download&id=1uE8oY5m-7mSA-W-e6vownnAVV97IwHhA",
-        },
-        "kb": {
-            "train": "https://drive.google.com/uc?export=download&id=1Iuce3T_IArXWBbIJ7RXb_STaPnWKQBN-",
-            "dev": "https://drive.google.com/uc?export=download&id=14yON_Tc9dm8esWYDVxL-krw23sgTCcdL",
-            "test": "https://drive.google.com/uc?export=download&id=1wVqI_t9mirGUk71BkwkcKJv0VNGyaHDs",
-        },
-        "kb+ner": {
-            "train": "https://drive.google.com/uc?export=download&id=1WMl9eD4OZXq8zkkmHp3hSEvAqaAVui6L",
-            "dev": "https://drive.google.com/uc?export=download&id=1oOfOfjIfg1XnesXwaKvSDfqgnchuximG",
-            "test": "https://drive.google.com/uc?export=download&id=1_dRbgpGJUBCfF-iN2qOAgOBRvYmE7byW",
-        },
-    },
-    "bigbio_kb": {
-        "kb+ner": {
-            "train": "https://drive.google.com/uc?export=download&id=1WMl9eD4OZXq8zkkmHp3hSEvAqaAVui6L",
-            "dev": "https://drive.google.com/uc?export=download&id=1oOfOfjIfg1XnesXwaKvSDfqgnchuximG",
-            "test": "https://drive.google.com/uc?export=download&id=1_dRbgpGJUBCfF-iN2qOAgOBRvYmE7byW",
-        },
-    },
+    subtask: {
+        filename: f"data/{subtask}/BioNLP-OST-2019_BB-{subtask}_{filename}.zip"
+        for filename in _FILENAMES
+    }
+    for subtask in _SUBTASKS
 }
 
 _SUPPORTED_TASKS = [
@@ -282,48 +250,47 @@ class bionlp_st_2019_bb(datasets.GeneratorBasedBuilder):
     def _split_generators(
         self, dl_manager: datasets.DownloadManager
     ) -> List[datasets.SplitGenerator]:
-        version = self.config.name.split("_")[-2]
-        if version == "bigbio":
-            version = "kb+ner"
-        my_urls = _URLs[self.config.schema][version]
-        data_files = {
-            "train": Path(dl_manager.download_and_extract(my_urls["train"]))
-            / f"BioNLP-OST-2019_BB-{version}_train",
-            "dev": Path(dl_manager.download_and_extract(my_urls["dev"]))
-            / f"BioNLP-OST-2019_BB-{version}_dev",
-            "test": Path(dl_manager.download_and_extract(my_urls["test"]))
-            / f"BioNLP-OST-2019_BB-{version}_test",
-        }
+        subtask = self.config.name.split("_")[4]
+        if subtask == "bigbio":
+            subtask = "kb+ner"
+        my_urls = _URLs[subtask]
+        data_files = dl_manager.download_and_extract(my_urls)
         return [
             datasets.SplitGenerator(
                 name=datasets.Split.TRAIN,
-                gen_kwargs={"data_files": data_files["train"]},
+                gen_kwargs={"data_files": dl_manager.iter_files(data_files["train"])},
             ),
             datasets.SplitGenerator(
                 name=datasets.Split.VALIDATION,
-                gen_kwargs={"data_files": data_files["dev"]},
+                gen_kwargs={"data_files": dl_manager.iter_files(data_files["dev"])},
             ),
             datasets.SplitGenerator(
                 name=datasets.Split.TEST,
-                gen_kwargs={"data_files": data_files["test"]},
+                gen_kwargs={"data_files": dl_manager.iter_files(data_files["test"])},
            ),
        ]

    def _generate_examples(self, data_files: Path):
        if self.config.schema == "source":
-            txt_files = list(data_files.glob("*txt"))
-            for guid, txt_file in enumerate(txt_files):
+            guid = 0
+            for data_file in data_files:
+                txt_file = Path(data_file)
+                if txt_file.suffix != ".txt":
+                    continue
                example = self.parse_brat_file(txt_file)
                example["id"] = str(guid)
                yield guid, example
+                guid += 1
        elif self.config.schema == "bigbio_kb":
-            txt_files = list(data_files.glob("*txt"))
-            for guid, txt_file in enumerate(txt_files):
-                example = brat_parse_to_bigbio_kb(
-                    self.parse_brat_file(txt_file)
-                )
+            guid = 0
+            for data_file in data_files:
+                txt_file = Path(data_file)
+                if txt_file.suffix != ".txt":
+                    continue
+                example = brat_parse_to_bigbio_kb(self.parse_brat_file(txt_file))
                example["id"] = str(guid)
                yield guid, example
+                guid += 1
        else:
            raise ValueError(f"Invalid config: {self.config.name}")

@@ -447,9 +414,11 @@ class bionlp_st_2019_bb(datasets.GeneratorBasedBuilder):
        ann_lines = []
        for suffix in annotation_file_suffixes:
            annotation_file = txt_file.with_suffix(suffix)
-            if annotation_file.exists():
+            try:
                with annotation_file.open(encoding="utf8") as f:
                    ann_lines.extend(f.readlines())
+            except Exception:
+                continue

        example["text_bound_annotations"] = []
        example["events"] = []
data/kb+ner/BioNLP-OST-2019_BB-kb+ner_dev.zip ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:826bf7711b295557ec518a7828de448741bf1f8a6e7de02771dbbb4b33b3e9e3
+size 140927
data/kb+ner/BioNLP-OST-2019_BB-kb+ner_test.zip ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0d4875dd0a2fd8a81cba9ade5a7f12b589767e6373f7e5edd7c60177a6b6adea
+size 138265
data/kb+ner/BioNLP-OST-2019_BB-kb+ner_train.zip ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:73845eda51d1c0f7dab64d8d1660dc8e1f7cb3109b1275819f6ee32de6e9982f
+size 278279
data/kb/BioNLP-OST-2019_BB-kb_dev.zip ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ef774dc4df3a00577106d3fd3e9a7d5f6187a5e2aa3b15edaf91997449ec6a55
+size 133209
data/kb/BioNLP-OST-2019_BB-kb_test.zip ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4bac030a47ba15b3320732adafc2e9a4a7c5df2b1e2f80c61052f65884b9f836
+size 157340
data/kb/BioNLP-OST-2019_BB-kb_train.zip ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1a8c5e824b99bd2fa77cb751b059fc72d70e508181ae92db38630687d87c2564
+size 250631
data/norm+ner/BioNLP-OST-2019_BB-norm+ner_dev.zip ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c98c85dc1374de87457edf3b5b66a1329f33bdcb056f193751944e31d6fc8589
+size 137838
data/norm+ner/BioNLP-OST-2019_BB-norm+ner_test.zip ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:54eff5a2dbe20437a8995a9eab9710f477e30a17abf5bff02ef2d489e9468c66
+size 139805
data/norm+ner/BioNLP-OST-2019_BB-norm+ner_train.zip ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a9b5a2a8eb3ca648482a192dd24240e3ab58aaf2a5818c8d3f9e5cbc914874a1
+size 271933
data/norm/BioNLP-OST-2019_BB-norm_dev.zip ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:12f69cb434df877965888ed80062649f7756d815414687b0a3895a8b9079660c
+size 132646
data/norm/BioNLP-OST-2019_BB-norm_test.zip ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3aa5edaede2dd259c902b6f3c8b7fbdb5c9ecc46b0f0260ca01d90f1b4273026
+size 162450
data/norm/BioNLP-OST-2019_BB-norm_train.zip ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5e2e41a09d3f1d963b215ac86de948cd17a7a49692109bae9dae5b8a8cfbbf8f
+size 259327
data/rel+ner/BioNLP-OST-2019_BB-rel+ner_dev.zip ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:924ccfa71914ba0bb8d2488e68f1d2cab05945c8353d4f414a4a0cec422e6453
+size 130535
data/rel+ner/BioNLP-OST-2019_BB-rel+ner_test.zip ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ce4b2f668439c101ece1fbbdc9c5f0c9aeb150a8b8b6fe64f8791d22ecc4888b
+size 139035
data/rel+ner/BioNLP-OST-2019_BB-rel+ner_train.zip ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f6094619bfa826cd44e97236cfe330e3cba3f495aedf17522b4c3952b1e5b68f
+size 258239
data/rel/BioNLP-OST-2019_BB-rel_dev.zip ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0025e85d5e160085971a5e8d00d1d6faae77bcf9dbf29166a0ee602dc13790cc
+size 122736
data/rel/BioNLP-OST-2019_BB-rel_test.zip ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:64086a5835d5210ddc7aaa19bd24d970faa26f039c2af1128779715dddde1f84
+size 158581
data/rel/BioNLP-OST-2019_BB-rel_train.zip ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6b412a477eb6bd38fc310ec944972342e7e00f9383f567b82f58920d0137ca3b
+size 230990