BeardedJohn committed on
Commit
e672fc9
1 Parent(s): e7a5c9f

Upload ubb-endava-conll-assistant-ner.py

Browse files
Files changed (1) hide show
  1. ubb-endava-conll-assistant-ner.py +137 -0
ubb-endava-conll-assistant-ner.py ADDED
@@ -0,0 +1,137 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # coding=utf-8
2
+ # Copyright 2020 HuggingFace Datasets Authors.
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+
16
+ # Lint as: python3
17
+
18
+
19
+ import os
20
+
21
+ import datasets
22
+
23
+
24
# Module-level logger, obtained through the datasets library's logging facility.
logger = datasets.logging.get_logger(__name__)


# Citation and description are intentionally left empty for this dataset script.
_CITATION = ""

_DESCRIPTION = ""

# _URL = "."
# File names of the three splits, expected to sit next to this script.
_TRAINING_FILE = "train.txt"
_DEV_FILE = "validation.txt"
_TEST_FILE = "test.txt"
35
+
36
+
37
class UBBDemoConfig(datasets.BuilderConfig):
    """Configuration for the UBBDemo dataset builder.

    This config defines no options of its own; every keyword argument is
    handed straight to ``datasets.BuilderConfig``.
    """

    def __init__(self, **kwargs):
        """Create a UBBDemo configuration.

        Args:
            **kwargs: keyword arguments forwarded to the parent config.
        """
        super().__init__(**kwargs)
46
+
47
+
48
class UBBDemo(datasets.GeneratorBasedBuilder):
    """UBBDemo dataset: CoNLL-style NER data with the classic PER/ORG/LOC/MISC
    entity types plus Endava-specific PROJ/ROLE/TEAM/FILE types."""

    BUILDER_CONFIGS = [
        UBBDemoConfig(name="UBBDemo", version=datasets.Version("1.0.0"), description="UBBDemo dataset"),
    ]

    def _info(self):
        """Return dataset metadata: features schema, homepage, and citation."""
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=datasets.Features(
                {
                    "id": datasets.Value("string"),
                    "tokens": datasets.Sequence(datasets.Value("string")),
                    "ner_tags": datasets.Sequence(
                        datasets.features.ClassLabel(
                            names=[
                                "O",
                                "B-PER",
                                "I-PER",
                                "B-ORG",
                                "I-ORG",
                                "B-LOC",
                                "I-LOC",
                                "B-MISC",
                                "I-MISC",
                                "B-PROJ",
                                "I-PROJ",
                                "B-ROLE",
                                "I-ROLE",
                                "B-TEAM",
                                "I-TEAM",
                                "B-FILE",
                                "I-FILE",
                            ]
                        )
                    ),
                }
            ),
            supervised_keys=None,
            homepage="",
            citation=_CITATION,
        )

    def _split_generators(self, dl_manager):
        """Return SplitGenerators for the train/validation/test splits.

        The split files are expected to live next to this script, hence the
        relative "./" base path.
        """
        path = "./"
        data_files = {
            "train": os.path.join(path, _TRAINING_FILE),
            "validation": os.path.join(path, _DEV_FILE),
            "test": os.path.join(path, _TEST_FILE),
        }

        downloaded_files = dl_manager.download_and_extract(data_files)
        return [
            datasets.SplitGenerator(name=datasets.Split.TRAIN, gen_kwargs={"filepath": downloaded_files["train"]}),
            datasets.SplitGenerator(name=datasets.Split.VALIDATION, gen_kwargs={"filepath": downloaded_files["validation"]}),
            datasets.SplitGenerator(name=datasets.Split.TEST, gen_kwargs={"filepath": downloaded_files["test"]}),
        ]

    def _generate_examples(self, filepath):
        """Yield ``(guid, example)`` pairs from a CoNLL-formatted file.

        Sentences are separated by blank lines (or ``-DOCSTART-`` markers);
        each data line holds space-separated columns where column 0 is the
        token and column 3 is the NER tag.

        Args:
            filepath: path to a UTF-8 CoNLL file for one split.
        """
        logger.info("⏳ Generating examples from = %s", filepath)
        with open(filepath, encoding="utf-8") as f:
            guid = 0
            tokens = []
            ner_tags = []
            for line in f:
                if line.startswith("-DOCSTART-") or line == "" or line == "\n":
                    if tokens:
                        yield guid, {
                            "id": str(guid),
                            "tokens": tokens,
                            "ner_tags": ner_tags,
                        }
                        guid += 1
                        tokens = []
                        ner_tags = []
                else:
                    # UBBDemo tokens are space separated: col 0 = token, col 3 = NER tag.
                    splits = line.split(" ")
                    tokens.append(splits[0])
                    ner_tags.append(splits[3].rstrip())
            # Last example — guard so a trailing blank line does not emit an
            # empty sentence (the original yielded unconditionally here).
            if tokens:
                yield guid, {
                    "id": str(guid),
                    "tokens": tokens,
                    "ner_tags": ner_tags,
                }