Note: this question started differently, but I deleted all previous (now unnecessary) information.
I have a CsvDataset which consists of a label (float) and a text (string). I want to transform every line so that I can feed it into a pretrained Bert model. Unfortunately, I cannot get past the .map function
files = glob.glob("example*.tsv")
d = tf.data.experimental.CsvDataset(files,
[tf.float32, tf.string],
select_cols=[3,4],
field_delim="\t",
header=True)
parsed_dataset = d.map(lambda label, text: tf.py_func(_decode_record, [label, text], [tf.float32, tf.string]))
def _decode_record(label, text):
"""Decodes a row to a TensorFlow example."""
label_list = [1, 2, 3, 4, 5]
label_map = {}
for (i, label) in enumerate(label_list):
label_map[label] = i
tokens_a = tokenizer.tokenize(text)
# Account for [CLS] and [SEP] with "- 2"
if len(tokens_a) > max_seq_length - 2:
tokens_a = tokens_a[0: (max_seq_length - 2)]
tokens = []
segment_ids = []
tokens.append("[CLS]")
segment_ids.append(0)
for token in tokens_a:
tokens.append(token)
segment_ids.append(0)
tokens.append("[SEP]")
segment_ids.append(0)
input_ids = tokenizer.convert_tokens_to_ids(tokens)
# The mask has 1 for real tokens and 0 for padding tokens. Only real
# tokens are attended to.
input_mask = [1] * len(input_ids)
# Zero-pad up to the sequence length.
while len(input_ids) < max_seq_length:
input_ids.append(0)
input_mask.append(0)
segment_ids.append(0)
assert len(input_ids) == max_seq_length
assert len(input_mask) == max_seq_length
assert len(segment_ids) == max_seq_length
label_id = label_map[label]
features = collections.OrderedDict()
features["input_ids"] = create_int_feature(input_ids)
features["input_mask"] = create_int_feature(input_mask)
features["segment_ids"] = create_int_feature(segment_ids)
features["label_ids"] = create_int_feature([label_id])
features["is_real_example"] = create_int_feature(
[int(True)])
tf_example = tf.train.Example(features=tf.train.Features(feature=features))
return tf_example
This breaks with: tensorflow.python.framework.errors_impl.UnimplementedError: Unsupported object type Example [[{{node PyFunc}}]] [Op:IteratorGetNextSync]