Class: Informers::TokenClassificationPipeline
- Defined in: lib/informers/pipelines.rb
Instance Method Summary
- #call(texts, ignore_labels: ["O"], aggregation_strategy: "simple") ⇒ Object
- #get_tag(entity_name) ⇒ Object
- #group_entities(entities) ⇒ Object
- #group_sub_entities(entities) ⇒ Object
Methods inherited from Pipeline
Constructor Details
This class inherits a constructor from Informers::Pipeline
Instance Method Details
#call(texts, ignore_labels: ["O"], aggregation_strategy: "simple") ⇒ Object
# File 'lib/informers/pipelines.rb', line 75

def call(
  texts,
  ignore_labels: ["O"],
  aggregation_strategy: "simple"
)
  is_batched = texts.is_a?(Array)

  # Run tokenization
  model_inputs = @tokenizer.(is_batched ? texts : [texts],
    padding: true,
    truncation: true,
    return_offsets: true
  )

  # Run model
  outputs = @model.(model_inputs)

  logits = outputs.logits
  id2label = @model.config[:id2label]

  to_return = []
  logits.length.times do |i|
    ids = model_inputs[:input_ids][i]
    batch = logits[i]
    offsets = model_inputs[:offsets][i]

    # List of tokens that aren't ignored
    tokens = []
    batch.length.times do |j|
      token_data = batch[j]
      top_score_index = Utils.max(token_data)[1]
      entity = id2label ? id2label[top_score_index.to_s] : "LABEL_#{top_score_index}"

      if ignore_labels.include?(entity)
        # We predicted a token that should be ignored. So, we skip it.
        next
      end

      # TODO add option to keep special tokens?
      word = @tokenizer.decode([ids[j]], skip_special_tokens: true)

      if word == ""
        # Was a special token. So, we skip it.
        next
      end

      scores = Utils.softmax(token_data)

      tokens << {
        entity: entity,
        score: scores[top_score_index],
        index: j,
        word: word,
        start: offsets[j][0],
        end: offsets[j][1]
      }
    end

    case aggregation_strategy
    when "simple"
      tokens = group_entities(tokens)
    when "none"
      # do nothing
    else
      raise ArgumentError, "Invalid aggregation_strategy"
    end

    to_return << tokens
  end

  is_batched ? to_return : to_return[0]
end
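A minimal usage sketch, assuming the pipeline is obtained through the gem's Informers.pipeline helper and its "ner" task alias; the input text and the output hash shown are illustrative, not taken from this page:

ner = Informers.pipeline("ner")

# A single string returns one array of entity hashes; an array of strings
# returns one array per input. With the default aggregation_strategy of
# "simple", adjacent sub-tokens are merged via group_entities.
ner.("Ruby was created by Matz")
# => e.g. [{entity_group: "PER", score: 0.99, word: "Matz", start: 20, end: 24}]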
#get_tag(entity_name) ⇒ Object
# File 'lib/informers/pipelines.rb', line 162

def get_tag(entity_name)
  if entity_name.start_with?("B-")
    bi = "B"
    tag = entity_name[2..]
  elsif entity_name.start_with?("I-")
    bi = "I"
    tag = entity_name[2..]
  else
    # It's not in B-, I- format
    # Default to I- for continuation.
    bi = "I"
    tag = entity_name
  end
  [bi, tag]
end
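For reference, a sketch of the values get_tag returns for the usual IOB label shapes (calls written as if the method were invoked directly):

get_tag("B-PER") # => ["B", "PER"]  beginning of an entity
get_tag("I-PER") # => ["I", "PER"]  continuation of an entity
get_tag("PER")   # => ["I", "PER"]  no prefix: treated as a continuation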
#group_entities(entities) ⇒ Object
# File 'lib/informers/pipelines.rb', line 178

def group_entities(entities)
  entity_groups = []
  entity_group_disagg = []

  entities.each do |entity|
    if entity_group_disagg.empty?
      entity_group_disagg << entity
      next
    end

    # If the current entity is similar and adjacent to the previous entity,
    # append it to the disaggregated entity group
    # The split is meant to account for the "B" and "I" prefixes
    # Shouldn't merge if both entities are B-type
    bi, tag = get_tag(entity[:entity])
    _last_bi, last_tag = get_tag(entity_group_disagg[-1][:entity])

    if tag == last_tag && bi != "B"
      # Modify subword type to be previous_type
      entity_group_disagg << entity
    else
      # If the current entity is different from the previous entity
      # aggregate the disaggregated entity group
      entity_groups << group_sub_entities(entity_group_disagg)
      entity_group_disagg = [entity]
    end
  end

  if entity_group_disagg.any?
    # it's the last entity, add it to the entity groups
    entity_groups << group_sub_entities(entity_group_disagg)
  end

  entity_groups
end
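A sketch of the merging rule with hand-written token hashes (scores, indexes, and offsets are made up; the merged word comes from the tokenizer's convert_tokens_to_string, here assumed to join BERT-style "##" sub-tokens):

tokens = [
  {entity: "B-PER", score: 0.99, index: 4, word: "New", start: 10, end: 13},
  {entity: "I-PER", score: 0.97, index: 5, word: "##ton", start: 13, end: 16},
  {entity: "B-LOC", score: 0.95, index: 7, word: "London", start: 20, end: 26}
]
group_entities(tokens)
# => e.g. [
#   {entity_group: "PER", score: 0.98, word: "Newton", start: 10, end: 16},
#   {entity_group: "LOC", score: 0.95, word: "London", start: 20, end: 26}
# ]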
#group_sub_entities(entities) ⇒ Object
# File 'lib/informers/pipelines.rb', line 146

def group_sub_entities(entities)
  # Get the first entity in the entity group
  entity = entities[0][:entity].split("-", 2)[-1]
  scores = entities.map { |entity| entity[:score] }
  tokens = entities.map { |entity| entity[:word] }

  entity_group = {
    entity_group: entity,
    score: scores.sum / scores.count.to_f,
    word: @tokenizer.convert_tokens_to_string(tokens),
    start: entities[0][:start],
    end: entities[-1][:end]
  }
  entity_group
end
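Illustrative input and output for a single group (values made up; the merged word assumes a tokenizer whose convert_tokens_to_string joins whole words with spaces):

group_sub_entities([
  {entity: "B-LOC", score: 0.96, word: "San", start: 0, end: 3},
  {entity: "I-LOC", score: 0.94, word: "Francisco", start: 4, end: 13}
])
# => {entity_group: "LOC", score: 0.95, word: "San Francisco", start: 0, end: 13}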