Class: Tml::Tokenizers::Decoration

Inherits:
Object
  • Object
show all
Defined in:
lib/tml/tokenizers/decoration.rb

Constant Summary collapse

RESERVED_TOKEN =
'tml'
RE_SHORT_TOKEN_START =
'\[[\w]*:'
RE_SHORT_TOKEN_END =
'\]'
RE_LONG_TOKEN_START =
# matches e.g. "[link]"
'\[[\w]*\]'
RE_LONG_TOKEN_END =
# matches e.g. "[/link]"
'\[\/[\w]*\]'
RE_HTML_TOKEN_START =
# matches e.g. "<link>"
'<[^\>]*>'
RE_HTML_TOKEN_END =
# matches e.g. "</link>"
'<\/[^\>]*>'
RE_TEXT =
# any run of characters containing no [, ], < or > (previously '[\w\s!.:{}\(\)\|,?]*')
'[^\[\]<>]+'

Instance Attribute Summary collapse

Class Method Summary collapse

Instance Method Summary collapse

Constructor Details

#initialize(text, context = {}, opts = {}) ⇒ Decoration

Returns a new instance of Decoration.



72
73
74
75
76
77
# File 'lib/tml/tokenizers/decoration.rb', line 72

# Builds a tokenizer for +text+, wrapping it in the reserved outer
# token so the parser always has a single root, then tokenizes it.
#
# context - Hash of token name => decoration handler (Proc/String/Hash)
# opts    - Hash of options (e.g. :allowed_tokens)
def initialize(text, context = {}, opts = {})
  @context = context
  @opts = opts
  @text = "[#{RESERVED_TOKEN}]#{text}[/#{RESERVED_TOKEN}]"
  tokenize
end

Instance Attribute Details

#contextObject (readonly)

Returns the value of attribute context.



56
57
58
# File 'lib/tml/tokenizers/decoration.rb', line 56

def context
  @context
end

#fragmentsObject (readonly)

Returns the value of attribute fragments.



56
57
58
# File 'lib/tml/tokenizers/decoration.rb', line 56

def fragments
  @fragments
end

#optsObject (readonly)

Returns the value of attribute opts.



56
57
58
# File 'lib/tml/tokenizers/decoration.rb', line 56

def opts
  @opts
end

#textObject (readonly)

Returns the value of attribute text.



56
57
58
# File 'lib/tml/tokenizers/decoration.rb', line 56

def text
  @text
end

#tokensObject (readonly)

Returns the value of attribute tokens.



56
57
58
# File 'lib/tml/tokenizers/decoration.rb', line 56

def tokens
  @tokens
end

Class Method Details

.required?(label) ⇒ Boolean

Returns:

  • (Boolean)


68
69
70
# File 'lib/tml/tokenizers/decoration.rb', line 68

# True when the label contains decoration tokens — either bracketed
# (e.g. "[bold]") or html-style (e.g. "<b>").
#
# @param label [String]
# @return [Boolean]
def self.required?(label)
  # include? yields a real Boolean, matching the `?` predicate
  # convention; the original `index(..) or index(..)` leaked
  # Integer/nil and used low-precedence `or`.
  label.include?('[') || label.include?('<')
end

Instance Method Details

#allowed_token?(token) ⇒ Boolean

Returns:

  • (Boolean)


163
164
165
166
# File 'lib/tml/tokenizers/decoration.rb', line 163

# A token is allowed unless an explicit :allowed_tokens list is
# present and the token is missing from it.
#
# @return [Boolean]
def allowed_token?(token)
  allowed = opts[:allowed_tokens]
  allowed.nil? || allowed.include?(token)
end

#apply(token, value) ⇒ Object



168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
# File 'lib/tml/tokenizers/decoration.rb', line 168

# Decorates +value+ using the handler registered for +token+ in the
# context, falling back to the configured default decoration.
# The reserved wrapper token and disallowed tokens pass through as-is.
def apply(token, value)
  return value if token == RESERVED_TOKEN
  return value unless allowed_token?(token)

  # `handler` avoids shadowing Kernel#method
  handler = context[token.to_sym] || context[token.to_s]

  if handler
    case handler
    when Proc
      handler.call(value)
    when Array, Hash
      # attribute-style handlers are rendered through the default template
      default_decoration(token, value)
    when String
      handler.to_s.gsub('{$0}', value)
    else
      value
    end
  elsif Tml.config.default_token_value(normalize_token(token), :decoration)
    default_decoration(token, value)
  else
    value
  end
end

#default_decoration(token_name, token_value) ⇒ Object



139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
# File 'lib/tml/tokenizers/decoration.rb', line 139

# Renders +token_value+ through the default decoration template
# configured for +token_name+ (e.g. '<strong>{$0}</strong>'),
# substituting any named {$attr} placeholders from the context.
def default_decoration(token_name, token_value)
  template = Tml.config.default_token_value(normalize_token(token_name), :decoration)

  unless template
    Tml.logger.error("Invalid decoration token value for #{token_name} in #{text}")
    return token_value
  end

  token_values = context[token_name.to_sym] || context[token_name.to_s]

  # substitute the wrapped value first, then any named attributes;
  # non-mutating gsub keeps the configured template untouched
  result = template.gsub('{$0}', token_value.to_s)

  if token_values.is_a?(Hash)
    token_values.each_key do |key|
      result = result.gsub("{$#{key}}", token_values[key].to_s)
    end
  end

  # strip attribute placeholders that were never supplied
  result.gsub(/\{\$[^}]*\}/, '')
end

#evaluate(expr) ⇒ Object



201
202
203
204
205
206
207
208
209
210
211
# File 'lib/tml/tokenizers/decoration.rb', line 201

# Recursively evaluates a parsed token tree. Plain strings are
# returned untouched; arrays are [token, child, child, ...] — the
# children are evaluated, joined, and decorated via #apply.
def evaluate(expr)
  return expr unless expr.is_a?(Array)

  token, *children = expr
  value = children.map { |child| evaluate(child) }.join
  apply(token, value)
end

#normalize_token(name) ⇒ Object



197
198
199
# File 'lib/tml/tokenizers/decoration.rb', line 197

# Strips a trailing numeric suffix from a token name so numbered
# tokens (e.g. :link1, :link2) share one default decoration.
#
# @param name [String, Symbol]
# @return [String] e.g. normalize_token(:link1) => 'link'
def normalize_token(name)
  # \d+\z anchors strictly to end-of-string; the original
  # gsub(/(\d)*$/, '') used a per-line anchor and a pattern that also
  # matched empty strings, doing redundant replacements.
  name.to_s.sub(/\d+\z/, '')
end

#parseObject



91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
# File 'lib/tml/tokenizers/decoration.rb', line 91

# Consumes the next fragment and dispatches on its shape:
# short token "[name:", long token "[name]", html token "<name ...>",
# or plain text. Token fragments recurse into #parse_tree.
def parse
  return @text unless fragments
  token = fragments.shift

  case token
  when /#{RE_SHORT_TOKEN_START}/
    parse_tree(token.gsub(/[\[:]/, ''), :short)
  when /#{RE_LONG_TOKEN_START}/
    parse_tree(token.gsub(/[\[\]]/, ''), :long)
  when /#{RE_HTML_TOKEN_START}/
    # self-closing tags (e.g. "<br/>") pass through untouched
    token.index('/>') ? token : parse_tree(token.gsub(/[<>]/, '').split(' ').first, :html)
  else
    token.to_s
  end
end

#parse_tree(name, type = :short) ⇒ Object



111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
# File 'lib/tml/tokenizers/decoration.rb', line 111

# Builds a nested tree for the token +name+ by consuming fragments
# until the closing fragment matching +type+ is reached. Children are
# parsed recursively via #parse (mutual recursion), so nested tokens
# become nested arrays: [name, child, child, ...].
def parse_tree(name, type = :short)
  tree = [name]
  # record each distinct user-facing token; the reserved wrapper is excluded
  @tokens << name unless (@tokens.include?(name) or name == RESERVED_TOKEN)

  if type == :short
    # short tokens ("[name: value]") strip leading whitespace from the
    # first string child only
    first = true
    until fragments.first.nil? or fragments.first.match(/#{RE_SHORT_TOKEN_END}/)
      value = parse
      if first and value.is_a?(String)
        value = value.lstrip
        first = false
      end
      tree << value
    end
  elsif type == :long
    until fragments.first.nil? or fragments.first.match(/#{RE_LONG_TOKEN_END}/)
      tree << parse
    end
  elsif type == :html
    until fragments.first.nil? or fragments.first.match(/#{RE_HTML_TOKEN_END}/)
      tree << parse
    end
  end

  # discard the closing fragment itself
  fragments.shift
  tree
end

#substituteObject



213
214
215
# File 'lib/tml/tokenizers/decoration.rb', line 213

# Parses and evaluates the text, then removes the closing reserved
# wrapper token that the constructor added around the input.
#
# @return [String] the decorated text
def substitute
  # build the wrapper from RESERVED_TOKEN so this stays in sync with
  # #initialize instead of hard-coding '[/tml]'
  evaluate(parse).gsub("[/#{RESERVED_TOKEN}]", '')
end

#tokenizeObject



79
80
81
82
83
84
85
86
87
88
89
# File 'lib/tml/tokenizers/decoration.rb', line 79

# Splits the text into fragments: token openers/closers (short, long,
# html) and plain-text runs, in source order. Resets the token list.
def tokenize
  pattern = Regexp.new(
    [
      RE_SHORT_TOKEN_START,
      RE_SHORT_TOKEN_END,
      RE_LONG_TOKEN_START,
      RE_LONG_TOKEN_END,
      RE_HTML_TOKEN_START,
      RE_HTML_TOKEN_END,
      RE_TEXT
    ].join('|')
  )
  @fragments = text.scan(pattern)
  @tokens = []
end