Class: Marty::DataExporter

- Inherits: Object
- Defined in: lib/marty/data_exporter.rb
Class Method Summary

- .decode_json(s) ⇒ Object
- .do_export(ts, klass, sort_field = nil, exclude_attrs = []) ⇒ Object
  Given a Mcfly klass, generate an export array.
- .do_export_query_result(klass, qres, exclude_attrs = []) ⇒ Object
- .encode_json(s) ⇒ Object
- .export_attrs(klass, obj, attrs = nil, exclude_attrs = []) ⇒ Object
- .export_headers(klass, attrs = nil, exclude_attrs = []) ⇒ Object
- .export_obj(obj) ⇒ Object
  Export a single object to a hash (FIXME: inefficient implementation).
- .get_attrs_in_order(klass, attrs) ⇒ Object
- .hash_array_keys(hl) ⇒ Object
  Given an array of hashes, return the set of all keys.
- .hash_array_merge(hl, transpose) ⇒ Object
- .to_csv(obj, config = nil) ⇒ Object
Class Method Details
.decode_json(s) ⇒ Object
# File 'lib/marty/data_exporter.rb', line 32

def self.decode_json(s)
  Zlib.inflate Base64.strict_decode64(s)
end
.do_export(ts, klass, sort_field = nil, exclude_attrs = []) ⇒ Object
Given a Mcfly klass, generate an export array. This can use a lot of memory if the result set is large.
# File 'lib/marty/data_exporter.rb', line 145

def self.do_export(ts, klass, sort_field = nil, exclude_attrs = [])
  query = klass

  if Mcfly.has_mcfly?(klass)
    ts = Mcfly.normalize_infinity(ts)
    query = query.where('obsoleted_dt >= ? AND created_dt < ?', ts, ts)
  end

  do_export_query_result(klass, query.order(sort_field || :id), exclude_attrs)
end
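For illustration only, a hedged usage sketch; the model name Gemini::FeeSchedule and its columns are hypothetical, not part of the library:

rows = Marty::DataExporter.do_export(
  DateTime.parse('2015-06-01'),  # ts: point-in-time snapshot of the Mcfly data
  Gemini::FeeSchedule,           # hypothetical Mcfly-tracked model
  :name,                         # sort_field
  ['comment']                    # exclude_attrs: columns to omit
)
# rows.first is the header row; the remaining entries are one row per record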
.do_export_query_result(klass, qres, exclude_attrs = []) ⇒ Object
# File 'lib/marty/data_exporter.rb', line 156

def self.do_export_query_result(klass, qres, exclude_attrs = [])
  # strip _id from assoc fields
  header = [export_headers(klass, nil, exclude_attrs).flatten]

  header + qres.map do |obj|
    export_attrs(klass, obj, nil, exclude_attrs).flatten(1)
  end
end
.encode_json(s) ⇒ Object
# File 'lib/marty/data_exporter.rb', line 28

def self.encode_json(s)
  Base64.strict_encode64 Zlib.deflate(s)
end
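The two methods are inverses: encode_json deflates and Base64-encodes a string, and decode_json reverses both steps. A quick round trip:

payload = '{"rates": [1.25, 1.5]}'
encoded = Marty::DataExporter.encode_json(payload)   # compact, ASCII-safe string
Marty::DataExporter.decode_json(encoded) == payload  # => true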
.export_attrs(klass, obj, attrs = nil, exclude_attrs = []) ⇒ Object
# File 'lib/marty/data_exporter.rb', line 92

def self.export_attrs(klass, obj, attrs = nil, exclude_attrs = [])
  col_types = Marty::DataConversion.col_types(klass)

  attr_list_raw = (attrs || col_types.keys).map(&:to_s) - exclude_attrs
  attr_list = get_attrs_in_order(klass, attr_list_raw)

  attr_list.map do |c|
    v = obj.send(c.to_sym)

    type = col_types[c]

    # return [value] if not assoc or nil
    next [v] if !type.is_a?(Hash)

    # no child row, return nils for each field
    next [nil] * type[:assoc_keys].count if v.nil?

    assoc_keys = type[:assoc_keys]
    assoc_class = type[:assoc_class]

    assoc_obj = assoc_class.find(v)

    # FIXME: this recursion will fail if a reference which then
    # makes sub-references is nil. To handle this, we'd need to
    # create the export structure first.
    export_attrs(assoc_class, assoc_obj, assoc_keys).flatten(1)
  end
end
.export_headers(klass, attrs = nil, exclude_attrs = []) ⇒ Object
# File 'lib/marty/data_exporter.rb', line 119

def self.export_headers(klass, attrs = nil, exclude_attrs = [])
  col_types = Marty::DataConversion.col_types(klass)

  attr_list_raw = (attrs || col_types.keys).map(&:to_s) - exclude_attrs
  attr_list = get_attrs_in_order(klass, attr_list_raw)

  attr_list.map do |c|
    type = col_types[c]

    next c unless type.is_a?(Hash)

    # remove _id
    c = c[0..-4]

    assoc_keys = type[:assoc_keys]

    # if association has a single key, just use col name
    next c if assoc_keys.length == 1

    assoc_class = type[:assoc_class]

    export_headers(assoc_class, assoc_keys).map { |k| "#{c}__#{k}" }
  end
end
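For a plain column the header is the column name. For an association column the trailing _id is dropped; if the associated class exposes a single lookup key the bare name is used, otherwise one header per key is generated with a double-underscore separator. A hedged sketch with a hypothetical model:

# hypothetical: Gemini::Adjustment has an amount column plus a user_id
# association whose class exposes two lookup keys (name, version)
Marty::DataExporter.export_headers(Gemini::Adjustment)
# => e.g. ["amount", ["user__name", "user__version"]]
# (callers such as do_export_query_result flatten this into a single header row)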
.export_obj(obj) ⇒ Object
Export a single object to a hash (FIXME: inefficient implementation).
# File 'lib/marty/data_exporter.rb', line 167

def self.export_obj(obj)
  klass = obj.class

  headers = export_headers(klass)
  rec = export_attrs(klass, obj).flatten

  Hash[headers.zip(rec)]
end
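A hedged sketch of typical use, again with a hypothetical model and record:

adj = Gemini::FeeSchedule.find_by(name: 'standard')  # hypothetical lookup
Marty::DataExporter.export_obj(adj)
# => e.g. {"name"=>"standard", "amount"=>100.0, ...}  header => attribute value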
.get_attrs_in_order(klass, attrs) ⇒ Object
# File 'lib/marty/data_exporter.rb', line 86

def self.get_attrs_in_order(klass, attrs)
  return attrs unless klass.const_defined?(:EXPORT_ORDER)

  klass::EXPORT_ORDER.select { |attr| attrs.include?(attr) }
end
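If the class defines an EXPORT_ORDER constant, that constant controls both the ordering and which of the requested attrs survive; otherwise attrs is returned unchanged. A self-contained sketch with a made-up class:

class ReportRow                                    # hypothetical class
  EXPORT_ORDER = ['name', 'amount', 'created_dt']  # export in this order
end

Marty::DataExporter.get_attrs_in_order(ReportRow, ['amount', 'name'])
# => ["name", "amount"]   (attrs absent from EXPORT_ORDER are dropped)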
.hash_array_keys(hl) ⇒ Object
Given an array of hashes, return the set of all keys.
# File 'lib/marty/data_exporter.rb', line 3

def self.hash_array_keys(hl)
  hl.each_with_object(Set.new) { |h, keys| keys.merge(h.keys) }
end
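For example:

Marty::DataExporter.hash_array_keys([{ 'a' => 1, 'b' => 2 }, { 'b' => 3, 'c' => 4 }])
# => #<Set: {"a", "b", "c"}>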
.hash_array_merge(hl, transpose) ⇒ Object
# File 'lib/marty/data_exporter.rb', line 7

def self.hash_array_merge(hl, transpose)
  # given a list of hashes hl, generates a merged hash. The
  # resulting hash contains a superset of all the hash keys. The
  # values are corresponding values from each hash in hl.
  # e.g. the following
  #
  # [{"a"=>1, "b"=>2}, {"a"=>11, "c"=>33}, {"a"=>1111, "b"=>222, "c"=>333}]
  #
  # maps to ...
  #
  # [["a", "b", "c"], [1, 2, nil], [11, nil, 33], [1111, 222, 333]]
  keys = hash_array_keys(hl)

  return keys.each_with_object({}) do |k, rh|
    rh[k] = hl.map { |h| h[k] }
  end if transpose

  [keys.to_a] + hl.map { |h| keys.map { |k| h[k] } }
end
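Using the input from the comment above: with a falsy transpose the result is a header row plus one row per hash; with a truthy transpose it is a hash from key to the column of values.

hl = [{ 'a' => 1, 'b' => 2 }, { 'a' => 11, 'c' => 33 }, { 'a' => 1111, 'b' => 222, 'c' => 333 }]

Marty::DataExporter.hash_array_merge(hl, false)
# => [["a", "b", "c"], [1, 2, nil], [11, nil, 33], [1111, 222, 333]]

Marty::DataExporter.hash_array_merge(hl, true)
# => {"a"=>[1, 11, 1111], "b"=>[2, nil, 222], "c"=>[nil, 33, 333]}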
.to_csv(obj, config = nil) ⇒ Object
# File 'lib/marty/data_exporter.rb', line 36

def self.to_csv(obj, config = nil)
  obj = [obj] unless obj.respond_to? :map

  config ||= {}

  # if all array items are hashes, we merge them
  obj = hash_array_merge(obj, config['transpose']) if obj.is_a?(Array) && obj.all? { |x| x.is_a? Hash }

  # symbolize config keys as expected by CSV.generate
  conf = config.each_with_object({}) do |(k, v), h|
    h[k.to_sym] = v unless k.to_s == 'transpose'
  end

  # FIXME: very hacky to default row_sep to CRLF
  conf[:row_sep] ||= "\r\n"

  # remove non CSV.generate options before entering generate blocks
  readable = conf.delete(:readable)

  # FIXME: the following is ridiculously complex. We have different
  # data paths for hashes and arrays. Also, arrays can turn into
  # hashes if all their items are hashes! We map complex objects
  # to JSON when inside hashes, but not arrays. Really need to
  # rethink this. Probably should have separate functions for
  # to_csv for hash and arrays.
  return CSV.generate(conf) do |csv|
    obj.each do |x|
      csv << x.flatten(1).map { |v| v.nil? ? nil : v.to_s }
    end
  end if obj.is_a?(Hash)

  CSV.generate(conf) do |csv|
    obj.each do |x|
      x = [x] unless x.respond_to? :map

      csv << x.map do |v|
        case v
        when Array, Hash
          readable ? v.to_json : encode_json(v.to_json)
        when nil
          nil
        else
          v.to_s
        end
      end
    end
  end
end
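A usage sketch on plain Ruby data (no models involved). The 'transpose' key is consumed here; other config keys are symbolized and passed straight to CSV.generate, with row_sep defaulting to CRLF:

rows = [{ 'name' => 'a', 'amount' => 1 }, { 'name' => 'b', 'amount' => 2 }]

Marty::DataExporter.to_csv(rows)
# => "name,amount\r\na,1\r\nb,2\r\n"

Marty::DataExporter.to_csv(rows, 'transpose' => true)
# => "name,a,b\r\namount,1,2\r\n"

Marty::DataExporter.to_csv(rows, 'col_sep' => "\t")
# tab-separated variant; col_sep is forwarded to CSV.generate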