Class: SqlToCsvStream::JsonEnumerator
- Inherits: Object
  - Object
  - SqlToCsvStream::JsonEnumerator
- Defined in:
- lib/sql_to_csv_stream/json_enumerator.rb
Constant Summary collapse
- JSON_COPY_OPTIONS =
{ format: 'TEXT' }.freeze
Instance Method Summary collapse
- #each(&stream) ⇒ Object
-
#initialize(object, connection: PostgresqlCopyEnumerator.default_connection) ⇒ JsonEnumerator
constructor
A new instance of JsonEnumerator.
Constructor Details
#initialize(object, connection: PostgresqlCopyEnumerator.default_connection) ⇒ JsonEnumerator
Returns a new instance of JsonEnumerator.
Source (lib/sql_to_csv_stream/json_enumerator.rb, lines 7–19):
# File 'lib/sql_to_csv_stream/json_enumerator.rb', line 7

# Builds a streaming JSON enumerator for a SQL query.
#
# @param object [#to_sql, #to_s] an ActiveRecord relation / Arel node
#   (anything responding to #to_sql) or a raw SQL string
# @param connection the PostgreSQL connection used for the COPY command
def initialize(object, connection: PostgresqlCopyEnumerator.default_connection)
  # Accept either a relation-like object or a plain SQL string. A trailing
  # semicolon would break the subquery wrapping below, so strip it off.
  inner_query =
    if object.respond_to?(:to_sql)
      object.to_sql
    else
      object.to_s
    end
  inner_query = inner_query.chomp(';')

  # The inspiration of this magic was:
  # https://dba.stackexchange.com/questions/90482/export-postgres-table-as-json?newreg=0b667caa47c34084bee6c90feec5e4be
  #
  # The idea is to stream with the PostgreSQL COPY command in TEXT format.
  # To do this, each row of the query result is first converted to JSON via
  # ROW_TO_JSON. One edge case: when the resulting JSON text contains
  # backslashes, the COPY TEXT escaping would mangle them — so additional
  # escape-backslashes are added up front with REGEXP_REPLACE.
  json_query = "SELECT REGEXP_REPLACE(ROW_TO_JSON(t)::TEXT, '\\\\', '\\', 'g') FROM (#{inner_query}) AS t"

  @copy_enum = PostgresqlCopyEnumerator.new(json_query, connection: connection, copy_options: JSON_COPY_OPTIONS)
end
Instance Method Details
#each(&stream) ⇒ Object
Source (lib/sql_to_csv_stream/json_enumerator.rb, lines 21–34):
# File 'lib/sql_to_csv_stream/json_enumerator.rb', line 21

# Yields the streamed query result as chunks of a JSON array: an opening
# '[', each row (comma-separated, trailing newline chomped), then "]\n".
#
# @param stream [Proc] receives each JSON chunk via #yield
def each(&stream)
  stream.yield('[')

  # Empty before the very first row, ',' for every row after it —
  # this keeps the emitted JSON array syntactically valid.
  separator = ''
  @copy_enum.each do |row|
    stream.yield(separator + row.chomp)
    separator = ','
  end

  stream.yield("]\n")
end