Class: Yasd::Dataloader

Inherits:
Object
Defined in:
lib/yasd/dataloader.rb

Constant Summary

BATCH_SIZE = 400

Instance Attribute Summary

Instance Method Summary

Constructor Details

#initialize(config) ⇒ Dataloader

Returns a new instance of Dataloader.



# File 'lib/yasd/dataloader.rb', line 15

def initialize(config)
  @client = Soapforce::Client.new
  @client.authenticate(username: config.username, password: config.password)
  @success_logger = Logger.new(config.success_log_path || "./results/#{Time.now.strftime("%Y-%m-%d")}_success.log")
  @error_logger = Logger.new(config.error_log_path || "./results/#{Time.now.strftime("%Y-%m-%d")}_error.log")
  @mapper = Mapper.new(config.mapping)
  @converter = Converter.new(config.convert)
  @batch_size = config.batch_size || BATCH_SIZE
end
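
A minimal construction sketch, assuming only that the config object responds to the accessors read above (username, password, success_log_path, error_log_path, mapping, convert, batch_size); the OpenStruct and all values below are placeholders, not part of yasd itself.

require "ostruct"
require "yasd"   # assumed gem entry point

# Hypothetical configuration object; any object with these readers works.
# Log paths left nil fall back to ./results/<date>_success.log and
# ./results/<date>_error.log, so the ./results directory must already exist.
config = OpenStruct.new(
  username:   "user@example.com",
  password:   "password_plus_security_token",
  mapping:    { "Name" => "Name" },   # placeholder mapping definition
  convert:    {},                     # placeholder convert definition
  batch_size: 200                     # nil would fall back to BATCH_SIZE (400)
)

loader = Yasd::Dataloader.new(config)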

Instance Attribute Details

#client ⇒ Object

Returns the value of attribute client.



# File 'lib/yasd/dataloader.rb', line 11

def client
  @client
end
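
The reader exposes the authenticated Soapforce::Client, so ad-hoc SOQL can be issued directly against it. A small sketch, assuming the loader from the constructor example above:

loader.client.query("SELECT COUNT() FROM Contact")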

Instance Method Details

#delete(object, filename) ⇒ Object



# File 'lib/yasd/dataloader.rb', line 77

def delete(object, filename)
  delete_records = []
  total_record_size = 0

  CSV.foreach(filename, headers: true) do |data|
    total_record_size += 1
    delete_records << data[:Id]
    if delete_records.length == @batch_size
      delete_results = client.delete(object, delete_records)
      log(delete_results)
      delete_records = []
    end
  end
  if delete_records.length > 0
    delete_results = client.delete(object, delete_records)
    log(delete_results)
  end
end
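
A usage sketch, assuming the loader from the constructor example and a CSV whose Id column holds the record IDs to remove; deletions are sent to Salesforce in batches of @batch_size, with results written to the loggers.

# accounts_to_delete.csv (assumed layout):
#   Id
#   001xx000003DGb2AAG
#   001xx000003DGb3AAG
loader.delete("Account", "accounts_to_delete.csv")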

#export(query) ⇒ Object



# File 'lib/yasd/dataloader.rb', line 25

def export(query)
  query_result = client.query(query)
  return if query_result.size == 0

  CSV do |csv_out|
    header = create_csv_header(query_result)
    csv_out << header

    query_result.each do |record|
      csv_out << header.map {|field| record[field] }
    end
  end
end
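
#export runs the SOQL query through Soapforce and, via CSV's block form (which wraps $stdout by default), prints the result as CSV; the header row comes from the private create_csv_header helper, which is not shown on this page. A hedged sketch:

loader.export("SELECT Id, Name FROM Account WHERE CreatedDate = TODAY")
# => CSV rows written to standard output; redirect stdout to capture a file.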

#insert(object, filename) ⇒ Object



# File 'lib/yasd/dataloader.rb', line 39

def insert(object, filename)
  insert_records = []
  total_record_size = 0

  CSV.foreach(filename, headers: true) do |data|
    total_record_size += 1
    insert_records << convert_and_mapping(data)
    if insert_records.length == @batch_size
      insert_results = client.create!(object, insert_records)
      log(insert_results)
      insert_records = []
    end
  end
  if insert_records.length > 0
    insert_results = client.create!(object, insert_records)
    log(insert_results)
  end
end
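
A usage sketch, assuming the loader from the constructor example; each CSV row is passed through Converter and Mapper (convert_and_mapping) before being sent to Salesforce with create!, again in batches of @batch_size.

# accounts.csv (assumed layout; column names feed the mapping definition):
#   Name,BillingCity
#   Acme,Tokyo
loader.insert("Account", "accounts.csv")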

#update(object, filename) ⇒ Object



# File 'lib/yasd/dataloader.rb', line 58

def update(object, filename)
  update_records = []
  total_record_size = 0

  CSV.foreach(filename, headers: true) do |data|
    total_record_size += 1
    update_records << convert_and_mapping(data)
    if update_records.length == @batch_size
      update_results = client.update(object, update_records)
      log(update_results)
      update_records = []
    end
  end
  if update_records.length > 0
    update_results = client.update(object, update_records)
    log(update_results)
  end
end
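
A usage sketch, assuming the loader from the constructor example; update rows need the Salesforce Id alongside the fields being changed, and are flushed in batches of @batch_size.

# accounts_update.csv (assumed layout):
#   Id,Name
#   001xx000003DGb2AAG,Acme Renamed
loader.update("Account", "accounts_update.csv")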

#upsert(object, upsert_key, filename) ⇒ Object



# File 'lib/yasd/dataloader.rb', line 96

def upsert(object, upsert_key, filename)
  upsert_records = []
  total_record_size = 0

  CSV.foreach(filename, headers: true) do |data|
    total_record_size += 1
    upsert_records << convert_and_mapping(data)
    if upsert_records.length == @batch_size
      upsert_results = client.upsert(object, upsert_key, upsert_records)
      log(upsert_results)
      upsert_records = []
    end
  end
  if upsert_records.length > 0
    upsert_results = client.upsert(object, upsert_key, upsert_records)
    log(upsert_results)
  end
end
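
A usage sketch, assuming the loader from the constructor example; upsert_key names the external ID field used to match existing records, so matched rows are updated and unmatched rows are created. The ExternalId__c field and file name below are placeholders.

loader.upsert("Account", "ExternalId__c", "accounts_upsert.csv")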