Class: LeanplumApi::API

Inherits:
Object
  • Object
show all
Defined in:
lib/leanplum_api/api.rb

Defined Under Namespace

Classes: LeanplumValidationException

Constant Summary collapse

EXPORT_PENDING =
'PENDING'
EXPORT_RUNNING =
'RUNNING'
EXPORT_FINISHED =
'FINISHED'

Instance Method Summary collapse

Constructor Details

#initialize(options = {}) ⇒ API



9
10
11
# File 'lib/leanplum_api/api.rb', line 9

# Guards against use of the API before LeanplumApi.configure has been called.
def initialize(options = {})
  raise 'LeanplumApi not configured yet!' unless LeanplumApi.configuration
end

Instance Method Details

#export_data(start_time, end_time = nil) ⇒ Object

Returns the jobId. Leanplum has confirmed that using startTime and endTime — especially when trying to stay relatively up to the minute — leads to unprocessed information that can be incomplete. They recommend using the automatic export to S3 if possible.



57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
# File 'lib/leanplum_api/api.rb', line 57

def export_data(start_time, end_time = nil)
  fail "Start time #{start_time} after end time #{end_time}" if end_time && start_time > end_time
  LeanplumApi.configuration.logger.info("Requesting data export from #{start_time} to #{end_time}...")

  # Because of open questions about how startTime and endTime work (or don't work, as the case may be), we
  # only want to pass the dates unless start and end times are specifically requested.
  params = { action: 'exportData', startDate: start_time.strftime('%Y%m%d') }
  params[:startTime] = start_time.strftime('%s') if start_time.is_a?(DateTime) || start_time.is_a?(Time)
  if end_time
    params[:endDate] = end_time.strftime('%Y%m%d')
    params[:endTime] = end_time.strftime('%s') if end_time.is_a?(DateTime) || end_time.is_a?(Time)
  end

  # Handle optional S3 export params
  if LeanplumApi.configuration.s3_bucket_name
    fail 's3_bucket_name set but s3_access_id not configured!' unless LeanplumApi.configuration.s3_access_id
    fail 's3_bucket_name set but s3_access_key not configured!' unless LeanplumApi.configuration.s3_access_key

    params.merge!(
      s3BucketName: LeanplumApi.configuration.s3_bucket_name,
      s3AccessId: LeanplumApi.configuration.s3_access_id,
      s3AccessKey: LeanplumApi.configuration.s3_access_key
    )
    params.merge!(s3ObjectPrefix: LeanplumApi.configuration.s3_object_prefix) if LeanplumApi.configuration.s3_object_prefix
  end

  data_export_connection.get(params).body['response'].first['jobId']
end

#export_user(user_id) ⇒ Object



118
119
120
# File 'lib/leanplum_api/api.rb', line 118

def export_user(user_id)
  data_export_connection.get(action: 'exportUser', userId: user_id).body['response'].first['userAttributes']
end

#export_users(segment, ab_test_id) ⇒ Object

See leanplum docs. The segment syntax is identical to that produced by the “Insert Value” feature on the dashboard. Examples: ‘Country = “US”’, ‘Country = “US” and version = 1’.



89
90
91
# File 'lib/leanplum_api/api.rb', line 89

# Requests an export of all users matching the given segment and A/B test.
# NOTE(review): ab_test_id is sent in snake_case while every other request
# param in this class is camelCase (userId, jobId, startDate) — confirm the
# key Leanplum actually expects.
def export_users(segment, ab_test_id)
  query = { action: 'exportUsers', segment: segment, ab_test_id: ab_test_id }
  data_export_connection.get(query)
end

#get_ab_test(ab_test_id) ⇒ Object



126
127
128
# File 'lib/leanplum_api/api.rb', line 126

def get_ab_test(ab_test_id)
  content_read_only_connection.get(action: 'getAbTest', id: ab_test_id).body['response'].first['abTest']
end

#get_ab_tests(only_recent = false) ⇒ Object



122
123
124
# File 'lib/leanplum_api/api.rb', line 122

def get_ab_tests(only_recent = false)
  content_read_only_connection.get(action: 'getAbTests', recent: only_recent).body['response'].first['abTests']
end

#get_export_results(job_id) ⇒ Object



93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
# File 'lib/leanplum_api/api.rb', line 93

def get_export_results(job_id)
  response = data_export_connection.get(action: 'getExportResults', jobId: job_id).body['response'].first
  if response['state'] == EXPORT_FINISHED
    LeanplumApi.configuration.logger.info("Export finished.")
    LeanplumApi.configuration.logger.debug("  Response: #{response}")
    {
      files: response['files'],
      number_of_sessions: response['numSessions'],
      number_of_bytes: response['numBytes'],
      state: response['state'],
      s3_copy_status: response['s3CopyStatus']
    }
  else
    { state: response['state'] }
  end
end

#get_message(message_id) ⇒ Object



138
139
140
# File 'lib/leanplum_api/api.rb', line 138

def get_message(message_id)
  content_read_only_connection.get(action: 'getMessage', id: message_id).body['response'].first['message']
end

#get_messages(only_recent = false) ⇒ Object



134
135
136
# File 'lib/leanplum_api/api.rb', line 134

def get_messages(only_recent = false)
  content_read_only_connection.get(action: 'getMessages', recent: only_recent).body['response'].first['messages']
end

#get_variant(variant_id) ⇒ Object



130
131
132
# File 'lib/leanplum_api/api.rb', line 130

def get_variant(variant_id)
  content_read_only_connection.get(action: 'getVariant', id: variant_id).body['response'].first['variant']
end

#get_vars(user_id) ⇒ Object



142
143
144
# File 'lib/leanplum_api/api.rb', line 142

def get_vars(user_id)
  production_connection.get(action: 'getVars', userId: user_id).body['response'].first['vars']
end

#reset_anomalous_users(user_ids) ⇒ Object

If you pass old events OR users with old date attributes (i.e. create_date for an old user), Leanplum will mark them ‘anomalous’ and exclude them from your data set. Calling this method after you pass old events will fix that for all events for the specified user_id. For some reason this API feature requires the developer key.



150
151
152
153
154
# File 'lib/leanplum_api/api.rb', line 150

# Clears Leanplum's "anomalous" flag for the given user id(s) by re-sending
# setUserAttributes with resetAnomalies for each user. Goes through the
# development connection because this API feature requires the developer key.
def reset_anomalous_users(user_ids)
  payload = Array.wrap(user_ids).map do |user_id|
    { action: 'setUserAttributes', resetAnomalies: true, userId: user_id }
  end
  development_connection.multi(payload)
end

#set_user_attributes(user_attributes, options = {}) ⇒ Object



13
14
15
# File 'lib/leanplum_api/api.rb', line 13

# Convenience wrapper around track_multi: batch-update user attributes
# without tracking any events.
def set_user_attributes(user_attributes, options = {})
  track_multi(nil, user_attributes, options)
end

#track_events(events, options = {}) ⇒ Object



17
18
19
# File 'lib/leanplum_api/api.rb', line 17

# Convenience wrapper around track_multi: track events without updating
# any user attributes.
def track_events(events, options = {})
  track_multi(events, nil, options)
end

#track_multi(events = nil, user_attributes = nil, options = {}) ⇒ Object

This method is for tracking events and/or updating user attributes at the same time, batched together as Leanplum recommends. Set the :force_anomalous_override option to catch warnings from Leanplum about anomalous events and force them not to be considered anomalous.



25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
# File 'lib/leanplum_api/api.rb', line 25

def track_multi(events = nil, user_attributes = nil, options = {})
  events = Array.wrap(events)
  user_attributes = Array.wrap(user_attributes)

  request_data = user_attributes.map { |h| build_user_attributes_hash(h) }
  request_data += events.map { |h| build_event_attributes_hash(h, options) }
  response = production_connection.multi(request_data).body['response']

  if options[:force_anomalous_override]
    user_ids_to_reset = []
    response.each_with_index do |indicator, i|
      if indicator['warning'] && indicator['warning']['message'] =~ /Anomaly detected/i
        # Leanplum does not return their warnings in order!!!  So we just have to reset everyone who had any events.
        # This is what the code should be:
        # user_ids_to_reset << request_data[i]['userId']

        # This is what it has to be:
        user_ids_to_reset = events.map { |e| e[:user_id] }.uniq
      end
    end

    unless user_ids_to_reset.empty?
      LeanplumApi.configuration.logger.debug("Resetting anomalous user ids: #{user_ids_to_reset}")
      reset_anomalous_users(user_ids_to_reset)
    end
  end
end

#wait_for_job(job_id, polling_interval = 60) ⇒ Object



110
111
112
113
114
115
116
# File 'lib/leanplum_api/api.rb', line 110

# Blocks until the export job reaches EXPORT_FINISHED, sleeping
# polling_interval seconds between polls, and returns the final results hash.
#
# Fix: the previous version called get_export_results (a live API request)
# up to three times per cycle — in the loop condition, in the debug log, and
# for the final return. Fetch the status once per cycle instead.
def wait_for_job(job_id, polling_interval = 60)
  loop do
    results = get_export_results(job_id)
    return results if results[:state] == EXPORT_FINISHED

    LeanplumApi.configuration.logger.debug("Polling job #{job_id}: #{results}")
    sleep(polling_interval)
  end
end