Class: DownloaderForCloud
Constant Summary
Constants inherited from Downloader
Downloader::CURRENT_METADATA_VERSION
Instance Attribute Summary
Attributes inherited from Downloader
#board_id_to_filter_id, #file_system, #metadata, #start_date_in_query
Instance Method Summary
Methods inherited from Downloader
create, #download_board_configuration, #download_sprints, #download_statuses, #download_users, #file_prefix, #find_board_ids, #identify_other_issues_to_be_downloaded, #initialize, #load_metadata, #log, #make_jql, #metadata_pathname, #remove_old_files, #run, #save_metadata, #update_status_history_file
Constructor Details
This class inherits a constructor from Downloader
Instance Method Details
#attach_changelog_to_issues(issue_datas:, issue_jsons:) ⇒ Object
# File 'lib/jirametrics/downloader_for_cloud.rb', line 78
def attach_changelog_to_issues issue_datas:, issue_jsons:
  max_results = 10_000
  payload = {
    'issueIdsOrKeys' => issue_datas.collect(&:key),
    'maxResults' => max_results
  }

  loop do
    response = @jira_gateway.post_request(
      relative_url: '/rest/api/3/changelog/bulkfetch',
      payload: JSON.generate(payload)
    )

    response['issueChangeLogs'].each do |issue_change_log|
      issue_id = issue_change_log['issueId']
      json = issue_jsons.find { |json| json['id'] == issue_id }
      unless json['changelog']
        json['changelog'] = {
          'startAt' => 0,
          'maxResults' => max_results,
          'total' => 0,
          'histories' => []
        }
      end

      new_changes = issue_change_log['changeHistories']
      json['changelog']['total'] += new_changes.size
      json['changelog']['histories'] += new_changes
    end

    next_page_token = response['nextPageToken']
    payload['nextPageToken'] = next_page_token
    break if next_page_token.nil?
  end
end
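Pages through POST /rest/api/3/changelog/bulkfetch and folds each page of change histories into the matching entry of issue_jsons, creating a 'changelog' section when one is not already present. A rough sketch of the request and response shapes, as inferred from the method body above; the issue keys and ids are made-up placeholders and fields the method does not read are omitted:

  # Request body sent to POST /rest/api/3/changelog/bulkfetch. The keys are
  # taken from issue_datas; 'nextPageToken' is added for second and later pages.
  request_body = {
    'issueIdsOrKeys' => ['SP-1', 'SP-2'],   # placeholder issue keys
    'maxResults' => 10_000
  }

  # Response shape the method reads: one entry per issue, each carrying a page
  # of change histories, plus a nextPageToken that is nil on the final page.
  response = {
    'issueChangeLogs' => [
      { 'issueId' => '10001', 'changeHistories' => [] }   # placeholder data
    ],
    'nextPageToken' => nil
  }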
#bulk_fetch_issues(issue_datas:, board:, in_initial_query:) ⇒ Object
# File 'lib/jirametrics/downloader_for_cloud.rb', line 47
def bulk_fetch_issues issue_datas:, board:, in_initial_query:
  log " Downloading #{issue_datas.size} issues", both: true
  payload = {
    'fields' => ['*all'],
    'issueIdsOrKeys' => issue_datas.collect(&:key)
  }
  response = @jira_gateway.post_request(
    relative_url: '/rest/api/3/issue/bulkfetch',
    payload: JSON.generate(payload)
  )

  attach_changelog_to_issues issue_datas: issue_datas, issue_jsons: response['issues']

  response['issues'].each do |issue_json|
    issue_json['exporter'] = {
      'in_initial_query' => in_initial_query
    }
    issue = Issue.new(raw: issue_json, board: board)

    data = issue_datas.find { |d| d.key == issue.key }
    data.up_to_date = true
    data.last_modified = issue.updated
    data.issue = issue
  end
  issue_datas
end
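Fetches the full JSON for every entry in issue_datas with a single POST to /rest/api/3/issue/bulkfetch, attaches change histories via #attach_changelog_to_issues, and marks each DownloadIssueData as up to date with its hydrated Issue. A hypothetical usage sketch; downloader, issue_keys and board are stand-ins and not part of the documented API:

  # Hypothetical setup: issue_keys and board come from elsewhere in the run.
  slice = issue_keys.collect { |key| DownloadIssueData.new key: key }

  updated = downloader.bulk_fetch_issues(
    issue_datas: slice, board: board, in_initial_query: true
  )

  updated.each do |data|
    # Each entry now carries the Issue built from the bulkfetch response.
    puts "#{data.key} last modified #{data.last_modified}" if data.up_to_date
  end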
#delete_issues_from_cache_that_are_not_in_server(issue_data_hash:, path:) ⇒ Object
# File 'lib/jirametrics/downloader_for_cloud.rb', line 180
def delete_issues_from_cache_that_are_not_in_server issue_data_hash:, path:
  @file_system.foreach path do |file|
    next if file.start_with? '.'

    unless /^(?<key>\w+-\d+)-\d+\.json$/ =~ file
      raise "Unexpected filename in #{path}: #{file}"
    end

    next if issue_data_hash[key]

    file_to_delete = File.join(path, file)
    log " Removing #{file_to_delete} from local cache"
    file_system.unlink file_to_delete
  end
end
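Removes any cached issue file whose key no longer appears in issue_data_hash, after validating that every file in the directory matches the expected <ISSUE-KEY>-<board id>.json naming pattern. An illustration of that filename check; the sample names are made up:

  pattern = /^(?<key>\w+-\d+)-\d+\.json$/

  pattern.match? 'SP-123-1.json'   # => true; the key captures as "SP-123"
  pattern.match? 'notes.txt'       # => false; the method raises for such files
  # Files starting with '.' are skipped before the pattern is ever checked.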
#download_issues(board:) ⇒ Object
# File 'lib/jirametrics/downloader_for_cloud.rb', line 115
def download_issues board:
  log " Downloading primary issues for board #{board.id} from #{jira_instance_type}", both: true
  path = File.join(@target_path, "#{file_prefix}_issues/")
  unless @file_system.dir_exist?(path)
    log " Creating path #{path}"
    @file_system.mkdir(path)
  end

  filter_id = @board_id_to_filter_id[board.id]
  jql = make_jql(filter_id: filter_id)
  intercept_jql = @download_config.project_config.settings['intercept_jql']
  jql = intercept_jql.call jql if intercept_jql

  issue_data_hash = search_for_issues jql: jql, board_id: board.id, path: path

  loop do
    related_issue_keys = Set.new

    issue_data_hash
      .values
      .reject { |data| data.up_to_date }
      .each_slice(100) do |slice|
        slice = bulk_fetch_issues(
          issue_datas: slice, board: board, in_initial_query: true
        )
        slice.each do |data|
          @file_system.save_json(
            json: data.issue.raw, filename: data.cache_path
          )
          @file_system.utime time: data.issue.updated, file: data.cache_path

          issue = data.issue
          next unless issue

          parent_key = issue.parent_key(project_config: @download_config.project_config)
          related_issue_keys << parent_key if parent_key

          issue.raw['fields']['subtasks']&.each do |raw_subtask|
            related_issue_keys << raw_subtask['key']
          end
        end
      end

    related_issue_keys.reject! { |key| issue_data_hash[key] }
    related_issue_keys.each do |key|
      data = DownloadIssueData.new key: key
      data.found_in_primary_query = false
      data.up_to_date = false
      data.cache_path = File.join(path, "#{key}-#{board.id}.json")
      issue_data_hash[key] = data
    end
    break if related_issue_keys.empty?

    log " Downloading linked issues for board #{board.id}", both: true
  end

  delete_issues_from_cache_that_are_not_in_server(
    issue_data_hash: issue_data_hash, path: path
  )
end
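Orchestrates the full download for one board: it searches for issues matching the board's filter, bulk fetches any that are out of date in slices of 100, keeps looping while parent and subtask links surface new keys, and finally prunes cached files that the server no longer returns. The 'intercept_jql' setting read above is expected to be a callable that receives the generated JQL and returns the string actually sent to Jira; a minimal sketch, assuming a plain settings hash stands in for the project configuration:

  # Hypothetical stand-in for the project config settings hash.
  settings = {}
  settings['intercept_jql'] = lambda do |jql|
    "(#{jql}) AND labels != ignore-me"   # narrow the query before downloading
  end

  jql = 'filter=123 AND updated >= -90d'  # placeholder JQL
  jql = settings['intercept_jql'].call(jql) if settings['intercept_jql']
  # => "(filter=123 AND updated >= -90d) AND labels != ignore-me"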
#jira_instance_type ⇒ Object
# File 'lib/jirametrics/downloader_for_cloud.rb', line 4
def jira_instance_type
  'Jira Cloud'
end
#last_modified(filename:) ⇒ Object
# File 'lib/jirametrics/downloader_for_cloud.rb', line 199
def last_modified filename:
  File.mtime(filename) if File.exist?(filename)
end
#search_for_issues(jql:, board_id:, path:) ⇒ Object
# File 'lib/jirametrics/downloader_for_cloud.rb', line 8
def search_for_issues jql:, board_id:, path:
  log " JQL: #{jql}"
  escaped_jql = CGI.escape jql

  hash = {}
  max_results = 5_000
  next_page_token = nil
  issue_count = 0

  loop do
    relative_url = +''
    relative_url << '/rest/api/3/search/jql'
    relative_url << "?jql=#{escaped_jql}&maxResults=#{max_results}"
    relative_url << "&nextPageToken=#{next_page_token}" if next_page_token
    relative_url << '&fields=updated'

    json = @jira_gateway.call_url relative_url: relative_url
    next_page_token = json['nextPageToken']

    json['issues'].each do |i|
      key = i['key']
      data = DownloadIssueData.new key: key
      data.key = key
      data.last_modified = Time.parse i['fields']['updated']
      data.found_in_primary_query = true
      data.cache_path = File.join(path, "#{key}-#{board_id}.json")
      data.up_to_date = last_modified(filename: data.cache_path) == data.last_modified
      hash[key] = data

      issue_count += 1
    end
    message = " Found #{issue_count} issues"
    log message, both: true

    break unless next_page_token
  end
  hash
end
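Pages through GET /rest/api/3/search/jql, requesting only the updated field, and returns a hash of DownloadIssueData keyed by issue key; an entry is marked up to date when the cached file's mtime matches the server's updated timestamp. A hypothetical usage sketch; downloader, the JQL text and the path are stand-ins:

  issue_data_hash = downloader.search_for_issues(
    jql: 'filter=123', board_id: 1, path: 'target/sample_issues/'
  )

  # Entries whose cached copy is stale (or missing) still need to be fetched.
  stale = issue_data_hash.values.reject(&:up_to_date)
  puts "#{stale.size} of #{issue_data_hash.size} issues need a fresh download"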