Class: GitHubChangelogGenerator::OctoFetcher

Inherits:
Object
Defined in:
lib/github_changelog_generator/octo_fetcher.rb

Overview

A Fetcher responsible for all requests to GitHub and all basic manipulation of the related data (such as filtering, validating, etc.)

Example: fetcher = GitHubChangelogGenerator::OctoFetcher.new(options)

Constant Summary

PER_PAGE_NUMBER =
100
MAXIMUM_CONNECTIONS =
50
MAX_FORBIDDEN_RETRIES =
100
CHANGELOG_GITHUB_TOKEN =
"CHANGELOG_GITHUB_TOKEN"
GH_RATE_LIMIT_EXCEEDED_MSG =
"Warning: Can't finish operation: GitHub API rate limit exceeded, changelog may be " \
"missing some issues. You can limit the number of issues fetched using the `--max-issues NUM` argument."
NO_TOKEN_PROVIDED =
"Warning: No token provided (-t option) and variable $CHANGELOG_GITHUB_TOKEN was not found. " \
"This script can make only 50 requests to GitHub API per hour without a token!"
DEFAULT_REQUEST_OPTIONS =
{ per_page: PER_PAGE_NUMBER }

Instance Method Summary

Constructor Details

#initialize(options = {}) ⇒ OctoFetcher

Returns a new instance of OctoFetcher.

Parameters:

  • options (Hash) (defaults to: {})

    Options passed in

Options Hash (options):

  • :user (String)

    GitHub username

  • :project (String)

    GitHub project

  • :since (String)

    Only issues updated at or after this time are returned. This is a timestamp in ISO 8601 format: YYYY-MM-DDTHH:MM:SSZ, e.g. Time.parse("2016-01-01 10:00:00").iso8601

  • :http_cache (Boolean)

    Use ActiveSupport::Cache::FileStore to cache HTTP requests

  • :cache_file (String)

    If using http_cache, this is the cache file path

  • :cache_log (String)

    If using http_cache, this is the cache log file path



# File 'lib/github_changelog_generator/octo_fetcher.rb', line 34

def initialize(options = {})
  @options      = options || {}
  @user         = @options[:user]
  @project      = @options[:project]
  @since        = @options[:since]
  @http_cache   = @options[:http_cache]
  @commits      = []
  @branches     = nil
  @graph        = nil
  @client = nil
  @commits_in_tag_cache = {}
end
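
Example (a minimal sketch; the user, project, and timestamp below are placeholders, and only options documented above are used):

require "time"

options = {
  user: "octocat",                                  # GitHub username (placeholder)
  project: "Hello-World",                           # GitHub project (placeholder)
  since: Time.parse("2016-01-01 10:00:00").iso8601, # ISO 8601 timestamp
  http_cache: true                                  # cache HTTP requests on disk
}
fetcher = GitHubChangelogGenerator::OctoFetcher.new(options)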

Instance Method Details

#calculate_pages(client, method, request_options) ⇒ Integer

Returns the number of pages for an API call

Returns:

  • (Integer)

    number of pages for this API call in total



# File 'lib/github_changelog_generator/octo_fetcher.rb', line 108

def calculate_pages(client, method, request_options)
  # Makes the first API call so that we can call last_response
  check_github_response do
    client.send(method, user_project, DEFAULT_REQUEST_OPTIONS.merge(request_options))
  end

  last_response = client.last_response

  if (last_pg = last_response.rels[:last])
    querystring_as_hash(last_pg.href)["page"].to_i
  else
    1
  end
end
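
Example: the total page count is read from the page query parameter of the last rel in GitHub's Link header. The helper below is a hypothetical stand-in for the private querystring_as_hash used above, shown only to illustrate the extraction:

require "uri"

# Hypothetical stand-in for querystring_as_hash: turn a URL's query string
# into a Hash of parameter name => value.
def querystring_as_hash(url)
  URI.decode_www_form(URI(url).query.to_s).to_h
end

href = "https://api.github.com/repositories/1/tags?per_page=100&page=7" # illustrative
querystring_as_hash(href)["page"].to_i # => 7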

#client ⇒ Object



# File 'lib/github_changelog_generator/octo_fetcher.rb', line 90

def client
  @client ||= Octokit::Client.new(client_options)
end

#client_options ⇒ Object



# File 'lib/github_changelog_generator/octo_fetcher.rb', line 73

def client_options
  options = {
    middleware: middleware,
    connection_options: connection_options
  }

  if (github_token = fetch_github_token)
    options[:access_token] = github_token
  end

  if (endpoint = @options[:github_endpoint])
    options[:api_endpoint] = endpoint
  end

  options
end
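
An illustrative shape of the returned hash, assuming a token was found and a GitHub Enterprise endpoint was passed via the :github_endpoint option (all values are placeholders):

{
  middleware: middleware,                             # Faraday::RackBuilder from #middleware
  connection_options: connection_options,             # Octokit defaults plus the CA file
  access_token: "<token>",                            # from -t or $CHANGELOG_GITHUB_TOKEN
  api_endpoint: "https://github.example.com/api/v3/"  # from the :github_endpoint option
}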

#closed_pr_options ⇒ Object



# File 'lib/github_changelog_generator/octo_fetcher.rb', line 148

def closed_pr_options
  @closed_pr_options ||= {
    filter: "all", labels: nil, state: "closed"
  }.tap { |options| options[:since] = @since if @since }
end
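
For example, if @since were "2016-01-01T10:00:00Z", the memoized hash would evaluate to:

{ filter: "all", labels: nil, state: "closed", since: "2016-01-01T10:00:00Z" }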

#commits ⇒ Array

Fetch all commits

Returns:

  • (Array)

    Commits in a repo.



# File 'lib/github_changelog_generator/octo_fetcher.rb', line 300

def commits
  if @commits.empty?
    Sync do
      barrier = Async::Barrier.new
      semaphore = Async::Semaphore.new(MAXIMUM_CONNECTIONS, parent: barrier)

      if (since_commit = @options[:since_commit])
        iterate_pages(client, "commits_since", since_commit, parent: semaphore) do |new_commits|
          @commits.concat(new_commits)
        end
      else
        iterate_pages(client, "commits", parent: semaphore) do |new_commits|
          @commits.concat(new_commits)
        end
      end

      barrier.wait

      @commits.sort! do |b, a|
        a[:commit][:author][:date] <=> b[:commit][:author][:date]
      end
    end
  end
  @commits
end
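
Example (assumes the fetcher from the constructor example above): the result is memoized and sorted newest-first by author date, so the first element is the most recent commit.

latest = fetcher.commits.first
latest[:commit][:author][:date]         # author date of the newest commit
fetcher.commits.equal?(fetcher.commits) # => true; later calls reuse the cached array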

#commits_in_branch(name) ⇒ Object



# File 'lib/github_changelog_generator/octo_fetcher.rb', line 338

def commits_in_branch(name)
  @branches ||= client.branches(user_project).map { |branch| [branch[:name], branch] }.to_h

  if (branch = @branches[name])
    commits_in_tag(branch[:commit][:sha])
  end
end

#connection_options ⇒ Object



# File 'lib/github_changelog_generator/octo_fetcher.rb', line 67

def connection_options
  ca_file = @options[:ssl_ca_file] || ENV["SSL_CA_FILE"] || File.expand_path("ssl_certs/cacert.pem", __dir__)

  Octokit.connection_options.merge({ ssl: { ca_file: ca_file } })
end
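
Example: a custom CA bundle can be supplied through the :ssl_ca_file option or the SSL_CA_FILE environment variable, with the option taking precedence (the path below is illustrative):

fetcher = GitHubChangelogGenerator::OctoFetcher.new(
  user: "octocat", project: "Hello-World",
  ssl_ca_file: "/etc/ssl/certs/corporate-ca.pem"
)
fetcher.connection_options
# => Octokit.connection_options merged with ssl: { ca_file: "/etc/ssl/certs/corporate-ca.pem" }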

#default_branch ⇒ String

Returns Default branch of the repo.

Returns:

  • (String)

    Default branch of the repo



# File 'lib/github_changelog_generator/octo_fetcher.rb', line 334

def default_branch
  @default_branch ||= client.repository(user_project)[:default_branch]
end

#fetch_closed_issues_and_pr ⇒ Tuple

This method fetches all closed issues and separates them into pull requests and pure issues (a pull request is a kind of issue in GitHub terms)

Returns:

  • (Tuple)

    with (issues [Array <Hash>], pull-requests [Array <Hash>])



# File 'lib/github_changelog_generator/octo_fetcher.rb', line 158

def fetch_closed_issues_and_pr
  print "Fetching closed issues...\r" if @options[:verbose]
  issues = []
  page_i = 0
  count_pages = calculate_pages(client, "issues", closed_pr_options)

  iterate_pages(client, "issues", closed_pr_options) do |new_issues|
    page_i += PER_PAGE_NUMBER
    print_in_same_line("Fetching issues... #{page_i}/#{count_pages * PER_PAGE_NUMBER}")
    issues.concat(new_issues)
    break if @options[:max_issues] && issues.length >= @options[:max_issues]
  end
  print_empty_line
  Helper.log.info "Received issues: #{issues.count}"

  # separate arrays of issues and pull requests:
  issues.map { |issue| stringify_keys_deep(issue.to_hash) }
        .partition { |issue_or_pr| issue_or_pr["pull_request"].nil? }
end
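
Example: partition places entries without a "pull_request" key first, so the returned tuple destructures into pure issues followed by pull requests.

issues, pull_requests = fetcher.fetch_closed_issues_and_pr
issues.none? { |issue| issue["pull_request"] }  # => true
pull_requests.all? { |pr| pr["pull_request"] }  # => true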

#fetch_closed_pull_requests ⇒ Array <Hash>

Fetch all pull requests. We need them to detect the :merged_at parameter

Returns:

  • (Array <Hash>)

    all pull requests



# File 'lib/github_changelog_generator/octo_fetcher.rb', line 181

def fetch_closed_pull_requests
  pull_requests = []
  options = { state: "closed" }

  page_i = 0
  count_pages = calculate_pages(client, "pull_requests", options)

  iterate_pages(client, "pull_requests", options) do |new_pr|
    page_i += PER_PAGE_NUMBER
    log_string = "Fetching merged dates... #{page_i}/#{count_pages * PER_PAGE_NUMBER}"
    print_in_same_line(log_string)
    pull_requests.concat(new_pr)
  end
  print_empty_line

  Helper.log.info "Pull Request count: #{pull_requests.count}"
  pull_requests.map { |pull_request| stringify_keys_deep(pull_request.to_hash) }
end
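
A hedged sketch of how the merged dates might be consumed: build a lookup from PR number to merged_at and drop PRs that were closed without being merged (the variable names are illustrative).

merged_at_by_number = fetcher.fetch_closed_pull_requests
                             .to_h { |pr| [pr["number"], pr["merged_at"]] }
merged_only = merged_at_by_number.reject { |_number, merged_at| merged_at.nil? }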

#fetch_comments_async(prs) ⇒ Void

Fetch comments for PRs and add them to "comments"

Parameters:

  • prs (Array)

    The array of PRs.

Returns:

  • (Void)

    No return; PRs are updated in-place.



# File 'lib/github_changelog_generator/octo_fetcher.rb', line 240

def fetch_comments_async(prs)
  barrier = Async::Barrier.new
  semaphore = Async::Semaphore.new(MAXIMUM_CONNECTIONS, parent: barrier)

  Sync do
    client = self.client

    prs.each do |pr|
      semaphore.async do
        pr["comments"] = []
        iterate_pages(client, "issue_comments", pr["number"]) do |new_comment|
          pr["comments"].concat(new_comment)
        end
        pr["comments"] = pr["comments"].map { |comment| stringify_keys_deep(comment.to_hash) }
      end
    end

    barrier.wait
  end

  nil
end
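
Example: the PR hashes are mutated in place; after the call each one carries a "comments" array of string-keyed comment hashes.

_issues, pull_requests = fetcher.fetch_closed_issues_and_pr
fetcher.fetch_comments_async(pull_requests)
pull_requests.first["comments"] # => array of comment hashes with string keys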

#fetch_commit(commit_id) ⇒ Hash

Fetch the commit for the specified event

Parameters:

  • commit_id (String)

    the SHA of a commit to fetch

Returns:

  • (Hash)


# File 'lib/github_changelog_generator/octo_fetcher.rb', line 279

def fetch_commit(commit_id)
  found = commits.find do |commit|
    commit["sha"] == commit_id
  end
  if found
    stringify_keys_deep(found.to_hash)
  else
    client = self.client

    # cache miss; don't add to @commits because unsure of order.
    check_github_response do
      commit = client.commit(user_project, commit_id)
      commit = stringify_keys_deep(commit.to_hash)
      commit
    end
  end
end
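
Example (the SHA is a placeholder): commits already loaded by #commits are served from memory; any other SHA triggers a single extra API call.

commit = fetcher.fetch_commit("0123456789abcdef0123456789abcdef01234567")
commit["commit"]["committer"]["date"] # committer date of that commit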

#fetch_date_of_tag(tag) ⇒ Time

Fetch tag time from repo

Parameters:

  • tag (Hash)

    GitHub data item about a Tag

Returns:

  • (Time)

    time of specified tag



# File 'lib/github_changelog_generator/octo_fetcher.rb', line 268

def fetch_date_of_tag(tag)
  commit_data = fetch_commit(tag["commit"]["sha"])
  commit_data = stringify_keys_deep(commit_data.to_hash)

  commit_data["commit"]["committer"]["date"]
end
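
Example: tag is one element of the array returned by #get_all_tags; the result is the committer date of the commit the tag points at.

tag = fetcher.get_all_tags.first
fetcher.fetch_date_of_tag(tag) # => time of the tagged commit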

#fetch_events_async(issues) ⇒ Void

Fetch events for all issues and add them to 'events'

Parameters:

  • issues (Array)

Returns:

  • (Void)


# File 'lib/github_changelog_generator/octo_fetcher.rb', line 204

def fetch_events_async(issues)
  i = 0
  # Add accept option explicitly for disabling the warning of preview API.
  preview = { accept: Octokit::Preview::PREVIEW_TYPES[:project_card_events] }

  barrier = Async::Barrier.new
  semaphore = Async::Semaphore.new(MAXIMUM_CONNECTIONS, parent: barrier)

  Sync do
    client = self.client

    issues.each do |issue|
      semaphore.async do
        issue["events"] = []
        iterate_pages(client, "issue_events", issue["number"], preview) do |new_event|
          issue["events"].concat(new_event)
        end
        issue["events"] = issue["events"].map { |event| stringify_keys_deep(event.to_hash) }
        print_in_same_line("Fetching events for issues and PR: #{i + 1}/#{issues.count}")
        i += 1
      end
    end

    barrier.wait

    # to clear line from prev print
    print_empty_line
  end

  Helper.log.info "Fetching events for issues and PR: #{i}"
end

#fetch_tag_shas(tags) ⇒ Nil

Fetch all SHAs occurring in or before a given tag and add them to "shas_in_tag"

Parameters:

  • tags (Array)

    The array of tags.

Returns:

  • (Nil)

    No return; tags are updated in-place.



# File 'lib/github_changelog_generator/octo_fetcher.rb', line 351

def fetch_tag_shas(tags)
  # Reverse the tags array to gain max benefit from the @commits_in_tag_cache
  tags.reverse_each do |tag|
    tag["shas_in_tag"] = commits_in_tag(tag["commit"]["sha"])
  end
end
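
Example: each tag hash gains a "shas_in_tag" collection holding the SHAs occurring in or before that tag.

tags = fetcher.get_all_tags
fetcher.fetch_tag_shas(tags)
tags.first["shas_in_tag"] # SHAs contained in that tag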

#get_all_tags ⇒ Array <Hash>

Fetch all tags from repo

Returns:

  • (Array <Hash>)

    array of tags



# File 'lib/github_changelog_generator/octo_fetcher.rb', line 99

def get_all_tags
  print "Fetching tags...\r" if @options[:verbose]

  check_github_response { github_fetch_tags }
end

#github_fetch_tags ⇒ Array <Hash>

Fetch the repo's tags from the GitHub API

Returns:

  • (Array <Hash>)

    array of tags in repo



# File 'lib/github_changelog_generator/octo_fetcher.rb', line 126

def github_fetch_tags
  tags = []
  page_i = 0
  count_pages = calculate_pages(client, "tags", {})

  iterate_pages(client, "tags") do |new_tags|
    page_i += PER_PAGE_NUMBER
    print_in_same_line("Fetching tags... #{page_i}/#{count_pages * PER_PAGE_NUMBER}")
    tags.concat(new_tags)
  end
  print_empty_line

  if tags.count == 0
    Helper.log.warn "Warning: Can't find any tags in repo. \
Make sure, that you push tags to remote repo via 'git push --tags'"
  else
    Helper.log.info "Found #{tags.count} tags"
  end
  # tags are a Sawyer::Resource. Convert to hash
  tags.map { |resource| stringify_keys_deep(resource.to_hash) }
end

#middleware ⇒ Object



# File 'lib/github_changelog_generator/octo_fetcher.rb', line 47

def middleware
  Faraday::RackBuilder.new do |builder|
    if @http_cache
      cache_file = @options.fetch(:cache_file) { File.join(Dir.tmpdir, "github-changelog-http-cache") }
      cache_log  = @options.fetch(:cache_log) { File.join(Dir.tmpdir, "github-changelog-logger.log") }

      builder.use(
        Faraday::HttpCache,
        serializer: Marshal,
        store: ActiveSupport::Cache::FileStore.new(cache_file),
        logger: Logger.new(cache_log),
        shared_cache: false
      )
    end

    builder.use Octokit::Response::RaiseError
    builder.adapter :async_http
  end
end

#oldest_commit ⇒ Hash

Return the oldest commit in a repo

Returns:

  • (Hash)

    Oldest commit in the GitHub git history.



# File 'lib/github_changelog_generator/octo_fetcher.rb', line 329

def oldest_commit
  commits.last
end