Class: GitHubChangelogGenerator::OctoFetcher
- Inherits:
-
Object
- Object
- GitHubChangelogGenerator::OctoFetcher
- Defined in:
- lib/github_changelog_generator/octo_fetcher.rb
Overview
A Fetcher responsible for all requests to GitHub and all basic manipulation with related data (such as filtering, validating, e.t.c)
Example: fetcher = GitHubChangelogGenerator::OctoFetcher.new(options)
Constant Summary collapse
- PER_PAGE_NUMBER =
100
- MAXIMUM_CONNECTIONS =
50
- MAX_FORBIDDEN_RETRIES =
100
- CHANGELOG_GITHUB_TOKEN =
"CHANGELOG_GITHUB_TOKEN"
- GH_RATE_LIMIT_EXCEEDED_MSG =
"Warning: Can't finish operation: GitHub API rate limit exceeded, changelog may be " \ "missing some issues. You can limit the number of issues fetched using the `--max-issues NUM` argument."
- NO_TOKEN_PROVIDED =
"Warning: No token provided (-t option) and variable $CHANGELOG_GITHUB_TOKEN was not found. " \ "This script can make only 50 requests to GitHub API per hour without a token!"
- DEFAULT_REQUEST_OPTIONS =
{ per_page: PER_PAGE_NUMBER }
Instance Method Summary collapse
-
#calculate_pages(client, method, request_options) ⇒ Integer
Returns the number of pages for a API call.
- #client ⇒ Object
- #client_options ⇒ Object
- #closed_pr_options ⇒ Object
-
#commits ⇒ Array
Fetch all commits.
- #commits_in_branch(name) ⇒ Array<String>
- #connection_options ⇒ Object
-
#default_branch ⇒ String
Default branch of the repo.
-
#fetch_closed_issues_and_pr ⇒ Tuple
This method fetch all closed issues and separate them to pull requests and pure issues (pull request is kind of issue in term of GitHub).
-
#fetch_closed_pull_requests ⇒ Array <Hash>
Fetch all pull requests.
-
#fetch_comments_async(prs) ⇒ Void
Fetch comments for PRs and add them to “comments”.
-
#fetch_commit(commit_id) ⇒ Hash
Fetch commit for specified event.
-
#fetch_date_of_tag(tag) ⇒ Time
Fetch tag time from repo.
-
#fetch_events_async(issues) ⇒ Void
Fetch event for all issues and add them to ‘events’.
-
#fetch_tag_shas(tags) ⇒ Object
Fetch all SHAs occurring in or before a given tag and add them to “shas_in_tag”.
-
#get_all_tags ⇒ Array <Hash>
Fetch all tags from repo.
-
#github_fetch_tags ⇒ Array <Hash>
Fill input array with tags.
-
#initialize(options = {}) ⇒ OctoFetcher
constructor
A new instance of OctoFetcher.
- #middleware ⇒ Object
-
#oldest_commit ⇒ Hash
Return the oldest commit in a repo.
Constructor Details
#initialize(options = {}) ⇒ OctoFetcher
Returns a new instance of OctoFetcher.
33 34 35 36 37 38 39 40 41 42 43 44 |
# File 'lib/github_changelog_generator/octo_fetcher.rb', line 33

def initialize(options = {})
  @options = options || {}
  @user = @options[:user]
  @project = @options[:project]
  @since = @options[:since]
  @http_cache = @options[:http_cache]
  @commits = []
  @branches = nil
  @graph = nil
  @client = nil
  @commits_in_tag_cache = {}
end
Instance Method Details
#calculate_pages(client, method, request_options) ⇒ Integer
Returns the number of pages for a API call
110 111 112 113 114 115 116 117 118 119 120 121 122 123 |
# File 'lib/github_changelog_generator/octo_fetcher.rb', line 110

def calculate_pages(client, method, request_options)
  # Makes the first API call so that we can call last_response
  check_github_response do
    client.send(method, user_project, DEFAULT_REQUEST_OPTIONS.merge(request_options))
  end

  last_response = client.last_response

  if (last_pg = last_response.rels[:last])
    querystring_as_hash(last_pg.href)["page"].to_i
  else
    1
  end
end
#client ⇒ Object
89 90 91 |
# File 'lib/github_changelog_generator/octo_fetcher.rb', line 89

def client
  @client ||= Octokit::Client.new(client_options)
end
#client_options ⇒ Object
72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 |
# File 'lib/github_changelog_generator/octo_fetcher.rb', line 72

def client_options
  options = {
    middleware: middleware,
    connection_options: connection_options
  }

  if (github_token = fetch_github_token)
    options[:access_token] = github_token
  end

  if (endpoint = @options[:github_endpoint])
    options[:api_endpoint] = endpoint
  end

  options
end
#closed_pr_options ⇒ Object
150 151 152 153 154 |
# File 'lib/github_changelog_generator/octo_fetcher.rb', line 150

def closed_pr_options
  @closed_pr_options ||= {
    filter: "all", labels: nil, state: "closed"
  }.tap { |options| options[:since] = @since if @since }
end
#commits ⇒ Array
Fetch all commits
302 303 304 305 306 307 308 309 310 311 312 313 314 315 316 317 318 319 320 321 322 323 324 325 326 |
# File 'lib/github_changelog_generator/octo_fetcher.rb', line 302

def commits
  if @commits.empty?
    Sync do
      barrier = Async::Barrier.new
      semaphore = Async::Semaphore.new(MAXIMUM_CONNECTIONS, parent: barrier)

      if (since_commit = @options[:since_commit])
        iterate_pages(client, "commits_since", since_commit, parent: semaphore) do |new_commits|
          @commits.concat(new_commits)
        end
      else
        iterate_pages(client, "commits", parent: semaphore) do |new_commits|
          @commits.concat(new_commits)
        end
      end

      barrier.wait

      @commits.sort! do |b, a|
        a[:commit][:author][:date] <=> b[:commit][:author][:date]
      end
    end
  end
  @commits
end
#commits_in_branch(name) ⇒ Array<String>
342 343 344 345 346 347 348 349 350 |
# File 'lib/github_changelog_generator/octo_fetcher.rb', line 342 def commits_in_branch(name) @branches ||= client.branches(user_project).map { |branch| [branch[:name], branch] }.to_h if (branch = @branches[name]) commits_in_tag(branch[:commit][:sha]) else [] end end |
#connection_options ⇒ Object
66 67 68 69 70 |
# File 'lib/github_changelog_generator/octo_fetcher.rb', line 66

def connection_options
  ca_file = @options[:ssl_ca_file] || ENV["SSL_CA_FILE"] || File.expand_path("ssl_certs/cacert.pem", __dir__)

  Octokit.connection_options.merge({ ssl: { ca_file: ca_file } })
end
#default_branch ⇒ String
Returns Default branch of the repo.
336 337 338 |
# File 'lib/github_changelog_generator/octo_fetcher.rb', line 336 def default_branch @default_branch ||= client.repository(user_project)[:default_branch] end |
#fetch_closed_issues_and_pr ⇒ Tuple
This method fetch all closed issues and separate them to pull requests and pure issues (pull request is kind of issue in term of GitHub)
160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 |
# File 'lib/github_changelog_generator/octo_fetcher.rb', line 160

def fetch_closed_issues_and_pr
  print "Fetching closed issues...\r" if @options[:verbose]
  issues = []
  page_i = 0
  count_pages = calculate_pages(client, "issues", closed_pr_options)

  iterate_pages(client, "issues", **closed_pr_options) do |new_issues|
    page_i += PER_PAGE_NUMBER
    print_in_same_line("Fetching issues... #{page_i}/#{count_pages * PER_PAGE_NUMBER}")
    issues.concat(new_issues)
    break if @options[:max_issues] && issues.length >= @options[:max_issues]
  end
  print_empty_line
  Helper.log.info "Received issues: #{issues.count}"

  # separate arrays of issues and pull requests:
  issues.map { |issue| stringify_keys_deep(issue.to_hash) }
        .partition { |issue_or_pr| issue_or_pr["pull_request"].nil? }
end
#fetch_closed_pull_requests ⇒ Array <Hash>
Fetch all pull requests. We need them to detect :merged_at parameter
183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 |
# File 'lib/github_changelog_generator/octo_fetcher.rb', line 183

def fetch_closed_pull_requests
  pull_requests = []
  options = { state: "closed" }

  page_i = 0
  count_pages = calculate_pages(client, "pull_requests", options)

  iterate_pages(client, "pull_requests", **options) do |new_pr|
    page_i += PER_PAGE_NUMBER
    log_string = "Fetching merged dates... #{page_i}/#{count_pages * PER_PAGE_NUMBER}"
    print_in_same_line(log_string)
    pull_requests.concat(new_pr)
  end
  print_empty_line

  Helper.log.info "Pull Request count: #{pull_requests.count}"
  pull_requests.map { |pull_request| stringify_keys_deep(pull_request.to_hash) }
end
#fetch_comments_async(prs) ⇒ Void
Fetch comments for PRs and add them to “comments”
242 243 244 245 246 247 248 249 250 251 252 253 254 255 256 257 258 259 260 261 262 263 |
# File 'lib/github_changelog_generator/octo_fetcher.rb', line 242

def fetch_comments_async(prs)
  barrier = Async::Barrier.new
  semaphore = Async::Semaphore.new(MAXIMUM_CONNECTIONS, parent: barrier)

  Sync do
    client = self.client

    prs.each do |pr|
      semaphore.async do
        pr["comments"] = []
        iterate_pages(client, "issue_comments", pr["number"]) do |new_comment|
          pr["comments"].concat(new_comment)
        end
        pr["comments"] = pr["comments"].map { |comment| stringify_keys_deep(comment.to_hash) }
      end
    end

    barrier.wait
  end
  nil
end
#fetch_commit(commit_id) ⇒ Hash
Fetch commit for specified event
281 282 283 284 285 286 287 288 289 290 291 292 293 294 295 296 297 |
# File 'lib/github_changelog_generator/octo_fetcher.rb', line 281 def fetch_commit(commit_id) found = commits.find do |commit| commit["sha"] == commit_id end if found stringify_keys_deep(found.to_hash) else client = self.client # cache miss; don't add to @commits because unsure of order. check_github_response do commit = client.commit(user_project, commit_id) commit = stringify_keys_deep(commit.to_hash) commit end end end |
#fetch_date_of_tag(tag) ⇒ Time
Fetch tag time from repo
270 271 272 273 274 275 |
# File 'lib/github_changelog_generator/octo_fetcher.rb', line 270 def fetch_date_of_tag(tag) commit_data = fetch_commit(tag["commit"]["sha"]) commit_data = stringify_keys_deep(commit_data.to_hash) commit_data["commit"]["committer"]["date"] end |
#fetch_events_async(issues) ⇒ Void
Fetch event for all issues and add them to ‘events’
206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 |
# File 'lib/github_changelog_generator/octo_fetcher.rb', line 206

def fetch_events_async(issues)
  i = 0
  # Add accept option explicitly for disabling the warning of preview API.
  preview = { accept: Octokit::Preview::PREVIEW_TYPES[:project_card_events] }

  barrier = Async::Barrier.new
  semaphore = Async::Semaphore.new(MAXIMUM_CONNECTIONS, parent: barrier)

  Sync do
    client = self.client

    issues.each do |issue|
      semaphore.async do
        issue["events"] = []
        iterate_pages(client, "issue_events", issue["number"], **preview) do |new_event|
          issue["events"].concat(new_event)
        end
        issue["events"] = issue["events"].map { |event| stringify_keys_deep(event.to_hash) }
        print_in_same_line("Fetching events for issues and PR: #{i + 1}/#{issues.count}")
        i += 1
      end
    end

    barrier.wait

    # to clear line from prev print
    print_empty_line
  end

  Helper.log.info "Fetching events for issues and PR: #{i}"
end
#fetch_tag_shas(tags) ⇒ Object
Fetch all SHAs occurring in or before a given tag and add them to “shas_in_tag”
357 358 359 360 361 362 |
# File 'lib/github_changelog_generator/octo_fetcher.rb', line 357

def fetch_tag_shas(tags)
  # Reverse the tags array to gain max benefit from the @commits_in_tag_cache
  tags.reverse_each do |tag|
    tag["shas_in_tag"] = commits_in_tag(tag["commit"]["sha"])
  end
end
#get_all_tags ⇒ Array <Hash>
Fetch all tags from repo
98 99 100 101 102 |
# File 'lib/github_changelog_generator/octo_fetcher.rb', line 98

def get_all_tags
  print "Fetching tags...\r" if @options[:verbose]

  check_github_response { github_fetch_tags }
end
#github_fetch_tags ⇒ Array <Hash>
Fill input array with tags
128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 |
# File 'lib/github_changelog_generator/octo_fetcher.rb', line 128

def github_fetch_tags
  tags = []
  page_i = 0
  count_pages = calculate_pages(client, "tags", {})

  iterate_pages(client, "tags") do |new_tags|
    page_i += PER_PAGE_NUMBER
    print_in_same_line("Fetching tags... #{page_i}/#{count_pages * PER_PAGE_NUMBER}")
    tags.concat(new_tags)
  end
  print_empty_line

  if tags.count == 0
    Helper.log.warn "Warning: Can't find any tags in repo. \
Make sure, that you push tags to remote repo via 'git push --tags'"
  else
    Helper.log.info "Found #{tags.count} tags"
  end
  # tags are a Sawyer::Resource. Convert to hash
  tags.map { |resource| stringify_keys_deep(resource.to_hash) }
end
#middleware ⇒ Object
46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 |
# File 'lib/github_changelog_generator/octo_fetcher.rb', line 46 def middleware Faraday::RackBuilder.new do |builder| if @http_cache cache_file = @options.fetch(:cache_file) { File.join(Dir.tmpdir, "github-changelog-http-cache") } cache_log = @options.fetch(:cache_log) { File.join(Dir.tmpdir, "github-changelog-logger.log") } builder.use( Faraday::HttpCache, serializer: Marshal, store: ActiveSupport::Cache::FileStore.new(cache_file), logger: Logger.new(cache_log), shared_cache: false ) end builder.use Octokit::Response::RaiseError builder.adapter :async_http end end |
#oldest_commit ⇒ Hash
Return the oldest commit in a repo
331 332 333 |
# File 'lib/github_changelog_generator/octo_fetcher.rb', line 331 def oldest_commit commits.last end |