2016-05-18 17:13:22 +00:00
|
|
|
module GitHubChangelogGenerator
|
|
|
|
# A Fetcher responsible for all requests to GitHub and all basic manipulation with related data
|
|
|
|
# (such as filtering, validating, etc.)
|
|
|
|
#
|
|
|
|
# Example:
|
2016-05-18 17:44:50 +00:00
|
|
|
# fetcher = GitHubChangelogGenerator::OctoFetcher.new options
|
2016-05-18 17:13:22 +00:00
|
|
|
|
|
|
|
class OctoFetcher
|
|
|
|
# Number of items requested per page from the GitHub API (100 is the API maximum).
PER_PAGE_NUMBER = 100
# Upper bound on concurrently running fetch threads (see fetch_events_async).
MAX_THREAD_NUMBER = 1
# Name of the environment variable read as a fallback GitHub API token.
CHANGELOG_GITHUB_TOKEN = "CHANGELOG_GITHUB_TOKEN".freeze
# Warning shown when the API rate limit is hit mid-run.
GH_RATE_LIMIT_EXCEEDED_MSG = "Warning: Can't finish operation: GitHub API rate limit exceeded, change log may be " \
"missing some issues. You can limit the number of issues fetched using the `--max-issues NUM` argument.".freeze
# Warning shown when neither -t nor $CHANGELOG_GITHUB_TOKEN supplies a token.
NO_TOKEN_PROVIDED = "Warning: No token provided (-t option) and variable $CHANGELOG_GITHUB_TOKEN was not found. " \
"This script can make only 50 requests to GitHub API per hour without token!".freeze
|
|
|
|
|
|
|
|
# Builds the fetcher state and the Octokit client from generator options.
#
# @param options [Hash] generator options; keys used here: :user, :project,
#   :since, :http_cache, :cache_file, :cache_log, :token, :github_endpoint
def initialize(options = {})
  @options = options || {}
  @user = @options[:user]
  @project = @options[:project]

  # Only issues updated at or after this time are returned. This is a timestamp in ISO 8601 format: YYYY-MM-DDTHH:MM:SSZ.
  @since = @options[:since] # eg. Time.parse("2016-01-01 10:00:00").iso8601

  # Use ActiveSupport::Cache::FileStore to cache http requests.
  # All cache-related setup is gated on the single flag below.
  @http_cache = @options[:http_cache]
  if @http_cache
    @cache_file = @options.fetch(:cache_file, "/tmp/github-changelog-http-cache")
    @cache_log = @options.fetch(:cache_log, "/tmp/github-changelog-logger.log")
    init_cache
  end

  @github_token = fetch_github_token

  @request_options = { per_page: PER_PAGE_NUMBER }
  @github_options = {}
  @github_options[:access_token] = @github_token unless @github_token.nil?
  @github_options[:api_endpoint] = @options[:github_endpoint] unless @options[:github_endpoint].nil?

  # A custom endpoint implies a GitHub Enterprise installation.
  client_type = @options[:github_endpoint].nil? ? Octokit::Client : Octokit::EnterpriseAdminClient
  @client = client_type.new(@github_options)
end
|
|
|
|
|
2016-05-24 01:28:00 +00:00
|
|
|
# Routes all Octokit requests through a Faraday middleware stack that
# caches HTTP responses on disk via ActiveSupport::Cache::FileStore.
#
# @return [void]
def init_cache
  cache_options = {
    serializer: Marshal,
    store: ActiveSupport::Cache::FileStore.new(@cache_file),
    logger: Logger.new(@cache_log),
    shared_cache: false
  }
  middleware_stack = Faraday::RackBuilder.new do |builder|
    builder.use Faraday::HttpCache, cache_options
    builder.use Octokit::Response::RaiseError
    builder.adapter Faraday.default_adapter
    # builder.response :logger
  end
  Octokit.middleware = middleware_stack
end
|
2016-05-18 22:35:38 +00:00
|
|
|
|
2016-05-18 17:13:22 +00:00
|
|
|
# Fetch all tags from repo, wrapped in the standard GitHub error handling.
#
# @return [Array <Hash>] array of tags
def get_all_tags
  print "Fetching tags...\r" if @options[:verbose]
  check_github_response { github_fetch_tags }
end
|
|
|
|
|
|
|
|
# Returns the number of pages for an API call.
#
# @param [Octokit::Client] client
# @param [String] method API method to call, eg. "tags"
# @param [Hash] request_options extra request parameters merged over the defaults
# @return [Integer] number of pages for this API call in total
def calculate_pages(client, method, request_options)
  # Issue the first request so that client.last_response is populated.
  check_github_response do
    client.send(method, user_project, @request_options.merge(request_options))
  end

  last_page_rel = client.last_response.rels[:last]
  # A missing :last relation means everything fit on a single page.
  return 1 unless last_page_rel

  parse_url_for_vars(last_page_rel.href)["page"].to_i
end
|
|
|
|
|
|
|
|
# Fill input array with tags
#
# @return [Array <Hash>] array of tags in repo
def github_fetch_tags
  tags = []
  page_i = 0
  count_pages = calculate_pages(@client, "tags", {})

  iterate_pages(@client, "tags", {}) do |new_tags|
    page_i += PER_PAGE_NUMBER
    print_in_same_line("Fetching tags... #{page_i}/#{count_pages * PER_PAGE_NUMBER}")
    tags.concat(new_tags)
  end
  print_empty_line

  if tags.empty?
    Helper.log.warn "Warning: Can't find any tags in repo.\
Make sure, that you push tags to remote repo via 'git push --tags'".yellow
  else
    Helper.log.info "Found #{tags.count} tags"
  end
  # tags are a Sawyer::Resource. Convert to hash
  tags.map { |h| h.to_hash.stringify_keys_deep! }
end
|
|
|
|
|
|
|
|
# This method fetch all closed issues and separate them to pull requests and pure issues
# (pull request is kind of issue in term of GitHub)
#
# @return [Tuple] with (issues [Array <Hash>], pull-requests [Array <Hash>])
def fetch_closed_issues_and_pr
  print "Fetching closed issues...\r" if @options[:verbose]

  issues = []
  options = { state: "closed", filter: "all", labels: nil }
  options[:since] = @since unless @since.nil?

  page_i = 0
  count_pages = calculate_pages(@client, "issues", options)

  iterate_pages(@client, "issues", options) do |new_issues|
    page_i += PER_PAGE_NUMBER
    print_in_same_line("Fetching issues... #{page_i}/#{count_pages * PER_PAGE_NUMBER}")
    issues.concat(new_issues)
    # Stop paging early once the configured issue cap has been reached.
    break if @options[:max_issues] && issues.length >= @options[:max_issues]
  end
  print_empty_line
  Helper.log.info "Received issues: #{issues.count}"

  issues = issues.map { |h| h.to_hash.stringify_keys_deep! }

  # separate arrays of issues and pull requests:
  issues.partition { |issue| issue["pull_request"].nil? }
end
|
|
|
|
|
|
|
|
# Fetch all pull requests. We need them to detect :merged_at parameter
#
# @return [Array <Hash>] all pull requests
def fetch_closed_pull_requests
  pull_requests = []
  options = { state: "closed" }
  # Restrict to PRs merged into the configured release branch, when given.
  options[:base] = @options[:release_branch] unless @options[:release_branch].nil?

  page_i = 0
  count_pages = calculate_pages(@client, "pull_requests", options)

  iterate_pages(@client, "pull_requests", options) do |new_pr|
    page_i += PER_PAGE_NUMBER
    print_in_same_line("Fetching merged dates... #{page_i}/#{count_pages * PER_PAGE_NUMBER}")
    pull_requests.concat(new_pr)
  end
  print_empty_line

  Helper.log.info "Pull Request count: #{pull_requests.count}"
  pull_requests.map { |h| h.to_hash.stringify_keys_deep! }
end
|
|
|
|
|
2016-05-20 06:55:16 +00:00
|
|
|
# Fetch event for all issues and add them to 'events'
#
# Threads are joined after every slice of MAX_THREAD_NUMBER issues, so at
# most MAX_THREAD_NUMBER requests run concurrently. The progress counter is
# mutex-guarded: it is shared between threads within a slice.
#
# @param [Array] issues
# @return [Void]
def fetch_events_async(issues)
  i = 0
  counter_lock = Mutex.new

  issues.each_slice(MAX_THREAD_NUMBER) do |issues_slice|
    threads = issues_slice.map do |issue|
      Thread.new do
        issue["events"] = []
        iterate_pages(@client, "issue_events", issue["number"], {}) do |new_event|
          issue["events"].concat(new_event)
        end
        issue["events"] = issue["events"].map { |h| h.to_hash.stringify_keys_deep! }
        # Increment and print under the lock so concurrent threads cannot
        # lose updates or interleave progress output.
        counter_lock.synchronize do
          print_in_same_line("Fetching events for issues and PR: #{i + 1}/#{issues.count}")
          i += 1
        end
      end
    end
    threads.each(&:join)
  end

  # to clear line from prev print
  print_empty_line

  Helper.log.info "Fetching events for issues and PR: #{i}"
end
|
|
|
|
|
|
|
|
# Fetch tag time from repo
#
# @param [Hash] tag
# @return [Time] time of specified tag
def fetch_date_of_tag(tag)
  commit_sha = tag["commit"]["sha"]
  commit_data = check_github_response { @client.commit(user_project, commit_sha) }
  # commit_data is a Sawyer::Resource; convert before digging into it.
  commit_data.to_hash.stringify_keys_deep!["commit"]["committer"]["date"]
end
|
|
|
|
|
|
|
|
# Fetch commit for specified event
#
# @return [Hash]
def fetch_commit(event)
  check_github_response do
    @client.commit(user_project, event["commit_id"]).to_hash.stringify_keys_deep!
  end
end
|
2016-05-18 17:44:50 +00:00
|
|
|
|
|
|
|
private
|
|
|
|
|
|
|
|
# Iterates through all pages until there are no more :next pages to follow
# yields the result per page
#
# @param [Octokit::Client] client
# @param [String] method (eg. 'tags')
# @param [Array] args remaining positional arguments for the API call; a
#   trailing Hash is treated as request options and merged over the defaults
# @return [Integer] total number of pages
def iterate_pages(client, method, *args)
  # Default to an empty hash: previously request_options stayed nil when no
  # Hash argument was passed, making the merge below raise TypeError.
  request_options = {}
  if args.size == 1 && args.first.is_a?(Hash)
    request_options = args.delete_at(0)
  elsif args.size > 1 && args.last.is_a?(Hash)
    request_options = args.delete_at(args.length - 1)
  end

  args.push(@request_options.merge(request_options))

  pages = 1

  # First request primes client.last_response for link-relation walking.
  check_github_response do
    client.send(method, user_project, *args)
  end
  last_response = client.last_response

  yield last_response.data

  until (next_one = last_response.rels[:next]).nil?
    pages += 1
    last_response = check_github_response { next_one.get }
    yield last_response.data
  end

  pages
end
|
|
|
|
|
|
|
|
# This is wrapper with rescue block
#
# @return [Object] returns exactly the same, what you put in the block, but wrap it with begin-rescue block
def check_github_response
  yield
rescue Octokit::Unauthorized => e
  Helper.log.error e.message.red
  abort "Error: wrong GitHub token"
rescue Octokit::Forbidden => e
  Helper.log.warn e.message.red
  Helper.log.warn GH_RATE_LIMIT_EXCEEDED_MSG.yellow
  Helper.log.warn @client.rate_limit
  # Degrade gracefully on rate limiting: caller receives nil.
  nil
end
|
|
|
|
|
|
|
|
# Print specified line on the same string
#
# Ends with a carriage return (no newline) so the next print overwrites it.
#
# @param [String] log_string
def print_in_same_line(log_string)
  print "#{log_string}\r"
end
|
|
|
|
|
|
|
|
# Print long line with spaces on same line to clear prev message
#
# @return [void]
def print_empty_line
  print_in_same_line(" ")
end
|
|
|
|
|
|
|
|
# Returns GitHub token. First try to use variable, provided by --token option,
# otherwise try to fetch it from CHANGELOG_GITHUB_TOKEN env variable.
#
# @return [String]
def fetch_github_token
  token = @options[:token] || ENV.fetch(CHANGELOG_GITHUB_TOKEN, nil)

  # Without a token the unauthenticated API rate limit applies; warn loudly.
  Helper.log.warn NO_TOKEN_PROVIDED.yellow unless token

  token
end
|
|
|
|
|
|
|
|
# @return [String] helper to return Github "user/project"
def user_project
  format("%s/%s", @options[:user], @options[:project])
end
|
2016-05-18 17:51:04 +00:00
|
|
|
|
|
|
|
# Parses a URI and returns a hash of all GET variables
#
# @param [String] uri eg. https://api.github.com/repositories/43914960/tags?page=37&foo=1
# @return [Hash] of all GET variables as strings, eg. { 'page' => '37', 'foo' => '1' };
#   empty hash when the URI carries no query string
def parse_url_for_vars(uri)
  query = URI(uri).query
  # Guard: a URI without a query string previously raised NoMethodError on nil.
  return {} if query.nil?

  query.split("&").each_with_object({}) do |pair, params|
    key, value = pair.split("=")
    params[key] = value
  end
end
|
2016-05-18 17:13:22 +00:00
|
|
|
end
|
|
|
|
end
|