OctoFetcher: extract methods (#570)
* OctoFetcher: extract methods
* OctoFetcher: extract github_options method
* OctoFetcher: Document exception, name parameters - more communicative names
* OctoFetcher#fetch_github_token: simplify - using ActiveSupport
* OctoFetcher#init_cache: fewer statements
* OctoFetcher: inline calls, name parameters
* OctoFetcher: name parameters, inline local
parent 630b8cee88
commit 23b341f715
@@ -31,21 +31,27 @@ module GitHubChangelogGenerator
       @project = @options[:project]
       @since = @options[:since]
       @http_cache = @options[:http_cache]
-      if @http_cache
-        @cache_file = @options.fetch(:cache_file) { File.join(Dir.tmpdir, "github-changelog-http-cache") }
-        @cache_log = @options.fetch(:cache_log) { File.join(Dir.tmpdir, "github-changelog-logger.log") }
-        init_cache
-      end
-      @github_token = fetch_github_token
-
-      @request_options = { per_page: PER_PAGE_NUMBER }
-      @github_options = {}
-      @github_options[:access_token] = @github_token unless @github_token.nil?
-      @github_options[:api_endpoint] = @options[:github_endpoint] unless @options[:github_endpoint].nil?
-
+      @cache_file = nil
+      @cache_log = nil
+      prepare_cache
       configure_octokit_ssl
+      @client = Octokit::Client.new(github_options)
+    end

-      @client = Octokit::Client.new(@github_options)
+    def prepare_cache
+      return unless @http_cache
+      @cache_file = @options.fetch(:cache_file) { File.join(Dir.tmpdir, "github-changelog-http-cache") }
+      @cache_log = @options.fetch(:cache_log) { File.join(Dir.tmpdir, "github-changelog-logger.log") }
+      init_cache
+    end
+
+    def github_options
+      result = {}
+      github_token = fetch_github_token
+      result[:access_token] = github_token if github_token
+      endpoint = @options[:github_endpoint]
+      result[:api_endpoint] = endpoint if endpoint
+      result
     end

     def configure_octokit_ssl
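
The extracted github_options replaces the @github_options instance variable with a locally built hash that only contains keys whose values are present, so Octokit falls back to its own defaults for the rest. A condensed standalone sketch of that pattern; the method and argument names below are illustrative, not part of the commit:

    # Keys are added only when a value is actually there, mirroring github_options above.
    def client_options(token, endpoint)
      result = {}
      result[:access_token] = token if token
      result[:api_endpoint] = endpoint if endpoint
      result
    end

    client_options("abc123", nil)
    # => { access_token: "abc123" }
    client_options(nil, "https://github.example.com/api/v3/")
    # => { api_endpoint: "https://github.example.com/api/v3/" }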
@@ -54,21 +60,19 @@ module GitHubChangelogGenerator
     end

     def init_cache
-      middleware_opts = {
-        serializer: Marshal,
-        store: ActiveSupport::Cache::FileStore.new(@cache_file),
-        logger: Logger.new(@cache_log),
-        shared_cache: false
-      }
-      stack = Faraday::RackBuilder.new do |builder|
-        builder.use Faraday::HttpCache, middleware_opts
+      Octokit.middleware = Faraday::RackBuilder.new do |builder|
+        builder.use(Faraday::HttpCache, serializer: Marshal,
+                                        store: ActiveSupport::Cache::FileStore.new(@cache_file),
+                                        logger: Logger.new(@cache_log),
+                                        shared_cache: false)
         builder.use Octokit::Response::RaiseError
         builder.adapter Faraday.default_adapter
         # builder.response :logger
       end
-      Octokit.middleware = stack
     end

+    DEFAULT_REQUEST_OPTIONS = { per_page: PER_PAGE_NUMBER }
+
     # Fetch all tags from repo
     #
     # @return [Array <Hash>] array of tags
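
The rewritten init_cache wires the HTTP cache into Octokit's Faraday middleware stack in a single statement. For readers unfamiliar with faraday-http-cache, here is a self-contained sketch of the same caching stack on a plain Faraday connection; the require lines, URL and cache paths are illustrative assumptions, not taken from the commit:

    require "tmpdir"
    require "logger"
    require "faraday"
    require "faraday-http-cache"
    require "active_support/cache"

    conn = Faraday.new(url: "https://api.github.com") do |builder|
      builder.use Faraday::HttpCache, serializer: Marshal,
                                      store: ActiveSupport::Cache::FileStore.new(File.join(Dir.tmpdir, "demo-http-cache")),
                                      logger: Logger.new(File.join(Dir.tmpdir, "demo-http-cache.log")),
                                      shared_cache: false
      builder.adapter Faraday.default_adapter
    end

    conn.get("/repos/octocat/Hello-World/tags") # first call goes over the network
    conn.get("/repos/octocat/Hello-World/tags") # may be served from the file store, subject to cache headers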
@@ -84,7 +88,7 @@ module GitHubChangelogGenerator
     def calculate_pages(client, method, request_options)
       # Makes the first API call so that we can call last_response
       check_github_response do
-        client.send(method, user_project, @request_options.merge(request_options))
+        client.send(method, user_project, DEFAULT_REQUEST_OPTIONS.merge(request_options))
       end

       last_response = client.last_response
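
Swapping @request_options for the DEFAULT_REQUEST_OPTIONS constant works because Hash#merge gives the per-call options precedence on duplicate keys; a quick illustration (the per_page value below is only an example):

    DEFAULT_REQUEST_OPTIONS = { per_page: 100 } # illustrative value

    DEFAULT_REQUEST_OPTIONS.merge(state: "closed")
    # => { per_page: 100, state: "closed" }
    DEFAULT_REQUEST_OPTIONS.merge(per_page: 30, state: "all")
    # => { per_page: 30, state: "all" }   caller-supplied keys win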
@@ -118,8 +122,7 @@ Make sure, that you push tags to remote repo via 'git push --tags'"
         Helper.log.info "Found #{tags.count} tags"
       end
       # tags are a Sawyer::Resource. Convert to hash
-      tags = tags.map { |h| stringify_keys_deep(h.to_hash) }
-      tags
+      tags.map { |resource| stringify_keys_deep(resource.to_hash) }
     end

     # This method fetch all closed issues and separate them to pull requests and pure issues
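
Dropping the temporary and the trailing tags line leans on Ruby's implicit return: a method returns its last evaluated expression. A minimal sketch:

    # The map result is the last expression, so it is returned without a local.
    def doubled(numbers)
      numbers.map { |number| number * 2 }
    end

    doubled([1, 2, 3]) # => [2, 4, 6]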
@@ -148,12 +151,9 @@ Make sure, that you push tags to remote repo via 'git push --tags'"
       print_empty_line
       Helper.log.info "Received issues: #{issues.count}"

-      issues = issues.map { |h| stringify_keys_deep(h.to_hash) }
-
       # separate arrays of issues and pull requests:
-      issues.partition do |x|
-        x["pull_request"].nil?
-      end
+      issues.map { |issue| stringify_keys_deep(issue.to_hash) }
+            .partition { |issue_or_pr| issue_or_pr["pull_request"].nil? }
     end

     # Fetch all pull requests. We need them to detect :merged_at parameter
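
The new map-then-partition chain relies on Enumerable#partition returning a pair of arrays: first the elements for which the block is truthy, then the rest. A sketch with made-up issue hashes (real payloads carry many more keys):

    issues = [
      { "number" => 1, "pull_request" => nil },
      { "number" => 2, "pull_request" => { "url" => "https://example.invalid/pr/2" } }
    ]

    pure_issues, pull_requests = issues.partition { |issue_or_pr| issue_or_pr["pull_request"].nil? }
    pure_issues.map { |issue| issue["number"] }   # => [1]
    pull_requests.map { |pr| pr["number"] }       # => [2]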
@@ -179,8 +179,7 @@ Make sure, that you push tags to remote repo via 'git push --tags'"
       print_empty_line

       Helper.log.info "Pull Request count: #{pull_requests.count}"
-      pull_requests = pull_requests.map { |h| stringify_keys_deep(h.to_hash) }
-      pull_requests
+      pull_requests.map { |pull_request| stringify_keys_deep(pull_request.to_hash) }
     end

     # Fetch event for all issues and add them to 'events'
@@ -198,7 +197,7 @@ Make sure, that you push tags to remote repo via 'git push --tags'"
         iterate_pages(@client, "issue_events", issue["number"], {}) do |new_event|
           issue["events"].concat(new_event)
         end
-        issue["events"] = issue["events"].map { |h| stringify_keys_deep(h.to_hash) }
+        issue["events"] = issue["events"].map { |event| stringify_keys_deep(event.to_hash) }
         print_in_same_line("Fetching events for issues and PR: #{i + 1}/#{issues.count}")
         i += 1
       end
@@ -256,14 +255,15 @@ Make sure, that you push tags to remote repo via 'git push --tags'"
           stringify_keys_deep(value)
         end
       when Hash
-        indata.each_with_object({}) do |(k, v), output|
-          output[k.to_s] = stringify_keys_deep(v)
+        indata.each_with_object({}) do |(key, value), output|
+          output[key.to_s] = stringify_keys_deep(value)
         end
       else
         indata
       end
     end

+    # Exception raised to warn about moved repositories.
     MovedPermanentlyError = Class.new(RuntimeError)

     # Iterates through all pages until there are no more :next pages to follow
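
The newly documented MovedPermanentlyError is an anonymous RuntimeError subclass assigned to a constant; it is raised and rescued like any hand-written exception class. Illustrative use (the URL is made up):

    MovedPermanentlyError = Class.new(RuntimeError)

    begin
      raise MovedPermanentlyError, "https://api.github.com/repositories/123456"
    rescue MovedPermanentlyError => error
      warn "Repository moved permanently: #{error.message}"
    end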
@@ -276,13 +276,11 @@ Make sure, that you push tags to remote repo via 'git push --tags'"
     #
     # @return [void]
     def iterate_pages(client, method, *args)
-      request_opts = extract_request_args(args)
-      args.push(@request_options.merge(request_opts))
+      args << DEFAULT_REQUEST_OPTIONS.merge(extract_request_args(args))

       check_github_response { client.send(method, user_project, *args) }
-      last_response = client.last_response
-      if last_response.status == 301
-        raise MovedPermanentlyError, last_response.data[:url]
+      last_response = client.last_response.tap do |response|
+        raise(MovedPermanentlyError, response.data[:url]) if response.status == 301
       end

       yield(last_response.data)
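
The 301 check now hangs off Object#tap, which yields its receiver to the block and then returns that same receiver, so last_response is bound to the unchanged response object. A standalone sketch with a stand-in response (OpenStruct here is only a test double, not what Octokit returns):

    require "ostruct"

    MovedError = Class.new(RuntimeError) # stand-in for MovedPermanentlyError

    response = OpenStruct.new(status: 200, data: { url: "https://example.invalid" })
    last_response = response.tap do |r|
      raise(MovedError, r.data[:url]) if r.status == 301
    end
    last_response.equal?(response) # => true, tap returns its receiver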
@@ -370,7 +368,7 @@ Make sure, that you push tags to remote repo via 'git push --tags'"
     #
     # @return [String]
     def fetch_github_token
-      env_var = @options[:token] ? @options[:token] : (ENV.fetch CHANGELOG_GITHUB_TOKEN, nil)
+      env_var = @options[:token].presence || ENV["CHANGELOG_GITHUB_TOKEN"]

       Helper.log.warn NO_TOKEN_PROVIDED unless env_var

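
The simplified token lookup relies on ActiveSupport's Object#presence, which returns nil for blank values, so an empty or whitespace-only --token option falls through to the environment variable. A small illustration:

    require "active_support/core_ext/object/blank" # defines blank?, present? and presence

    "".presence       # => nil
    "   ".presence    # => nil
    "abc123".presence # => "abc123"

    token = "".presence || ENV["CHANGELOG_GITHUB_TOKEN"] # falls back to the env var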