Compare commits

master...olleolleol (41 commits)

Commit SHAs:

0c96db9490
1c8a2d29ea
fe0fd94ea3
3fc0012de2
de525fb1e8
cacd6dc3c8
db1ffdd59b
0189c2d736
559defcb55
0fc6da1a75
4b50b5b257
f98cc8f292
8056600180
6d45d90a5f
3df84403c4
1763f9c8ea
180b4cb6c9
b82aa64eee
cb6286fb83
7dcf617d61
e89b03fe48
9ff27b690f
bcf57dbe0e
4cfb1e4a5f
b81f8a4169
3179f29a7c
aee2f43235
63b3de1281
0517674f04
53b0684e67
ca5d6d9675
3cb3584bb9
4fb674b68e
e494fc004e
4ecb21c0b0
3cf2802d9a
507b89c665
cd14a86f84
5d7098282d
0751082526
baa4e3e659
@@ -14,9 +14,9 @@ matrix:
script: gem build github_changelog_generator && bundle install
gemfile: spec/install-gem-in-bundler.gemfile
- rvm: 2.1
gemfile: gemfiles/Gemfile.with_rack16
gemfile: gemfiles/Gemfile.2_1
- rvm: 2.3.1
gemfile: gemfiles/Gemfile.with_rack2
gemfile: gemfiles/Gemfile.2_3_1
- rvm: 2.4.0-preview2
gemfile: gemfiles/Gemfile.2_4_0

Gemfile (3 changes)

@@ -11,6 +11,9 @@ group :development, :test do
end

group :test do
  gem "vcr"
  gem "multi_json"
  gem "webmock"
  gem "coveralls", "~>0.8", require: false
  gem "simplecov", "~>0.10", require: false
  gem "codeclimate-test-reporter", "~>0.4"

@@ -1,2 +1,3 @@
eval_gemfile File.expand_path('../../Gemfile', __FILE__)
gem 'rack', '~> 1.6'
gem 'activesupport', '~> 4'
@@ -1,2 +1,3 @@
eval_gemfile File.expand_path('../../Gemfile', __FILE__)
gem 'rack', '>= 2'
gem 'activesupport', '>= 4'
@@ -25,6 +25,8 @@ Gem::Specification.new do |spec|
  spec.require_paths = ["lib"]

  spec.add_runtime_dependency "rake", ">= 10.0"
  spec.add_runtime_dependency "github_api", ">= 0.14"
  spec.add_runtime_dependency "rainbow", ">= 2.1"
  spec.add_runtime_dependency("octokit", ["~> 4.0"])
  spec.add_runtime_dependency("faraday-http-cache")
  spec.add_runtime_dependency("activesupport")
end
@@ -1,8 +1,12 @@
#!/usr/bin/env ruby
# frozen_string_literal: true

require "github_api"
require "octokit"
require "faraday-http-cache"
require "logger"
require "active_support"
require "json"
require "multi_json"
require "benchmark"

require_relative "github_changelog_generator/helper"

@@ -11,6 +15,8 @@ require_relative "github_changelog_generator/parser_file"
require_relative "github_changelog_generator/generator/generator"
require_relative "github_changelog_generator/version"
require_relative "github_changelog_generator/reader"
require_relative "github_changelog_generator/hash"
require_relative "github_changelog_generator/array"

# The main module, where placed all classes (now, at least)
module GitHubChangelogGenerator
lib/github_changelog_generator/array.rb (new file, 14 lines)

@@ -0,0 +1,14 @@
# frozen_string_literal: true
class Array
  def stringify_keys_deep!
    new_ar = []
    each do |value|
      new_value = value
      if value.is_a?(Hash) || value.is_a?(Array)
        new_value = value.stringify_keys_deep!
      end
      new_ar << new_value
    end
    new_ar
  end
end
@@ -1,226 +0,0 @@
|
|||
# frozen_string_literal: true
|
||||
module GitHubChangelogGenerator
|
||||
# A Fetcher responsible for all requests to GitHub and all basic manipulation with related data
|
||||
# (such as filtering, validating, e.t.c)
|
||||
#
|
||||
# Example:
|
||||
# fetcher = GitHubChangelogGenerator::Fetcher.new options
|
||||
|
||||
class Fetcher
|
||||
PER_PAGE_NUMBER = 30
|
||||
MAX_SIMULTANEOUS_REQUESTS = 25
|
||||
CHANGELOG_GITHUB_TOKEN = "CHANGELOG_GITHUB_TOKEN"
|
||||
GH_RATE_LIMIT_EXCEEDED_MSG = "Warning: Can't finish operation: GitHub API rate limit exceeded, change log may be " \
|
||||
"missing some issues. You can limit the number of issues fetched using the `--max-issues NUM` argument."
|
||||
NO_TOKEN_PROVIDED = "Warning: No token provided (-t option) and variable $CHANGELOG_GITHUB_TOKEN was not found. " \
|
||||
"This script can make only 50 requests to GitHub API per hour without token!"
|
||||
|
||||
def initialize(options = {})
|
||||
@options = options || {}
|
||||
@user = @options[:user]
|
||||
@project = @options[:project]
|
||||
@github_token = fetch_github_token
|
||||
@github_options = { per_page: PER_PAGE_NUMBER }
|
||||
@github_options[:oauth_token] = @github_token unless @github_token.nil?
|
||||
@github_options[:endpoint] = @options[:github_endpoint] unless @options[:github_endpoint].nil?
|
||||
@github_options[:site] = @options[:github_endpoint] unless @options[:github_site].nil?
|
||||
|
||||
@github = check_github_response { Github.new @github_options }
|
||||
end
|
||||
|
||||
# Returns GitHub token. First try to use variable, provided by --token option,
|
||||
# otherwise try to fetch it from CHANGELOG_GITHUB_TOKEN env variable.
|
||||
#
|
||||
# @return [String]
|
||||
def fetch_github_token
|
||||
env_var = @options[:token] ? @options[:token] : (ENV.fetch CHANGELOG_GITHUB_TOKEN, nil)
|
||||
|
||||
Helper.log.warn NO_TOKEN_PROVIDED unless env_var
|
||||
|
||||
env_var
|
||||
end
|
||||
|
||||
# Fetch all tags from repo
|
||||
# @return [Array] array of tags
|
||||
def get_all_tags
|
||||
print "Fetching tags...\r" if @options[:verbose]
|
||||
|
||||
check_github_response { github_fetch_tags }
|
||||
end
|
||||
|
||||
# This is wrapper with rescue block
|
||||
# @return [Object] returns exactly the same, what you put in the block, but wrap it with begin-rescue block
|
||||
def check_github_response
|
||||
begin
|
||||
value = yield
|
||||
rescue Github::Error::Unauthorized => e
|
||||
Helper.log.error e.response_message
|
||||
abort "Error: wrong GitHub token"
|
||||
rescue Github::Error::Forbidden => e
|
||||
Helper.log.warn e.response_message
|
||||
Helper.log.warn GH_RATE_LIMIT_EXCEEDED_MSG
|
||||
end
|
||||
value
|
||||
end
|
||||
|
||||
# Fill input array with tags
|
||||
# @return [Array] array of tags in repo
|
||||
def github_fetch_tags
|
||||
tags = []
|
||||
response = @github.repos.tags @options[:user], @options[:project]
|
||||
page_i = 0
|
||||
count_pages = response.count_pages
|
||||
response.each_page do |page|
|
||||
page_i += PER_PAGE_NUMBER
|
||||
print_in_same_line("Fetching tags... #{page_i}/#{count_pages * PER_PAGE_NUMBER}")
|
||||
tags.concat(page) unless page.nil?
|
||||
end
|
||||
print_empty_line
|
||||
|
||||
if tags.empty?
|
||||
Helper.log.warn "Warning: Can't find any tags in repo.\
|
||||
Make sure, that you push tags to remote repo via 'git push --tags'"
|
||||
else
|
||||
Helper.log.info "Found #{tags.count} tags"
|
||||
end
|
||||
tags
|
||||
end
|
||||
|
||||
# This method fetch all closed issues and separate them to pull requests and pure issues
|
||||
# (pull request is kind of issue in term of GitHub)
|
||||
# @return [Tuple] with (issues, pull-requests)
|
||||
def fetch_closed_issues_and_pr
|
||||
print "Fetching closed issues...\r" if @options[:verbose]
|
||||
issues = []
|
||||
|
||||
begin
|
||||
response = @github.issues.list user: @options[:user],
|
||||
repo: @options[:project],
|
||||
state: "closed",
|
||||
filter: "all",
|
||||
labels: nil
|
||||
page_i = 0
|
||||
count_pages = response.count_pages
|
||||
response.each_page do |page|
|
||||
page_i += PER_PAGE_NUMBER
|
||||
print_in_same_line("Fetching issues... #{page_i}/#{count_pages * PER_PAGE_NUMBER}")
|
||||
issues.concat(page)
|
||||
break if @options[:max_issues] && issues.length >= @options[:max_issues]
|
||||
end
|
||||
print_empty_line
|
||||
Helper.log.info "Received issues: #{issues.count}"
|
||||
|
||||
rescue Github::Error::Forbidden => e
|
||||
Helper.log.warn e.error_messages.map { |m| m[:message] }.join(", ")
|
||||
Helper.log.warn GH_RATE_LIMIT_EXCEEDED_MSG
|
||||
end
|
||||
|
||||
# separate arrays of issues and pull requests:
|
||||
issues.partition do |x|
|
||||
x[:pull_request].nil?
|
||||
end
|
||||
end
|
||||
|
||||
# Fetch all pull requests. We need them to detect :merged_at parameter
|
||||
# @return [Array] all pull requests
|
||||
def fetch_closed_pull_requests
|
||||
pull_requests = []
|
||||
begin
|
||||
response = if @options[:release_branch].nil?
|
||||
@github.pull_requests.list @options[:user],
|
||||
@options[:project],
|
||||
state: "closed"
|
||||
else
|
||||
@github.pull_requests.list @options[:user],
|
||||
@options[:project],
|
||||
state: "closed",
|
||||
base: @options[:release_branch]
|
||||
end
|
||||
page_i = 0
|
||||
count_pages = response.count_pages
|
||||
response.each_page do |page|
|
||||
page_i += PER_PAGE_NUMBER
|
||||
log_string = "Fetching merged dates... #{page_i}/#{count_pages * PER_PAGE_NUMBER}"
|
||||
print_in_same_line(log_string)
|
||||
pull_requests.concat(page)
|
||||
end
|
||||
print_empty_line
|
||||
rescue Github::Error::Forbidden => e
|
||||
Helper.log.warn e.error_messages.map { |m| m[:message] }.join(", ")
|
||||
Helper.log.warn GH_RATE_LIMIT_EXCEEDED_MSG
|
||||
end
|
||||
|
||||
Helper.log.info "Fetching merged dates: #{pull_requests.count}"
|
||||
pull_requests
|
||||
end
|
||||
|
||||
# Print specified line on the same string
|
||||
# @param [String] log_string
|
||||
def print_in_same_line(log_string)
|
||||
print log_string + "\r"
|
||||
end
|
||||
|
||||
# Print long line with spaces on same line to clear prev message
|
||||
def print_empty_line
|
||||
print_in_same_line(" ")
|
||||
end
|
||||
|
||||
# Fetch event for all issues and add them to :events
|
||||
# @param [Array] issues
|
||||
# @return [Void]
|
||||
def fetch_events_async(issues)
|
||||
i = 0
|
||||
threads = []
|
||||
issues.each_slice(MAX_SIMULTANEOUS_REQUESTS) do |issues_slice|
|
||||
issues_slice.each do |issue|
|
||||
threads << Thread.new do
|
||||
begin
|
||||
response = @github.issues.events.list user: @options[:user],
|
||||
repo: @options[:project],
|
||||
issue_number: issue["number"]
|
||||
issue[:events] = []
|
||||
response.each_page do |page|
|
||||
issue[:events].concat(page)
|
||||
end
|
||||
rescue Github::Error::Forbidden => e
|
||||
Helper.log.warn e.error_messages.map { |m| m[:message] }.join(", ")
|
||||
Helper.log.warn GH_RATE_LIMIT_EXCEEDED_MSG
|
||||
end
|
||||
print_in_same_line("Fetching events for issues and PR: #{i + 1}/#{issues.count}")
|
||||
i += 1
|
||||
end
|
||||
end
|
||||
threads.each(&:join)
|
||||
threads = []
|
||||
end
|
||||
|
||||
# to clear line from prev print
|
||||
print_empty_line
|
||||
|
||||
Helper.log.info "Fetching events for issues and PR: #{i}"
|
||||
end
|
||||
|
||||
# Fetch tag time from repo
|
||||
#
|
||||
# @param [Hash] tag
|
||||
# @return [Time] time of specified tag
|
||||
def fetch_date_of_tag(tag)
|
||||
begin
|
||||
commit_data = @github.git_data.commits.get @options[:user],
|
||||
@options[:project],
|
||||
tag["commit"]["sha"]
|
||||
rescue Github::Error::Forbidden => e
|
||||
Helper.log.warn e.error_messages.map { |m| m[:message] }.join(", ")
|
||||
Helper.log.warn GH_RATE_LIMIT_EXCEEDED_MSG
|
||||
end
|
||||
time_string = commit_data["committer"]["date"]
|
||||
Time.parse(time_string)
|
||||
end
|
||||
|
||||
# Fetch commit for specified event
|
||||
# @return [Hash]
|
||||
def fetch_commit(event)
|
||||
@github.git_data.commits.get @options[:user], @options[:project], event[:commit_id]
|
||||
end
|
||||
end
|
||||
end
|
|
@ -1,5 +1,5 @@
|
|||
# frozen_string_literal: true
|
||||
require_relative "../fetcher"
|
||||
require_relative "../octo_fetcher"
|
||||
require_relative "generator_generation"
|
||||
require_relative "generator_fetcher"
|
||||
require_relative "generator_processor"
|
||||
|
@ -11,25 +11,25 @@ module GitHubChangelogGenerator
|
|||
end
|
||||
|
||||
class Generator
|
||||
attr_accessor :options, :filtered_tags, :github
|
||||
attr_accessor :options, :filtered_tags, :github, :tag_section_mapping
|
||||
|
||||
# A Generator responsible for all logic, related with change log generation from ready-to-parse issues
|
||||
#
|
||||
# Example:
|
||||
# generator = GitHubChangelogGenerator::Generator.new
|
||||
# content = generator.compound_changelog
|
||||
def initialize(options = nil)
|
||||
@options = options || {}
|
||||
def initialize(options = {})
|
||||
@options = options
|
||||
@tag_times_hash = {}
|
||||
@fetcher = GitHubChangelogGenerator::Fetcher.new @options
|
||||
@fetcher = GitHubChangelogGenerator::OctoFetcher.new(options)
|
||||
end
|
||||
|
||||
def fetch_issues_and_pr
|
||||
issues, pull_requests = @fetcher.fetch_closed_issues_and_pr
|
||||
|
||||
@pull_requests = @options[:pulls] ? get_filtered_pull_requests(pull_requests) : []
|
||||
@pull_requests = options[:pulls] ? get_filtered_pull_requests(pull_requests) : []
|
||||
|
||||
@issues = @options[:issues] ? get_filtered_issues(issues) : []
|
||||
@issues = options[:issues] ? get_filtered_issues(issues) : []
|
||||
|
||||
fetch_events_for_issues_and_pr
|
||||
detect_actual_closed_dates(@issues + @pull_requests)
|
||||
|
@ -61,18 +61,18 @@ module GitHubChangelogGenerator
|
|||
newer_tag_link, newer_tag_name, newer_tag_time = detect_link_tag_time(newer_tag)
|
||||
|
||||
github_site = options[:github_site] || "https://github.com"
|
||||
project_url = "#{github_site}/#{@options[:user]}/#{@options[:project]}"
|
||||
project_url = "#{github_site}/#{options[:user]}/#{options[:project]}"
|
||||
|
||||
log = generate_header(newer_tag_name, newer_tag_link, newer_tag_time, older_tag_name, project_url)
|
||||
|
||||
if @options[:issues]
|
||||
if options[:issues]
|
||||
# Generate issues:
|
||||
log += issues_to_log(issues, pull_requests)
|
||||
end
|
||||
|
||||
if @options[:pulls]
|
||||
if options[:pulls]
|
||||
# Generate pull requests:
|
||||
log += generate_sub_section(pull_requests, @options[:merge_prefix])
|
||||
log += generate_sub_section(pull_requests, options[:merge_prefix])
|
||||
end
|
||||
|
||||
log
|
||||
|
@ -87,9 +87,9 @@ module GitHubChangelogGenerator
|
|||
log = ""
|
||||
bugs_a, enhancement_a, issues_a = parse_by_sections(issues, pull_requests)
|
||||
|
||||
log += generate_sub_section(enhancement_a, @options[:enhancement_prefix])
|
||||
log += generate_sub_section(bugs_a, @options[:bug_prefix])
|
||||
log += generate_sub_section(issues_a, @options[:issue_prefix])
|
||||
log += generate_sub_section(enhancement_a, options[:enhancement_prefix])
|
||||
log += generate_sub_section(bugs_a, options[:bug_prefix])
|
||||
log += generate_sub_section(issues_a, options[:issue_prefix])
|
||||
log
|
||||
end
|
||||
|
||||
|
@ -106,32 +106,32 @@ module GitHubChangelogGenerator
|
|||
|
||||
issues.each do |dict|
|
||||
added = false
|
||||
dict.labels.each do |label|
|
||||
if @options[:bug_labels].include? label.name
|
||||
bugs_a.push dict
|
||||
dict["labels"].each do |label|
|
||||
if options[:bug_labels].include?(label["name"])
|
||||
bugs_a.push(dict)
|
||||
added = true
|
||||
next
|
||||
end
|
||||
if @options[:enhancement_labels].include? label.name
|
||||
enhancement_a.push dict
|
||||
if options[:enhancement_labels].include?(label["name"])
|
||||
enhancement_a.push(dict)
|
||||
added = true
|
||||
next
|
||||
end
|
||||
end
|
||||
issues_a.push dict unless added
|
||||
issues_a.push(dict) unless added
|
||||
end
|
||||
|
||||
added_pull_requests = []
|
||||
pull_requests.each do |dict|
|
||||
dict.labels.each do |label|
|
||||
if @options[:bug_labels].include? label.name
|
||||
bugs_a.push dict
|
||||
added_pull_requests.push dict
|
||||
pull_requests.each do |pr|
|
||||
pr["labels"].each do |label|
|
||||
if options[:bug_labels].include?(label["name"])
|
||||
bugs_a.push(pr)
|
||||
added_pull_requests.push(pr)
|
||||
next
|
||||
end
|
||||
if @options[:enhancement_labels].include? label.name
|
||||
enhancement_a.push dict
|
||||
added_pull_requests.push dict
|
||||
if options[:enhancement_labels].include?(label["name"])
|
||||
enhancement_a.push(pr)
|
||||
added_pull_requests.push(pr)
|
||||
next
|
||||
end
|
||||
end
|
||||
|
|
|
@ -1,10 +1,12 @@
|
|||
# frozen_string_literal: true
|
||||
module GitHubChangelogGenerator
|
||||
class Generator
|
||||
MAX_THREAD_NUMBER = 1
|
||||
|
||||
# Fetch event for issues and pull requests
|
||||
# @return [Array] array of fetched issues
|
||||
def fetch_events_for_issues_and_pr
|
||||
if @options[:verbose]
|
||||
if options[:verbose]
|
||||
print "Fetching events for issues and PR: 0/#{@issues.count + @pull_requests.count}\r"
|
||||
end
|
||||
|
||||
|
@ -13,48 +15,47 @@ module GitHubChangelogGenerator
|
|||
end
|
||||
|
||||
# Async fetching of all tags dates
|
||||
def fetch_tags_dates
|
||||
print "Fetching tag dates...\r" if @options[:verbose]
|
||||
def fetch_tags_dates(tags)
|
||||
print "Fetching tag dates...\r" if options[:verbose]
|
||||
# Async fetching tags:
|
||||
threads = []
|
||||
i = 0
|
||||
all = @filtered_tags.count
|
||||
@filtered_tags.each do |tag|
|
||||
all = tags.count
|
||||
tags.each do |tag|
|
||||
print " \r"
|
||||
threads << Thread.new do
|
||||
get_time_of_tag(tag)
|
||||
print "Fetching tags dates: #{i + 1}/#{all}\r" if @options[:verbose]
|
||||
print "Fetching tags dates: #{i + 1}/#{all}\r" if options[:verbose]
|
||||
i += 1
|
||||
end
|
||||
end
|
||||
threads.each(&:join)
|
||||
puts "Fetching tags dates: #{i}" if @options[:verbose]
|
||||
puts "Fetching tags dates: #{i}" if options[:verbose]
|
||||
end
|
||||
|
||||
# Find correct closed dates, if issues was closed by commits
|
||||
def detect_actual_closed_dates(issues)
|
||||
print "Fetching closed dates for issues...\r" if @options[:verbose]
|
||||
print "Fetching closed dates for issues...\r" if options[:verbose]
|
||||
|
||||
max_thread_number = 50
|
||||
issues.each_slice(max_thread_number) do |issues_slice|
|
||||
issues.each_slice(MAX_THREAD_NUMBER) do |issues_slice|
|
||||
threads = []
|
||||
issues_slice.each do |issue|
|
||||
threads << Thread.new { find_closed_date_by_commit(issue) }
|
||||
end
|
||||
threads.each(&:join)
|
||||
end
|
||||
puts "Fetching closed dates for issues: Done!" if @options[:verbose]
|
||||
puts "Fetching closed dates for issues: Done!" if options[:verbose]
|
||||
end
|
||||
|
||||
# Fill :actual_date parameter of specified issue by closed date of the commit, if it was closed by commit.
|
||||
# @param [Hash] issue
|
||||
def find_closed_date_by_commit(issue)
|
||||
unless issue[:events].nil?
|
||||
unless issue["events"].nil?
|
||||
# if it's PR -> then find "merged event", in case of usual issue -> fond closed date
|
||||
compare_string = issue[:merged_at].nil? ? "closed" : "merged"
|
||||
compare_string = issue["merged_at"].nil? ? "closed" : "merged"
|
||||
# reverse! - to find latest closed event. (event goes in date order)
|
||||
issue[:events].reverse!.each do |event|
|
||||
if event[:event].eql? compare_string
|
||||
issue["events"].reverse!.each do |event|
|
||||
if event["event"].eql? compare_string
|
||||
set_date_from_event(event, issue)
|
||||
break
|
||||
end
|
||||
|
@ -68,15 +69,17 @@ module GitHubChangelogGenerator
|
|||
# @param [Hash] event
|
||||
# @param [Hash] issue
|
||||
def set_date_from_event(event, issue)
|
||||
if event[:commit_id].nil?
|
||||
issue[:actual_date] = issue[:closed_at]
|
||||
if event["commit_id"].nil?
|
||||
issue["actual_date"] = issue["closed_at"]
|
||||
else
|
||||
begin
|
||||
commit = @fetcher.fetch_commit(event)
|
||||
issue[:actual_date] = commit[:author][:date]
|
||||
issue["actual_date"] = commit["commit"]["author"]["date"]
|
||||
|
||||
# issue['actual_date'] = commit['author']['date']
|
||||
rescue
|
||||
puts "Warning: Can't fetch commit #{event[:commit_id]}. It is probably referenced from another repo."
|
||||
issue[:actual_date] = issue[:closed_at]
|
||||
puts "Warning: Can't fetch commit #{event['commit_id']}. It is probably referenced from another repo."
|
||||
issue["actual_date"] = issue["closed_at"]
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@ -6,48 +6,24 @@ module GitHubChangelogGenerator
|
|||
# @return [String] Generated change log file
|
||||
def compound_changelog
|
||||
fetch_and_filter_tags
|
||||
sort_tags_by_date(@filtered_tags)
|
||||
fetch_issues_and_pr
|
||||
|
||||
log = ""
|
||||
log += @options[:frontmatter] if @options[:frontmatter]
|
||||
log += "#{@options[:header]}\n\n"
|
||||
log += options[:frontmatter] if options[:frontmatter]
|
||||
log += "#{options[:header]}\n\n"
|
||||
|
||||
log += if @options[:unreleased_only]
|
||||
log += if options[:unreleased_only]
|
||||
generate_log_between_tags(filtered_tags[0], nil)
|
||||
else
|
||||
generate_log_for_all_tags
|
||||
end
|
||||
|
||||
log += File.read(@options[:base]) if File.file?(@options[:base])
|
||||
log += File.read(options[:base]) if File.file?(options[:base])
|
||||
|
||||
log += "\n\n\\* *This Change Log was automatically generated by [github_changelog_generator](https://github.com/skywinder/Github-Changelog-Generator)*"
|
||||
@log = log
|
||||
end
|
||||
|
||||
# @return [String] temp method should be removed soon
|
||||
def generate_for_2_tags(log)
|
||||
tag1 = @options[:tag1]
|
||||
tag2 = @options[:tag2]
|
||||
tags_strings = []
|
||||
filtered_tags.each { |x| tags_strings.push(x["name"]) }
|
||||
|
||||
if tags_strings.include?(tag1)
|
||||
if tags_strings.include?(tag2)
|
||||
to_a = tags_strings.map.with_index.to_a
|
||||
hash = Hash[to_a]
|
||||
index1 = hash[tag1]
|
||||
index2 = hash[tag2]
|
||||
log += generate_log_between_tags(all_tags[index1], all_tags[index2])
|
||||
else
|
||||
raise ChangelogGeneratorError, "Can't find tag #{tag2} -> exit"
|
||||
end
|
||||
else
|
||||
raise ChangelogGeneratorError, "Can't find tag #{tag1} -> exit"
|
||||
end
|
||||
log
|
||||
end
|
||||
|
||||
# @param [Array] issues List of issues on sub-section
|
||||
# @param [String] prefix Nae of sub-section
|
||||
# @return [String] Generate ready-to-go sub-section
|
||||
|
@ -77,21 +53,21 @@ module GitHubChangelogGenerator
|
|||
log = ""
|
||||
|
||||
# Generate date string:
|
||||
time_string = newer_tag_time.strftime @options[:date_format]
|
||||
time_string = newer_tag_time.strftime(options[:date_format])
|
||||
|
||||
# Generate tag name and link
|
||||
release_url = if @options[:release_url]
|
||||
format(@options[:release_url], newer_tag_link)
|
||||
release_url = if options[:release_url]
|
||||
format(options[:release_url], newer_tag_link)
|
||||
else
|
||||
"#{project_url}/tree/#{newer_tag_link}"
|
||||
end
|
||||
log += if newer_tag_name.equal? @options[:unreleased_label]
|
||||
log += if newer_tag_name.equal?(options[:unreleased_label])
|
||||
"## [#{newer_tag_name}](#{release_url})\n\n"
|
||||
else
|
||||
"## [#{newer_tag_name}](#{release_url}) (#{time_string})\n"
|
||||
end
|
||||
|
||||
if @options[:compare_link] && older_tag_link
|
||||
if options[:compare_link] && older_tag_link
|
||||
# Generate compare link
|
||||
log += "[Full Changelog](#{project_url}/compare/#{older_tag_link}...#{newer_tag_link})\n\n"
|
||||
end
|
||||
|
@ -119,12 +95,12 @@ module GitHubChangelogGenerator
|
|||
#
|
||||
# @return [Array] filtered issues and pull requests
|
||||
def filter_issues_for_tags(newer_tag, older_tag)
|
||||
filtered_pull_requests = delete_by_time(@pull_requests, :actual_date, older_tag, newer_tag)
|
||||
filtered_issues = delete_by_time(@issues, :actual_date, older_tag, newer_tag)
|
||||
filtered_pull_requests = delete_by_time(@pull_requests, "actual_date", older_tag, newer_tag)
|
||||
filtered_issues = delete_by_time(@issues, "actual_date", older_tag, newer_tag)
|
||||
|
||||
newer_tag_name = newer_tag.nil? ? nil : newer_tag["name"]
|
||||
|
||||
if @options[:filter_issues_by_milestone]
|
||||
if options[:filter_issues_by_milestone]
|
||||
# delete excess irrelevant issues (according milestones). Issue #22.
|
||||
filtered_issues = filter_by_milestone(filtered_issues, newer_tag_name, @issues)
|
||||
filtered_pull_requests = filter_by_milestone(filtered_pull_requests, newer_tag_name, @pull_requests)
|
||||
|
@ -135,15 +111,13 @@ module GitHubChangelogGenerator
|
|||
# The full cycle of generation for whole project
|
||||
# @return [String] The complete change log
|
||||
def generate_log_for_all_tags
|
||||
puts "Generating log..." if @options[:verbose]
|
||||
puts "Generating log..." if options[:verbose]
|
||||
|
||||
log = generate_unreleased_section
|
||||
|
||||
(1...filtered_tags.size).each do |index|
|
||||
log += generate_log_between_tags(filtered_tags[index], filtered_tags[index - 1])
|
||||
end
|
||||
if filtered_tags.any?
|
||||
log += generate_log_between_tags(nil, filtered_tags.last)
|
||||
@tag_section_mapping.each_pair do |_tag_section, left_right_tags|
|
||||
older_tag, newer_tag = left_right_tags
|
||||
log += generate_log_between_tags(older_tag, newer_tag)
|
||||
end
|
||||
|
||||
log
|
||||
|
@ -151,7 +125,7 @@ module GitHubChangelogGenerator
|
|||
|
||||
def generate_unreleased_section
|
||||
log = ""
|
||||
if @options[:unreleased]
|
||||
if options[:unreleased]
|
||||
unreleased_log = generate_log_between_tags(filtered_tags[0], nil)
|
||||
log += unreleased_log if unreleased_log
|
||||
end
|
||||
|
@ -166,24 +140,24 @@ module GitHubChangelogGenerator
|
|||
# @param [Hash] issue Fetched issue from GitHub
|
||||
# @return [String] Markdown-formatted single issue
|
||||
def get_string_for_issue(issue)
|
||||
encapsulated_title = encapsulate_string issue[:title]
|
||||
encapsulated_title = encapsulate_string issue["title"]
|
||||
|
||||
title_with_number = "#{encapsulated_title} [\\##{issue[:number]}](#{issue.html_url})"
|
||||
title_with_number = "#{encapsulated_title} [\\##{issue['number']}](#{issue['html_url']})"
|
||||
issue_line_with_user(title_with_number, issue)
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def issue_line_with_user(line, issue)
|
||||
return line if !@options[:author] || issue.pull_request.nil?
|
||||
return line if !options[:author] || issue.pull_request.nil?
|
||||
|
||||
user = issue.user
|
||||
user = issue["user"]
|
||||
return "#{line} ({Null user})" unless user
|
||||
|
||||
if @options[:usernames_as_github_logins]
|
||||
"#{line} (@#{user.login})"
|
||||
if options[:usernames_as_github_logins]
|
||||
"#{line} (@#{user['login']})"
|
||||
else
|
||||
"#{line} ([#{user.login}](#{user.html_url}))"
|
||||
"#{line} ([#{user['login']}](#{user['html_url']}))"
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@ -1,15 +1,15 @@
|
|||
# frozen_string_literal: true
|
||||
module GitHubChangelogGenerator
|
||||
class Generator
|
||||
# delete all labels with labels from @options[:exclude_labels] array
|
||||
# delete all labels with labels from options[:exclude_labels] array
|
||||
# @param [Array] issues
|
||||
# @return [Array] filtered array
|
||||
def exclude_issues_by_labels(issues)
|
||||
return issues if !@options[:exclude_labels] || @options[:exclude_labels].empty?
|
||||
return issues if !options[:exclude_labels] || options[:exclude_labels].empty?
|
||||
|
||||
issues.reject do |issue|
|
||||
labels = issue.labels.map(&:name)
|
||||
(labels & @options[:exclude_labels]).any?
|
||||
labels = issue["labels"].map { |l| l["name"] }
|
||||
(labels & options[:exclude_labels]).any?
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -32,18 +32,18 @@ module GitHubChangelogGenerator
|
|||
# @return [Array] issues with milestone #tag_name
|
||||
def find_issues_to_add(all_issues, tag_name)
|
||||
all_issues.select do |issue|
|
||||
if issue.milestone.nil?
|
||||
if issue["milestone"].nil?
|
||||
false
|
||||
else
|
||||
# check, that this milestone in tag list:
|
||||
milestone_is_tag = @filtered_tags.find do |tag|
|
||||
tag.name == issue.milestone.title
|
||||
tag["name"] == issue["milestone"]["title"]
|
||||
end
|
||||
|
||||
if milestone_is_tag.nil?
|
||||
false
|
||||
else
|
||||
issue.milestone.title == tag_name
|
||||
issue["milestone"]["title"] == tag_name
|
||||
end
|
||||
end
|
||||
end
|
||||
|
@ -53,11 +53,11 @@ module GitHubChangelogGenerator
|
|||
def remove_issues_in_milestones(filtered_issues)
|
||||
filtered_issues.select! do |issue|
|
||||
# leave issues without milestones
|
||||
if issue.milestone.nil?
|
||||
if issue["milestone"].nil?
|
||||
true
|
||||
else
|
||||
# check, that this milestone in tag list:
|
||||
@filtered_tags.find { |tag| tag.name == issue.milestone.title }.nil?
|
||||
@filtered_tags.find { |tag| tag["name"] == issue["milestone"]["title"] }.nil?
|
||||
end
|
||||
end
|
||||
end
|
||||
|
@ -68,7 +68,7 @@ module GitHubChangelogGenerator
|
|||
# @param [String] older_tag all issues before this tag date will be excluded. May be nil, if it's first tag
|
||||
# @param [String] newer_tag all issue after this tag will be excluded. May be nil for unreleased section
|
||||
# @return [Array] filtered issues
|
||||
def delete_by_time(issues, hash_key = :actual_date, older_tag = nil, newer_tag = nil)
|
||||
def delete_by_time(issues, hash_key = "actual_date", older_tag = nil, newer_tag = nil)
|
||||
# in case if not tags specified - return unchanged array
|
||||
return issues if older_tag.nil? && newer_tag.nil?
|
||||
|
||||
|
@ -77,7 +77,7 @@ module GitHubChangelogGenerator
|
|||
|
||||
issues.select do |issue|
|
||||
if issue[hash_key]
|
||||
time = Time.parse(issue[hash_key]).utc
|
||||
time = Time.parse(issue[hash_key].to_s).utc
|
||||
|
||||
tag_in_range_old = tag_newer_old_tag?(older_tag_time, time)
|
||||
|
||||
|
@ -121,9 +121,9 @@ module GitHubChangelogGenerator
|
|||
|
||||
# @return [Array] issues without labels or empty array if add_issues_wo_labels is false
|
||||
def filter_wo_labels(issues)
|
||||
if @options[:add_issues_wo_labels]
|
||||
if options[:add_issues_wo_labels]
|
||||
issues_wo_labels = issues.select do |issue|
|
||||
!issue.labels.map(&:name).any?
|
||||
!issue["labels"].map { |l| l["name"] }.any?
|
||||
end
|
||||
return issues_wo_labels
|
||||
end
|
||||
|
@ -131,11 +131,11 @@ module GitHubChangelogGenerator
|
|||
end
|
||||
|
||||
def filter_by_include_labels(issues)
|
||||
if @options[:include_labels].nil?
|
||||
if options[:include_labels].nil?
|
||||
issues
|
||||
else
|
||||
issues.select do |issue|
|
||||
labels = issue.labels.map(&:name) & @options[:include_labels]
|
||||
labels = issue["labels"].map { |l| l["name"] } & options[:include_labels]
|
||||
labels.any?
|
||||
end
|
||||
end
|
||||
|
@ -154,18 +154,18 @@ module GitHubChangelogGenerator
|
|||
# @return [Array] Filtered issues
|
||||
def get_filtered_issues(issues)
|
||||
issues = filter_array_by_labels(issues)
|
||||
puts "Filtered issues: #{issues.count}" if @options[:verbose]
|
||||
puts "Filtered issues: #{issues.count}" if options[:verbose]
|
||||
issues
|
||||
end
|
||||
|
||||
# This method fetches missing params for PR and filter them by specified options
|
||||
# It include add all PR's with labels from @options[:include_labels] array
|
||||
# It include add all PR's with labels from options[:include_labels] array
|
||||
# And exclude all from :exclude_labels array.
|
||||
# @return [Array] filtered PR's
|
||||
def get_filtered_pull_requests(pull_requests)
|
||||
pull_requests = filter_array_by_labels(pull_requests)
|
||||
pull_requests = filter_merged_pull_requests(pull_requests)
|
||||
puts "Filtered pull requests: #{pull_requests.count}" if @options[:verbose]
|
||||
puts "Filtered pull requests: #{pull_requests.count}" if options[:verbose]
|
||||
pull_requests
|
||||
end
|
||||
|
||||
|
@ -174,21 +174,21 @@ module GitHubChangelogGenerator
|
|||
# :merged_at - is a date, when issue PR was merged.
|
||||
# More correct to use merged date, rather than closed date.
|
||||
def filter_merged_pull_requests(pull_requests)
|
||||
print "Fetching merged dates...\r" if @options[:verbose]
|
||||
print "Fetching merged dates...\r" if options[:verbose]
|
||||
closed_pull_requests = @fetcher.fetch_closed_pull_requests
|
||||
|
||||
pull_requests.each do |pr|
|
||||
fetched_pr = closed_pull_requests.find do |fpr|
|
||||
fpr.number == pr.number
|
||||
fpr["number"] == pr["number"]
|
||||
end
|
||||
if fetched_pr
|
||||
pr[:merged_at] = fetched_pr[:merged_at]
|
||||
pr["merged_at"] = fetched_pr["merged_at"]
|
||||
closed_pull_requests.delete(fetched_pr)
|
||||
end
|
||||
end
|
||||
|
||||
pull_requests.select! do |pr|
|
||||
!pr[:merged_at].nil?
|
||||
!pr["merged_at"].nil?
|
||||
end
|
||||
|
||||
pull_requests
|
||||
|
|
|
@ -3,33 +3,59 @@ module GitHubChangelogGenerator
|
|||
class Generator
|
||||
# fetch, filter tags, fetch dates and sort them in time order
|
||||
def fetch_and_filter_tags
|
||||
@filtered_tags = get_filtered_tags(@fetcher.get_all_tags)
|
||||
fetch_tags_dates
|
||||
detect_since_tag
|
||||
detect_due_tag
|
||||
|
||||
all_tags = @fetcher.get_all_tags
|
||||
included_tags = filter_excluded_tags(all_tags)
|
||||
|
||||
fetch_tags_dates(all_tags) # Creates a Hash @tag_times_hash
|
||||
sorted_tags = sort_tags_by_date(included_tags)
|
||||
@filtered_tags = get_filtered_tags(included_tags)
|
||||
|
||||
@tag_section_mapping = build_tag_section_mapping(@filtered_tags, sorted_tags)
|
||||
|
||||
@filtered_tags
|
||||
end
|
||||
|
||||
# Sort all tags by date
|
||||
# @param [Array] filtered_tags are the tags that need a subsection output
|
||||
# @param [Array] all_tags is the list of all tags ordered from newest -> oldest
|
||||
# @return [Hash] key is the tag to output, value is an array of [Left Tag, Right Tag]
|
||||
# PRs to include in this section will be >= [Left Tag Date] and <= [Right Tag Date]
|
||||
def build_tag_section_mapping(filtered_tags, all_tags)
|
||||
tag_mapping = {}
|
||||
filtered_tags.each do |tag|
|
||||
older_tag_idx = all_tags.index(tag) + 1
|
||||
older_tag = all_tags[older_tag_idx]
|
||||
tag_mapping[tag] = [older_tag, tag]
|
||||
end
|
||||
tag_mapping
|
||||
end
|
||||
|
||||
# Sort all tags by date, newest to oldest
|
||||
def sort_tags_by_date(tags)
|
||||
puts "Sorting tags..." if @options[:verbose]
|
||||
puts "Sorting tags..." if options[:verbose]
|
||||
tags.sort_by! do |x|
|
||||
get_time_of_tag(x)
|
||||
end.reverse!
|
||||
end
|
||||
|
||||
# Try to find tag date in local hash.
|
||||
# Otherwise fFetch tag time and put it to local hash file.
|
||||
# @param [Hash] tag_name name of the tag
|
||||
# Returns date for given GitHub Tag hash
|
||||
#
|
||||
# Memoize the date by tag name.
|
||||
#
|
||||
# @param [Hash] tag_name
|
||||
#
|
||||
# @return [Time] time of specified tag
|
||||
def get_time_of_tag(tag_name)
|
||||
raise ChangelogGeneratorError, "tag_name is nil" if tag_name.nil?
|
||||
|
||||
name_of_tag = tag_name["name"]
|
||||
time_for_name = @tag_times_hash[name_of_tag]
|
||||
if !time_for_name.nil?
|
||||
time_for_name
|
||||
else
|
||||
time_string = @fetcher.fetch_date_of_tag tag_name
|
||||
name_of_tag = tag_name.fetch("name")
|
||||
time_for_tag_name = @tag_times_hash[name_of_tag]
|
||||
return time_for_tag_name if time_for_tag_name
|
||||
|
||||
@fetcher.fetch_date_of_tag(tag_name).tap do |time_string|
|
||||
@tag_times_hash[name_of_tag] = time_string
|
||||
time_string
|
||||
end
|
||||
end
|
||||
|
||||
|
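The `build_tag_section_mapping` method above drives the new per-section output. A hedged sketch of what the mapping looks like for a small, already-sorted tag list (the tag names are made up for illustration):

```ruby
v3 = { "name" => "v3" }
v2 = { "name" => "v2" }
v1 = { "name" => "v1" }

sorted_tags   = [v3, v2, v1]  # newest -> oldest, as sort_tags_by_date returns
filtered_tags = [v3, v2]      # tags that should get their own changelog section

# build_tag_section_mapping(filtered_tags, sorted_tags) would return:
#   { v3 => [v2, v3], v2 => [v1, v2] }
# i.e. each section collects issues/PRs closed after the older tag and up to the newer tag.
```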
@ -42,12 +68,12 @@ module GitHubChangelogGenerator
|
|||
newer_tag_time = newer_tag.nil? ? Time.new : get_time_of_tag(newer_tag)
|
||||
|
||||
# if it's future release tag - set this value
|
||||
if newer_tag.nil? && @options[:future_release]
|
||||
newer_tag_name = @options[:future_release]
|
||||
newer_tag_link = @options[:future_release]
|
||||
if newer_tag.nil? && options[:future_release]
|
||||
newer_tag_name = options[:future_release]
|
||||
newer_tag_link = options[:future_release]
|
||||
else
|
||||
# put unreleased label if there is no name for the tag
|
||||
newer_tag_name = newer_tag.nil? ? @options[:unreleased_label] : newer_tag["name"]
|
||||
newer_tag_name = newer_tag.nil? ? options[:unreleased_label] : newer_tag["name"]
|
||||
newer_tag_link = newer_tag.nil? ? "HEAD" : newer_tag_name
|
||||
end
|
||||
[newer_tag_link, newer_tag_name, newer_tag_time]
|
||||
|
@ -55,13 +81,17 @@ module GitHubChangelogGenerator
|
|||
|
||||
# @return [Object] try to find newest tag using #Reader and :base option if specified otherwise returns nil
|
||||
def detect_since_tag
|
||||
@since_tag ||= @options.fetch(:since_tag) { version_of_first_item }
|
||||
@since_tag ||= options.fetch(:since_tag) { version_of_first_item }
|
||||
end
|
||||
|
||||
def detect_due_tag
|
||||
@due_tag ||= options.fetch(:due_tag, nil)
|
||||
end
|
||||
|
||||
def version_of_first_item
|
||||
return unless File.file?(@options[:base].to_s)
|
||||
return unless File.file?(options[:base].to_s)
|
||||
|
||||
sections = GitHubChangelogGenerator::Reader.new.read(@options[:base])
|
||||
sections = GitHubChangelogGenerator::Reader.new.read(options[:base])
|
||||
sections.first["version"] if sections && sections.any?
|
||||
end
|
||||
|
||||
|
@ -70,8 +100,8 @@ module GitHubChangelogGenerator
|
|||
# @return [Array]
|
||||
def get_filtered_tags(all_tags)
|
||||
filtered_tags = filter_since_tag(all_tags)
|
||||
filtered_tags = filter_between_tags(filtered_tags)
|
||||
filter_excluded_tags(filtered_tags)
|
||||
filtered_tags = filter_due_tag(filtered_tags)
|
||||
filter_between_tags(filtered_tags)
|
||||
end
|
||||
|
||||
# @param [Array] all_tags all tags
|
||||
|
@ -80,8 +110,8 @@ module GitHubChangelogGenerator
|
|||
filtered_tags = all_tags
|
||||
tag = detect_since_tag
|
||||
if tag
|
||||
if all_tags.map(&:name).include? tag
|
||||
idx = all_tags.index { |t| t.name == tag }
|
||||
if all_tags.map { |t| t["name"] }.include? tag
|
||||
idx = all_tags.index { |t| t["name"] == tag }
|
||||
filtered_tags = if idx > 0
|
||||
all_tags[0..idx - 1]
|
||||
else
|
||||
|
@ -98,13 +128,12 @@ module GitHubChangelogGenerator
|
|||
# @return [Array] filtered tags according :due_tag option
|
||||
def filter_due_tag(all_tags)
|
||||
filtered_tags = all_tags
|
||||
tag = @options[:due_tag]
|
||||
tag = detect_due_tag
|
||||
if tag
|
||||
if all_tags.any? && all_tags.map(&:name).include?(tag)
|
||||
idx = all_tags.index { |t| t.name == tag }
|
||||
last_index = all_tags.count - 1
|
||||
filtered_tags = if idx > 0 && idx < last_index
|
||||
all_tags[idx + 1..last_index]
|
||||
if all_tags.any? && all_tags.map { |t| t["name"] }.include?(tag)
|
||||
idx = all_tags.index { |t| t["name"] == tag }
|
||||
filtered_tags = if idx > 0
|
||||
all_tags[(idx + 1)..-1]
|
||||
else
|
||||
[]
|
||||
end
|
||||
|
@ -119,13 +148,15 @@ module GitHubChangelogGenerator
|
|||
# @return [Array] filtered tags according :between_tags option
|
||||
def filter_between_tags(all_tags)
|
||||
filtered_tags = all_tags
|
||||
if @options[:between_tags]
|
||||
@options[:between_tags].each do |tag|
|
||||
unless all_tags.map(&:name).include? tag
|
||||
tag_names = filtered_tags.map { |ft| ft["name"] }
|
||||
|
||||
if options[:between_tags]
|
||||
options[:between_tags].each do |tag|
|
||||
unless tag_names.include?(tag)
|
||||
Helper.log.warn "Warning: can't find tag #{tag}, specified with --between-tags option."
|
||||
end
|
||||
end
|
||||
filtered_tags = all_tags.select { |tag| @options[:between_tags].include? tag.name }
|
||||
filtered_tags = all_tags.select { |tag| options[:between_tags].include?(tag["name"]) }
|
||||
end
|
||||
filtered_tags
|
||||
end
|
||||
|
@ -133,9 +164,9 @@ module GitHubChangelogGenerator
|
|||
# @param [Array] all_tags all tags
|
||||
# @return [Array] filtered tags according :exclude_tags or :exclude_tags_regex option
|
||||
def filter_excluded_tags(all_tags)
|
||||
if @options[:exclude_tags]
|
||||
if options[:exclude_tags]
|
||||
apply_exclude_tags(all_tags)
|
||||
elsif @options[:exclude_tags_regex]
|
||||
elsif options[:exclude_tags_regex]
|
||||
apply_exclude_tags_regex(all_tags)
|
||||
else
|
||||
all_tags
|
||||
|
@ -145,39 +176,39 @@ module GitHubChangelogGenerator
|
|||
private
|
||||
|
||||
def apply_exclude_tags(all_tags)
|
||||
if @options[:exclude_tags].is_a?(Regexp)
|
||||
filter_tags_with_regex(all_tags, @options[:exclude_tags])
|
||||
if options[:exclude_tags].is_a?(Regexp)
|
||||
filter_tags_with_regex(all_tags, options[:exclude_tags])
|
||||
else
|
||||
filter_exact_tags(all_tags)
|
||||
end
|
||||
end
|
||||
|
||||
def apply_exclude_tags_regex(all_tags)
|
||||
filter_tags_with_regex(all_tags, Regexp.new(@options[:exclude_tags_regex]))
|
||||
filter_tags_with_regex(all_tags, Regexp.new(options[:exclude_tags_regex]))
|
||||
end
|
||||
|
||||
def filter_tags_with_regex(all_tags, regex)
|
||||
warn_if_nonmatching_regex(all_tags)
|
||||
all_tags.reject { |tag| regex =~ tag.name }
|
||||
all_tags.reject { |tag| regex =~ tag["name"] }
|
||||
end
|
||||
|
||||
def filter_exact_tags(all_tags)
|
||||
@options[:exclude_tags].each do |tag|
|
||||
options[:exclude_tags].each do |tag|
|
||||
warn_if_tag_not_found(all_tags, tag)
|
||||
end
|
||||
all_tags.reject { |tag| @options[:exclude_tags].include? tag.name }
|
||||
all_tags.reject { |tag| options[:exclude_tags].include?(tag["name"]) }
|
||||
end
|
||||
|
||||
def warn_if_nonmatching_regex(all_tags)
|
||||
unless all_tags.map(&:name).any? { |t| @options[:exclude_tags] =~ t }
|
||||
unless all_tags.map { |t| t["name"] }.any? { |t| options[:exclude_tags] =~ t }
|
||||
Helper.log.warn "Warning: unable to reject any tag, using regex "\
|
||||
"#{@options[:exclude_tags].inspect} in --exclude-tags "\
|
||||
"#{options[:exclude_tags].inspect} in --exclude-tags "\
|
||||
"option."
|
||||
end
|
||||
end
|
||||
|
||||
def warn_if_tag_not_found(all_tags, tag)
|
||||
unless all_tags.map(&:name).include? tag
|
||||
unless all_tags.map { |t| t["name"] }.include?(tag)
|
||||
Helper.log.warn "Warning: can't find tag #{tag}, specified with --exclude-tags option."
|
||||
end
|
||||
end
|
||||
|
|
lib/github_changelog_generator/hash.rb (new file, 16 lines)

@@ -0,0 +1,16 @@
# frozen_string_literal: true
class Hash
  def stringify_keys_deep!
    new_hash = {}
    keys.each do |k|
      ks = k.respond_to?(:to_s) ? k.to_s : k
      new_hash[ks] = if values_at(k).first.is_a?(Hash) || values_at(k).first.is_a?(Array)
                       values_at(k).first.send(:stringify_keys_deep!)
                     else
                       values_at(k).first
                     end
    end

    new_hash
  end
end
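Together with the `Array#stringify_keys_deep!` patch added earlier in this diff, this lets the generator convert Octokit's symbol-keyed data into plain string-keyed hashes. A minimal illustration (the sample data is hypothetical, not part of the diff):

```ruby
require "github_changelog_generator"  # loads both monkey patches

issues = [{ number: 1, labels: [{ name: "bug" }] }]
issues.stringify_keys_deep!
# => [{ "number" => 1, "labels" => [{ "name" => "bug" }] }]
```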
lib/github_changelog_generator/octo_fetcher.rb (new file, 322 lines)

@@ -0,0 +1,322 @@
|
|||
# frozen_string_literal: true
|
||||
module GitHubChangelogGenerator
|
||||
# A Fetcher responsible for all requests to GitHub and all basic manipulation with related data
|
||||
# (such as filtering, validating, e.t.c)
|
||||
#
|
||||
# Example:
|
||||
# fetcher = GitHubChangelogGenerator::OctoFetcher.new(options)
|
||||
class OctoFetcher
|
||||
PER_PAGE_NUMBER = 100
|
||||
MAX_THREAD_NUMBER = 1
|
||||
CHANGELOG_GITHUB_TOKEN = "CHANGELOG_GITHUB_TOKEN"
|
||||
GH_RATE_LIMIT_EXCEEDED_MSG = "Warning: Can't finish operation: GitHub API rate limit exceeded, change log may be " \
|
||||
"missing some issues. You can limit the number of issues fetched using the `--max-issues NUM` argument."
|
||||
NO_TOKEN_PROVIDED = "Warning: No token provided (-t option) and variable $CHANGELOG_GITHUB_TOKEN was not found. " \
|
||||
"This script can make only 50 requests to GitHub API per hour without token!"
|
||||
|
||||
# @param options [Hash] Options passed in
|
||||
# @option options [String] :user GitHub username
|
||||
# @option options [String] :project GitHub project
|
||||
# @option options [String] :since Only issues updated at or after this time are returned. This is a timestamp in ISO 8601 format: YYYY-MM-DDTHH:MM:SSZ. eg. Time.parse("2016-01-01 10:00:00").iso8601
|
||||
# @option options [Boolean] :http_cache Use ActiveSupport::Cache::FileStore to cache http requests
|
||||
# @option options [Boolean] :cache_file If using http_cache, this is the cache file path
|
||||
# @option options [Boolean] :cache_log If using http_cache, this is the cache log file path
|
||||
def initialize(options = {}) # rubocop:disable Metrics/CyclomaticComplexity
|
||||
@options = options || {}
|
||||
@user = @options[:user]
|
||||
@project = @options[:project]
|
||||
@since = @options[:since]
|
||||
@http_cache = @options[:http_cache]
|
||||
@cache_file = @options.fetch(:cache_file, "/tmp/github-changelog-http-cache") if @http_cache
|
||||
@cache_log = @options.fetch(:cache_log, "/tmp/github-changelog-logger.log") if @http_cache
|
||||
init_cache if @http_cache
|
||||
|
||||
@github_token = fetch_github_token
|
||||
|
||||
@request_options = { per_page: PER_PAGE_NUMBER }
|
||||
@github_options = {}
|
||||
@github_options[:access_token] = @github_token unless @github_token.nil?
|
||||
@github_options[:api_endpoint] = @options[:github_endpoint] unless @options[:github_endpoint].nil?
|
||||
|
||||
client_type = @options[:github_endpoint].nil? ? Octokit::Client : Octokit::EnterpriseAdminClient
|
||||
@client = client_type.new(@github_options)
|
||||
end
|
||||
|
||||
def init_cache
|
||||
middleware_opts = {
|
||||
serializer: Marshal,
|
||||
store: ActiveSupport::Cache::FileStore.new(@cache_file),
|
||||
logger: Logger.new(@cache_log),
|
||||
shared_cache: false
|
||||
}
|
||||
stack = Faraday::RackBuilder.new do |builder|
|
||||
builder.use Faraday::HttpCache, middleware_opts
|
||||
builder.use Octokit::Response::RaiseError
|
||||
builder.adapter Faraday.default_adapter
|
||||
# builder.response :logger
|
||||
end
|
||||
Octokit.middleware = stack
|
||||
end
|
||||
|
||||
# Fetch all tags from repo
|
||||
#
|
||||
# @return [Array <Hash>] array of tags
|
||||
def get_all_tags
|
||||
print "Fetching tags...\r" if @options[:verbose]
|
||||
|
||||
check_github_response { github_fetch_tags }
|
||||
end
|
||||
|
||||
# Returns the number of pages for a API call
|
||||
#
|
||||
# @return [Integer] number of pages for this API call in total
|
||||
def calculate_pages(client, method, request_options)
|
||||
# Makes the first API call so that we can call last_response
|
||||
check_github_response do
|
||||
client.send(method, user_project, @request_options.merge(request_options))
|
||||
end
|
||||
|
||||
last_response = client.last_response
|
||||
|
||||
if (last_pg = last_response.rels[:last])
|
||||
parse_url_for_vars(last_pg.href)["page"].to_i
|
||||
else
|
||||
1
|
||||
end
|
||||
end
|
||||
|
||||
# Fill input array with tags
|
||||
#
|
||||
# @return [Array <Hash>] array of tags in repo
|
||||
def github_fetch_tags
|
||||
tags = []
|
||||
page_i = 0
|
||||
count_pages = calculate_pages(@client, "tags", {})
|
||||
|
||||
iterate_pages(@client, "tags", {}) do |new_tags|
|
||||
page_i += PER_PAGE_NUMBER
|
||||
print_in_same_line("Fetching tags... #{page_i}/#{count_pages * PER_PAGE_NUMBER}")
|
||||
tags.concat(new_tags)
|
||||
end
|
||||
print_empty_line
|
||||
|
||||
if tags.count == 0
|
||||
Helper.log.warn "Warning: Can't find any tags in repo.\
|
||||
Make sure, that you push tags to remote repo via 'git push --tags'"
|
||||
else
|
||||
Helper.log.info "Found #{tags.count} tags"
|
||||
end
|
||||
# tags are a Sawyer::Resource. Convert to hash
|
||||
tags = tags.map { |h| h.to_hash.stringify_keys_deep! }
|
||||
tags
|
||||
end
|
||||
|
||||
# This method fetch all closed issues and separate them to pull requests and pure issues
|
||||
# (pull request is kind of issue in term of GitHub)
|
||||
#
|
||||
# @return [Tuple] with (issues [Array <Hash>], pull-requests [Array <Hash>])
|
||||
def fetch_closed_issues_and_pr
|
||||
print "Fetching closed issues...\r" if @options[:verbose]
|
||||
issues = []
|
||||
options = {
|
||||
state: "closed",
|
||||
filter: "all",
|
||||
labels: nil
|
||||
}
|
||||
options[:since] = @since unless @since.nil?
|
||||
|
||||
page_i = 0
|
||||
count_pages = calculate_pages(@client, "issues", options)
|
||||
|
||||
iterate_pages(@client, "issues", options) do |new_issues|
|
||||
page_i += PER_PAGE_NUMBER
|
||||
print_in_same_line("Fetching issues... #{page_i}/#{count_pages * PER_PAGE_NUMBER}")
|
||||
issues.concat(new_issues)
|
||||
break if @options[:max_issues] && issues.length >= @options[:max_issues]
|
||||
end
|
||||
print_empty_line
|
||||
Helper.log.info "Received issues: #{issues.count}"
|
||||
|
||||
issues = issues.map { |h| h.to_hash.stringify_keys_deep! }
|
||||
|
||||
# separate arrays of issues and pull requests:
|
||||
issues.partition do |x|
|
||||
x["pull_request"].nil?
|
||||
end
|
||||
end
|
||||
|
||||
# Fetch all pull requests. We need them to detect :merged_at parameter
|
||||
#
|
||||
# @return [Array <Hash>] all pull requests
|
||||
def fetch_closed_pull_requests
|
||||
pull_requests = []
|
||||
options = { state: "closed" }
|
||||
|
||||
unless @options[:release_branch].nil?
|
||||
options[:base] = @options[:release_branch]
|
||||
end
|
||||
|
||||
page_i = 0
|
||||
count_pages = calculate_pages(@client, "pull_requests", options)
|
||||
|
||||
iterate_pages(@client, "pull_requests", options) do |new_pr|
|
||||
page_i += PER_PAGE_NUMBER
|
||||
log_string = "Fetching merged dates... #{page_i}/#{count_pages * PER_PAGE_NUMBER}"
|
||||
print_in_same_line(log_string)
|
||||
pull_requests.concat(new_pr)
|
||||
end
|
||||
print_empty_line
|
||||
|
||||
Helper.log.info "Pull Request count: #{pull_requests.count}"
|
||||
pull_requests = pull_requests.map { |h| h.to_hash.stringify_keys_deep! }
|
||||
pull_requests
|
||||
end
|
||||
|
||||
# Fetch event for all issues and add them to 'events'
|
||||
#
|
||||
# @param [Array] issues
|
||||
# @return [Void]
|
||||
def fetch_events_async(issues)
|
||||
i = 0
|
||||
threads = []
|
||||
|
||||
issues.each_slice(MAX_THREAD_NUMBER) do |issues_slice|
|
||||
issues_slice.each do |issue|
|
||||
threads << Thread.new do
|
||||
issue["events"] = []
|
||||
iterate_pages(@client, "issue_events", issue["number"], {}) do |new_event|
|
||||
issue["events"].concat(new_event)
|
||||
end
|
||||
issue["events"] = issue["events"].map { |h| h.to_hash.stringify_keys_deep! }
|
||||
print_in_same_line("Fetching events for issues and PR: #{i + 1}/#{issues.count}")
|
||||
i += 1
|
||||
end
|
||||
end
|
||||
threads.each(&:join)
|
||||
threads = []
|
||||
end
|
||||
|
||||
# to clear line from prev print
|
||||
print_empty_line
|
||||
|
||||
Helper.log.info "Fetching events for issues and PR: #{i}"
|
||||
end
|
||||
|
||||
# Fetch tag time from repo
|
||||
#
|
||||
# @param [Hash] tag GitHub data item about a Tag
|
||||
#
|
||||
# @return [Time] time of specified tag
|
||||
def fetch_date_of_tag(tag)
|
||||
commit_data = check_github_response { @client.commit(user_project, tag["commit"]["sha"]) }
|
||||
commit_data = commit_data.to_hash.stringify_keys_deep!
|
||||
|
||||
commit_data["commit"]["committer"]["date"]
|
||||
end
|
||||
|
||||
# Fetch commit for specified event
|
||||
#
|
||||
# @return [Hash]
|
||||
def fetch_commit(event)
|
||||
check_github_response do
|
||||
commit = @client.commit(user_project, event["commit_id"])
|
||||
commit = commit.to_hash.stringify_keys_deep!
|
||||
commit
|
||||
end
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
# Iterates through all pages until there are no more :next pages to follow
|
||||
# yields the result per page
|
||||
#
|
||||
# @param [Octokit::Client] client
|
||||
# @param [String] method (eg. 'tags')
|
||||
# @return [Integer] total number of pages
|
||||
def iterate_pages(client, method, *args)
|
||||
if args.size == 1 && args.first.is_a?(Hash)
|
||||
request_options = args.delete_at(0)
|
||||
elsif args.size > 1 && args.last.is_a?(Hash)
|
||||
request_options = args.delete_at(args.length - 1)
|
||||
end
|
||||
|
||||
args.push(@request_options.merge(request_options))
|
||||
|
||||
pages = 1
|
||||
|
||||
check_github_response do
|
||||
client.send(method, user_project, *args)
|
||||
end
|
||||
last_response = client.last_response
|
||||
|
||||
yield last_response.data
|
||||
|
||||
until (next_one = last_response.rels[:next]).nil?
|
||||
pages += 1
|
||||
|
||||
last_response = check_github_response { next_one.get }
|
||||
yield last_response.data
|
||||
end
|
||||
|
||||
pages
|
||||
end
|
||||
|
||||
# This is wrapper with rescue block
|
||||
#
|
||||
# @return [Object] returns exactly the same, what you put in the block, but wrap it with begin-rescue block
|
||||
def check_github_response
|
||||
begin
|
||||
value = yield
|
||||
rescue Octokit::Unauthorized => e
|
||||
Helper.log.error e.message
|
||||
abort "Error: wrong GitHub token"
|
||||
rescue Octokit::Forbidden => e
|
||||
Helper.log.warn e.message
|
||||
Helper.log.warn GH_RATE_LIMIT_EXCEEDED_MSG
|
||||
Helper.log.warn @client.rate_limit
|
||||
end
|
||||
value
|
||||
end
|
||||
|
||||
# Print specified line on the same string
|
||||
#
|
||||
# @param [String] log_string
|
||||
def print_in_same_line(log_string)
|
||||
print log_string + "\r"
|
||||
end
|
||||
|
||||
# Print long line with spaces on same line to clear prev message
|
||||
def print_empty_line
|
||||
print_in_same_line(" ")
|
||||
end
|
||||
|
||||
# Returns GitHub token. First try to use variable, provided by --token option,
|
||||
# otherwise try to fetch it from CHANGELOG_GITHUB_TOKEN env variable.
|
||||
#
|
||||
# @return [String]
|
||||
def fetch_github_token
|
||||
env_var = @options[:token] ? @options[:token] : (ENV.fetch CHANGELOG_GITHUB_TOKEN, nil)
|
||||
|
||||
Helper.log.warn NO_TOKEN_PROVIDED unless env_var
|
||||
|
||||
env_var
|
||||
end
|
||||
|
||||
# @return [String] helper to return Github "user/project"
|
||||
def user_project
|
||||
"#{@options[:user]}/#{@options[:project]}"
|
||||
end
|
||||
|
||||
# Parses a URI and returns a hash of all GET variables
|
||||
#
|
||||
# @param [String] uri eg. https://api.github.com/repositories/43914960/tags?page=37&foo=1
|
||||
# @return [Hash] of all GET variables. eg. { 'page' => 37, 'foo' => 1 }
|
||||
def parse_url_for_vars(uri)
|
||||
URI(uri).query.split("&").each_with_object({}) do |get_var, params|
|
||||
k, v = get_var.split("=")
|
||||
params[k] = v
|
||||
params
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
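The OctoFetcher above replaces the github_api-based Fetcher. A rough usage sketch based on the constructor options documented in this file; the user/project values are illustrative, and the cache paths are the defaults added to parser.rb below:

```ruby
require "github_changelog_generator"

fetcher = GitHubChangelogGenerator::OctoFetcher.new(
  user: "skywinder",            # illustrative
  project: "changelog_test",    # illustrative
  http_cache: true,
  cache_file: "/tmp/github-changelog-http-cache",
  cache_log: "/tmp/github-changelog-logger.log"
)

tags = fetcher.get_all_tags  # [{ "name" => ..., "commit" => { "sha" => ... } }, ...]
issues, pull_requests = fetcher.fetch_closed_issues_and_pr
```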

@@ -161,6 +161,15 @@ module GitHubChangelogGenerator
opts.on("--release-branch [RELEASE-BRANCH]", "Limit pull requests to the release branch, such as master or release") do |release_branch|
  options[:release_branch] = release_branch
end
opts.on("--[no-]http-cache", "Use HTTP Cache to cache Github API requests (useful for large repos) Default is true.") do |http_cache|
  options[:http_cache] = http_cache
end
opts.on("--cache-file [CACHE-FILE]", "Filename to use for cache. Default is /tmp/github-changelog-http-cache") do |cache_file|
  options[:cache_file] = cache_file
end
opts.on("--cache-log [CACHE-LOG]", "Filename to use for cache log. Default is /tmp/github-changelog-logger.log") do |cache_log|
  options[:cache_log] = cache_log
end
opts.on("--[no-]verbose", "Run verbosely. Default is true") do |v|
  options[:verbose] = v
end

@@ -204,7 +213,10 @@ module GitHubChangelogGenerator
issue_prefix: "**Closed issues:**",
bug_prefix: "**Fixed bugs:**",
enhancement_prefix: "**Implemented enhancements:**",
git_remote: "origin"
git_remote: "origin",
http_cache: true,
cache_file: "/tmp/github-changelog-http-cache",
cache_log: "/tmp/github-changelog-logger.log"
}
end

@@ -19,6 +19,8 @@
require "codeclimate-test-reporter"
require "simplecov"
require "coveralls"
require "vcr"
require "webmock/rspec"

# This module is only used to check the environment is currently a testing env
module SpecHelper

@@ -36,6 +38,25 @@ end
require "github_changelog_generator"
require "github_changelog_generator/task"

VCR.configure do |c|
  c.allow_http_connections_when_no_cassette = true
  c.cassette_library_dir = "spec/vcr"
  c.ignore_localhost = true
  c.default_cassette_options = {
    record: :new_episodes,
    serialize_with: :json,
    preserve_exact_body_bytes: true,
    decode_compressed_response: true
  }
  c.filter_sensitive_data("<GITHUB_TOKEN>") do
    "token #{ENV.fetch('CHANGELOG_GITHUB_TOKEN') { 'frobnitz' }}"
  end

  c.configure_rspec_metadata!

  c.hook_into :webmock, :faraday
end

RSpec.configure do |config|
  config.expect_with :rspec do |expectations|
    expectations.include_chain_clauses_in_custom_matcher_descriptions = true
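With `configure_rspec_metadata!` enabled in the VCR block above, a spec can opt into a cassette via metadata. A hypothetical example (the described class, tag, and expectation are made up for illustration):

```ruby
describe "OctoFetcher", :vcr do
  it "records GitHub API traffic into spec/vcr" do
    fetcher = GitHubChangelogGenerator::OctoFetcher.new(user: "skywinder", project: "changelog_test")
    expect(fetcher.get_all_tags).to be_an(Array)
  end
end
```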
|
@ -1,60 +0,0 @@
|
|||
# frozen_string_literal: true
|
||||
VALID_TOKEN = "0123456789abcdef"
|
||||
INVALID_TOKEN = "0000000000000000"
|
||||
|
||||
DEFAULT_OPTIONS = { user: "skywinder",
|
||||
project: "changelog_test" }
|
||||
|
||||
def options_with_invalid_token
|
||||
options = DEFAULT_OPTIONS
|
||||
options[:token] = INVALID_TOKEN
|
||||
options
|
||||
end
|
||||
|
||||
describe GitHubChangelogGenerator::Fetcher do
|
||||
before(:all) do
|
||||
@fetcher = GitHubChangelogGenerator::Fetcher.new
|
||||
end
|
||||
|
||||
describe "#fetch_github_token" do
|
||||
token = GitHubChangelogGenerator::Fetcher::CHANGELOG_GITHUB_TOKEN
|
||||
context "when token in ENV exist" do
|
||||
before { stub_const("ENV", ENV.to_hash.merge(token => VALID_TOKEN)) }
|
||||
subject { @fetcher.fetch_github_token }
|
||||
it { is_expected.to eq(VALID_TOKEN) }
|
||||
end
|
||||
context "when token in ENV is nil" do
|
||||
before { stub_const("ENV", ENV.to_hash.merge(token => nil)) }
|
||||
subject { @fetcher.fetch_github_token }
|
||||
it { is_expected.to be_nil }
|
||||
end
|
||||
context "when token in options and ENV is nil" do
|
||||
before do
|
||||
stub_const("ENV", ENV.to_hash.merge(token => nil))
|
||||
@fetcher = GitHubChangelogGenerator::Fetcher.new(token: VALID_TOKEN)
|
||||
end
|
||||
subject { @fetcher.fetch_github_token }
|
||||
it { is_expected.to eq(VALID_TOKEN) }
|
||||
end
|
||||
context "when token in options and ENV specified" do
|
||||
before do
|
||||
stub_const("ENV", ENV.to_hash.merge(token => "no_matter_what"))
|
||||
@fetcher = GitHubChangelogGenerator::Fetcher.new(token: VALID_TOKEN)
|
||||
end
|
||||
subject { @fetcher.fetch_github_token }
|
||||
it { is_expected.to eq(VALID_TOKEN) }
|
||||
end
|
||||
end
|
||||
|
||||
describe "#github_fetch_tags" do
|
||||
context "when wrong token provided" do
|
||||
before do
|
||||
options = options_with_invalid_token
|
||||
@fetcher = GitHubChangelogGenerator::Fetcher.new(options)
|
||||
end
|
||||
it "should raise Unauthorized error" do
|
||||
expect { @fetcher.github_fetch_tags }.to raise_error Github::Error::Unauthorized
|
||||
end
|
||||
end
|
||||
end
|
||||
end
@ -2,10 +2,10 @@
module GitHubChangelogGenerator
  describe Generator do
    context "#exclude_issues_by_labels" do
      let(:label) { double("the-bad-label", name: "BAD") }
      let(:issue) { double("the-issue-to-be-excluded", labels: [label]) }
      let(:good_label) { double("a-good-label", name: "GOOD") }
      let(:good_issue) { double("an-issue-to-be-kept", labels: [good_label]) }
      let(:label) { { "name" => "BAD" } }
      let(:issue) { { "labels" => [label] } }
      let(:good_label) { { "name" => "GOOD" } }
      let(:good_issue) { { "labels" => [good_label] } }
      let(:issues) { [issue, good_issue] }
      subject(:generator) { described_class.new(exclude_labels: %w(BAD BOO)) }
@ -1,12 +1,14 @@
# frozen_string_literal: true
describe GitHubChangelogGenerator::Generator do
  def tag_mash_with_name(tag)
    Hashie::Mash.new.tap { |mash_tag| mash_tag.name = tag }
  def tag_with_name(tag)
    {
      "name" => tag
    }
  end

  def tags_mash_from_strings(tags_strings)
  def tags_from_strings(tags_strings)
    tags_strings.map do |tag|
      tag_mash_with_name(tag)
      tag_with_name(tag)
    end
  end

@ -17,20 +19,20 @@ describe GitHubChangelogGenerator::Generator do
    end

    subject do
      @generator.get_filtered_tags(tags_mash_from_strings(%w(1 2 3)))
      @generator.get_filtered_tags(tags_from_strings(%w(1 2 3)))
    end
    it { is_expected.to be_a(Array) }
    it { is_expected.to match_array(tags_mash_from_strings(%w(1 2 3))) }
    it { is_expected.to match_array(tags_from_strings(%w(1 2 3))) }
  end
  context "when between_tags same as input array" do
    before do
      @generator = GitHubChangelogGenerator::Generator.new(between_tags: %w(1 2 3))
    end
    subject do
      @generator.get_filtered_tags(tags_mash_from_strings(%w(1 2 3)))
      @generator.get_filtered_tags(tags_from_strings(%w(1 2 3)))
    end
    it { is_expected.to be_a(Array) }
    it { is_expected.to match_array(tags_mash_from_strings(%w(1 2 3))) }
    it { is_expected.to match_array(tags_from_strings(%w(1 2 3))) }
  end

  context "when between_tags filled with correct values" do

@ -38,10 +40,10 @@ describe GitHubChangelogGenerator::Generator do
      @generator = GitHubChangelogGenerator::Generator.new(between_tags: %w(1 2))
    end
    subject do
      @generator.get_filtered_tags(tags_mash_from_strings(%w(1 2 3)))
      @generator.get_filtered_tags(tags_from_strings(%w(1 2 3)))
    end
    it { is_expected.to be_a(Array) }
    it { is_expected.to match_array(tags_mash_from_strings(%w(1 2))) }
    it { is_expected.to match_array(tags_from_strings(%w(1 2))) }
  end

  context "when between_tags filled with invalid values" do

@ -50,133 +52,134 @@ describe GitHubChangelogGenerator::Generator do
    end

    subject do
      @generator.get_filtered_tags(tags_mash_from_strings(%w(1 2 3)))
      @generator.get_filtered_tags(tags_from_strings(%w(1 2 3)))
    end
    it { is_expected.to be_a(Array) }
    it { is_expected.to match_array(tags_mash_from_strings(%w(1))) }
    it { is_expected.to match_array(tags_from_strings(%w(1))) }
  end
end

  describe "#get_filtered_tags" do
    subject do
      generator.get_filtered_tags(tags_mash_from_strings(%w(1 2 3 4 5)))
      generator.get_filtered_tags(tags_from_strings(%w(1 2 3 4 5)))
    end

    context "with excluded and between tags" do
      let(:generator) { GitHubChangelogGenerator::Generator.new(between_tags: %w(1 2 3), exclude_tags: %w(2)) }
    context "respects between tags" do
      let(:generator) { GitHubChangelogGenerator::Generator.new(between_tags: %w(1 2 3)) }

      it { is_expected.to be_a Array }
      it { is_expected.to match_array(tags_mash_from_strings(%w(1 3))) }
      it { is_expected.to match_array(tags_from_strings(%w(1 2 3))) }
    end
  end

  describe "#filter_excluded_tags" do
    subject { generator.filter_excluded_tags(tags_mash_from_strings(%w(1 2 3))) }
    subject { generator.filter_excluded_tags(tags_from_strings(%w(1 2 3))) }

    context "with matching string" do
      let(:generator) { GitHubChangelogGenerator::Generator.new(exclude_tags: %w(3)) }
      it { is_expected.to be_a Array }
      it { is_expected.to match_array(tags_mash_from_strings(%w(1 2))) }
      it { is_expected.to match_array(tags_from_strings(%w(1 2))) }
    end

    context "with non-matching string" do
      let(:generator) { GitHubChangelogGenerator::Generator.new(exclude_tags: %w(invalid tags)) }
      it { is_expected.to be_a Array }
      it { is_expected.to match_array(tags_mash_from_strings(%w(1 2 3))) }
      it { is_expected.to match_array(tags_from_strings(%w(1 2 3))) }
    end

    context "with matching regex" do
      let(:generator) { GitHubChangelogGenerator::Generator.new(exclude_tags: /[23]/) }
      it { is_expected.to be_a Array }
      it { is_expected.to match_array(tags_mash_from_strings(%w(1))) }
      it { is_expected.to match_array(tags_from_strings(%w(1))) }
    end

    context "with non-matching regex" do
      let(:generator) { GitHubChangelogGenerator::Generator.new(exclude_tags: /[abc]/) }
      it { is_expected.to be_a Array }
      it { is_expected.to match_array(tags_mash_from_strings(%w(1 2 3))) }
      it { is_expected.to match_array(tags_from_strings(%w(1 2 3))) }
    end
  end

  describe "#filter_excluded_tags_regex" do
    subject { generator.filter_excluded_tags(tags_mash_from_strings(%w(1 2 3))) }
    subject { generator.filter_excluded_tags(tags_from_strings(%w(1 2 3))) }

    context "with matching regex" do
      let(:generator) { GitHubChangelogGenerator::Generator.new(exclude_tags_regex: "[23]") }
      it { is_expected.to be_a Array }
      it { is_expected.to match_array(tags_mash_from_strings(%w(1))) }
      it { is_expected.to match_array(tags_from_strings(%w(1))) }
    end

    context "with non-matching regex" do
      let(:generator) { GitHubChangelogGenerator::Generator.new(exclude_tags_regex: "[45]") }
      it { is_expected.to be_a Array }
      it { is_expected.to match_array(tags_mash_from_strings(%w(1 2 3))) }
      it { is_expected.to match_array(tags_from_strings(%w(1 2 3))) }
    end
  end

  describe "#filter_since_tag" do
    context "with filled array" do
      subject { generator.filter_since_tag(tags_mash_from_strings(%w(1 2 3))) }
      subject { generator.filter_since_tag(tags_from_strings(%w(1 2 3))) }

      context "with valid since tag" do
        let(:generator) { GitHubChangelogGenerator::Generator.new(since_tag: "2") }
        it { is_expected.to be_a Array }
        it { is_expected.to match_array(tags_mash_from_strings(%w(1))) }
        it { is_expected.to match_array(tags_from_strings(%w(1))) }
      end

      context "with invalid since tag" do
        let(:generator) { GitHubChangelogGenerator::Generator.new(since_tag: "Invalid tag") }
        it { is_expected.to be_a Array }
        it { is_expected.to match_array(tags_mash_from_strings(%w(1 2 3))) }
        it { is_expected.to match_array(tags_from_strings(%w(1 2 3))) }
      end
    end

    context "with empty array" do
      subject { generator.filter_since_tag(tags_mash_from_strings(%w())) }
      subject { generator.filter_since_tag(tags_from_strings(%w())) }

      context "with valid since tag" do
        let(:generator) { GitHubChangelogGenerator::Generator.new(since_tag: "2") }
        it { is_expected.to be_a Array }
        it { is_expected.to match_array(tags_mash_from_strings(%w())) }
        it { is_expected.to match_array(tags_from_strings(%w())) }
      end

      context "with invalid since tag" do
        let(:generator) { GitHubChangelogGenerator::Generator.new(since_tag: "Invalid tag") }
        it { is_expected.to be_a Array }
        it { is_expected.to match_array(tags_mash_from_strings(%w())) }
        it { is_expected.to match_array(tags_from_strings(%w())) }
      end
    end
  end

  describe "#filter_due_tag" do
    context "with filled array" do
      subject { generator.filter_due_tag(tags_mash_from_strings(%w(1 2 3))) }
      subject { generator.filter_due_tag(tags_from_strings(%w(1 2 3))) }

      context "with valid due tag" do
        let(:generator) { GitHubChangelogGenerator::Generator.new(due_tag: "2") }
        it { is_expected.to be_a Array }
        it { is_expected.to match_array(tags_mash_from_strings(%w(3))) }
        it { is_expected.to match_array(tags_from_strings(%w(3))) }
      end

      context "with invalid due tag" do
        let(:generator) { GitHubChangelogGenerator::Generator.new(due_tag: "Invalid tag") }
        it { is_expected.to be_a Array }
        it { is_expected.to match_array(tags_mash_from_strings(%w(1 2 3))) }
        it { is_expected.to match_array(tags_from_strings(%w(1 2 3))) }
      end
    end

    context "with empty array" do
      subject { generator.filter_due_tag(tags_mash_from_strings(%w())) }
      subject { generator.filter_due_tag(tags_from_strings(%w())) }

      context "with valid due tag" do
        let(:generator) { GitHubChangelogGenerator::Generator.new(due_tag: "2") }
        it { is_expected.to be_a Array }
        it { is_expected.to match_array(tags_mash_from_strings(%w())) }
        it { is_expected.to match_array(tags_from_strings(%w())) }
      end

      context "with invalid due tag" do
        let(:generator) { GitHubChangelogGenerator::Generator.new(due_tag: "Invalid tag") }
        it { is_expected.to be_a Array }
        it { is_expected.to match_array(tags_mash_from_strings(%w())) }
        it { is_expected.to match_array(tags_from_strings(%w())) }
      end
    end
  end

@ -191,7 +194,7 @@ describe GitHubChangelogGenerator::Generator do
    end
    context "fetch already filled tag" do
      before { @generator.instance_variable_set :@tag_times_hash, "valid_tag" => current_time }
      subject { @generator.get_time_of_tag tag_mash_with_name("valid_tag") }
      subject { @generator.get_time_of_tag tag_with_name("valid_tag") }
      it { is_expected.to be_a_kind_of(Time) }
      it { is_expected.to eq(current_time) }
    end

@ -202,7 +205,7 @@ describe GitHubChangelogGenerator::Generator do
      @generator.instance_variable_set :@fetcher, mock
    end
    subject do
      of_tag = @generator.get_time_of_tag(tag_mash_with_name("valid_tag"))
      of_tag = @generator.get_time_of_tag(tag_with_name("valid_tag"))
      of_tag
    end
    it { is_expected.to be_a_kind_of(Time) }

@ -229,13 +232,13 @@ describe GitHubChangelogGenerator::Generator do
      @generator.sort_tags_by_date(tags)
    end
    context "sort unsorted tags" do
      let(:tags) { tags_mash_from_strings %w(valid_tag1 valid_tag2 valid_tag3) }
      let(:tags) { tags_from_strings %w(valid_tag1 valid_tag2 valid_tag3) }

      it { is_expected.to be_a_kind_of(Array) }
      it { is_expected.to match_array(tags.reverse!) }
    end
    context "sort sorted tags" do
      let(:tags) { tags_mash_from_strings %w(valid_tag3 valid_tag2 valid_tag1) }
      let(:tags) { tags_from_strings %w(valid_tag3 valid_tag2 valid_tag1) }

      it { is_expected.to be_a_kind_of(Array) }
      it { is_expected.to match_array(tags) }
489
spec/unit/octo_fetcher_spec.rb
Normal file
@ -0,0 +1,489 @@
# frozen_string_literal: true
VALID_TOKEN = "0123456789abcdef"
INVALID_TOKEN = "0000000000000000"

describe GitHubChangelogGenerator::OctoFetcher do
  let(:options) do
    {
      user: "skywinder",
      project: "changelog_test"
    }
  end

  let(:fetcher) { GitHubChangelogGenerator::OctoFetcher.new(options) }

  describe "#fetch_github_token" do
    token = GitHubChangelogGenerator::OctoFetcher::CHANGELOG_GITHUB_TOKEN
    context "when token in ENV exists" do
      before { stub_const("ENV", ENV.to_hash.merge(token => VALID_TOKEN)) }
      subject { fetcher.send(:fetch_github_token) }
      it { is_expected.to eq(VALID_TOKEN) }
    end

    context "when token in ENV is nil" do
      before { stub_const("ENV", ENV.to_hash.merge(token => nil)) }
      subject { fetcher.send(:fetch_github_token) }
      it { is_expected.to be_nil }
    end

    context "when token in options and ENV is nil" do
      let(:options) { { token: VALID_TOKEN } }

      before do
        stub_const("ENV", ENV.to_hash.merge(token => nil))
      end

      subject { fetcher.send(:fetch_github_token) }
      it { is_expected.to eq(VALID_TOKEN) }
    end

    context "when token in options and ENV specified" do
      let(:options) { { token: VALID_TOKEN } }

      before do
        stub_const("ENV", ENV.to_hash.merge(token => "no_matter_what"))
      end

      subject { fetcher.send(:fetch_github_token) }
      it { is_expected.to eq(VALID_TOKEN) }
    end
  end

  describe "#get_all_tags" do
    context "when github_fetch_tags returns tags" do
      it "returns tags" do
        mock_tags = ["tag"]
        allow(fetcher).to receive(:github_fetch_tags).and_return(mock_tags)
        expect(fetcher.get_all_tags).to eq(mock_tags)
      end
    end
  end

  describe "#github_fetch_tags" do
    context "when wrong token provided", :vcr do
      let(:options) do
        {
          user: "skywinder",
          project: "changelog_test",
          token: INVALID_TOKEN
        }
      end

      it "should raise Unauthorized error" do
        expect { fetcher.github_fetch_tags }.to raise_error SystemExit, "Error: wrong GitHub token"
      end
    end

    context "when API call is valid", :vcr do
      it "should return tags" do
        expected_tags = [{ "name" => "v0.0.3",
                           "zipball_url" =>
                             "https://api.github.com/repos/skywinder/changelog_test/zipball/v0.0.3",
                           "tarball_url" =>
                             "https://api.github.com/repos/skywinder/changelog_test/tarball/v0.0.3",
                           "commit" =>
                             { "sha" => "a0cba2b1a1ea9011ab07ee1ac140ba5a5eb8bd90",
                               "url" =>
                                 "https://api.github.com/repos/skywinder/changelog_test/commits/a0cba2b1a1ea9011ab07ee1ac140ba5a5eb8bd90" } },
                         { "name" => "v0.0.2",
                           "zipball_url" =>
                             "https://api.github.com/repos/skywinder/changelog_test/zipball/v0.0.2",
                           "tarball_url" =>
                             "https://api.github.com/repos/skywinder/changelog_test/tarball/v0.0.2",
                           "commit" =>
                             { "sha" => "9b35bb13dcd15b68e7bcbf10cde5eb937a54f710",
                               "url" =>
                                 "https://api.github.com/repos/skywinder/changelog_test/commits/9b35bb13dcd15b68e7bcbf10cde5eb937a54f710" } },
                         { "name" => "v0.0.1",
                           "zipball_url" =>
                             "https://api.github.com/repos/skywinder/changelog_test/zipball/v0.0.1",
                           "tarball_url" =>
                             "https://api.github.com/repos/skywinder/changelog_test/tarball/v0.0.1",
                           "commit" =>
                             { "sha" => "4c2d6d1ed58bdb24b870dcb5d9f2ceed0283d69d",
                               "url" =>
                                 "https://api.github.com/repos/skywinder/changelog_test/commits/4c2d6d1ed58bdb24b870dcb5d9f2ceed0283d69d" } },
                         { "name" => "0.0.4",
                           "zipball_url" =>
                             "https://api.github.com/repos/skywinder/changelog_test/zipball/0.0.4",
                           "tarball_url" =>
                             "https://api.github.com/repos/skywinder/changelog_test/tarball/0.0.4",
                           "commit" =>
                             { "sha" => "ece0c3ab7142b21064b885061c55ede00ef6ce94",
                               "url" =>
                                 "https://api.github.com/repos/skywinder/changelog_test/commits/ece0c3ab7142b21064b885061c55ede00ef6ce94" } }]

        expect(fetcher.github_fetch_tags).to eq(expected_tags)
      end

      it "should return tags count" do
        tags = fetcher.github_fetch_tags
        expect(tags.size).to eq(4)
      end
    end
  end
|
||||
|
||||
describe "#fetch_closed_issues_and_pr" do
|
||||
context "when API call is valid", :vcr do
|
||||
it "returns issues" do
|
||||
issues, pull_requests = fetcher.fetch_closed_issues_and_pr
|
||||
expect(issues.size).to eq(7)
|
||||
expect(pull_requests.size).to eq(14)
|
||||
end
|
||||
|
||||
it "returns issue with proper key/values" do
|
||||
issues, _pull_requests = fetcher.fetch_closed_issues_and_pr
|
||||
|
||||
expected_issue = { "url" => "https://api.github.com/repos/skywinder/changelog_test/issues/14",
|
||||
"repository_url" => "https://api.github.com/repos/skywinder/changelog_test",
|
||||
"labels_url" =>
|
||||
"https://api.github.com/repos/skywinder/changelog_test/issues/14/labels{/name}",
|
||||
"comments_url" =>
|
||||
"https://api.github.com/repos/skywinder/changelog_test/issues/14/comments",
|
||||
"events_url" =>
|
||||
"https://api.github.com/repos/skywinder/changelog_test/issues/14/events",
|
||||
"html_url" => "https://github.com/skywinder/changelog_test/issues/14",
|
||||
"id" => 95_419_412,
|
||||
"number" => 14,
|
||||
"title" => "Issue closed from commit from PR",
|
||||
"user" =>
|
||||
{ "login" => "skywinder",
|
||||
"id" => 3_356_474,
|
||||
"avatar_url" => "https://avatars.githubusercontent.com/u/3356474?v=3",
|
||||
"gravatar_id" => "",
|
||||
"url" => "https://api.github.com/users/skywinder",
|
||||
"html_url" => "https://github.com/skywinder",
|
||||
"followers_url" => "https://api.github.com/users/skywinder/followers",
|
||||
"following_url" =>
|
||||
"https://api.github.com/users/skywinder/following{/other_user}",
|
||||
"gists_url" => "https://api.github.com/users/skywinder/gists{/gist_id}",
|
||||
"starred_url" =>
|
||||
"https://api.github.com/users/skywinder/starred{/owner}{/repo}",
|
||||
"subscriptions_url" => "https://api.github.com/users/skywinder/subscriptions",
|
||||
"organizations_url" => "https://api.github.com/users/skywinder/orgs",
|
||||
"repos_url" => "https://api.github.com/users/skywinder/repos",
|
||||
"events_url" => "https://api.github.com/users/skywinder/events{/privacy}",
|
||||
"received_events_url" =>
|
||||
"https://api.github.com/users/skywinder/received_events",
|
||||
"type" => "User",
|
||||
"site_admin" => false },
|
||||
"labels" => [],
|
||||
"state" => "closed",
|
||||
"locked" => false,
|
||||
"assignee" => nil,
|
||||
"assignees" => [],
|
||||
"milestone" => nil,
|
||||
"comments" => 0,
|
||||
"created_at" => "2015-07-16T12:06:08Z",
|
||||
"updated_at" => "2015-07-16T12:21:42Z",
|
||||
"closed_at" => "2015-07-16T12:21:42Z",
|
||||
"body" => "" }
|
||||
|
||||
# Convert times to Time
|
||||
expected_issue.each_pair do |k, v|
|
||||
expected_issue[k] = Time.parse(v) if v =~ /^2015-/
|
||||
end
|
||||
|
||||
expect(issues.first).to eq(expected_issue)
|
||||
end
|
||||
|
||||
it "returns pull request with proper key/values" do
|
||||
_issues, pull_requests = fetcher.fetch_closed_issues_and_pr
|
||||
|
||||
expected_pr = { "url" => "https://api.github.com/repos/skywinder/changelog_test/issues/21",
|
||||
"repository_url" => "https://api.github.com/repos/skywinder/changelog_test",
|
||||
"labels_url" =>
|
||||
"https://api.github.com/repos/skywinder/changelog_test/issues/21/labels{/name}",
|
||||
"comments_url" =>
|
||||
"https://api.github.com/repos/skywinder/changelog_test/issues/21/comments",
|
||||
"events_url" =>
|
||||
"https://api.github.com/repos/skywinder/changelog_test/issues/21/events",
|
||||
"html_url" => "https://github.com/skywinder/changelog_test/pull/21",
|
||||
"id" => 124_925_759,
|
||||
"number" => 21,
|
||||
"title" => "Merged br (should appear in change log with #20)",
|
||||
"user" =>
|
||||
{ "login" => "skywinder",
|
||||
"id" => 3_356_474,
|
||||
"avatar_url" => "https://avatars.githubusercontent.com/u/3356474?v=3",
|
||||
"gravatar_id" => "",
|
||||
"url" => "https://api.github.com/users/skywinder",
|
||||
"html_url" => "https://github.com/skywinder",
|
||||
"followers_url" => "https://api.github.com/users/skywinder/followers",
|
||||
"following_url" =>
|
||||
"https://api.github.com/users/skywinder/following{/other_user}",
|
||||
"gists_url" => "https://api.github.com/users/skywinder/gists{/gist_id}",
|
||||
"starred_url" =>
|
||||
"https://api.github.com/users/skywinder/starred{/owner}{/repo}",
|
||||
"subscriptions_url" => "https://api.github.com/users/skywinder/subscriptions",
|
||||
"organizations_url" => "https://api.github.com/users/skywinder/orgs",
|
||||
"repos_url" => "https://api.github.com/users/skywinder/repos",
|
||||
"events_url" => "https://api.github.com/users/skywinder/events{/privacy}",
|
||||
"received_events_url" =>
|
||||
"https://api.github.com/users/skywinder/received_events",
|
||||
"type" => "User",
|
||||
"site_admin" => false },
|
||||
"labels" => [],
|
||||
"state" => "closed",
|
||||
"locked" => false,
|
||||
"assignee" => nil,
|
||||
"assignees" => [],
|
||||
"milestone" => nil,
|
||||
"comments" => 0,
|
||||
"created_at" => "2016-01-05T09:24:08Z",
|
||||
"updated_at" => "2016-01-05T09:26:53Z",
|
||||
"closed_at" => "2016-01-05T09:24:27Z",
|
||||
"pull_request" =>
|
||||
{ "url" => "https://api.github.com/repos/skywinder/changelog_test/pulls/21",
|
||||
"html_url" => "https://github.com/skywinder/changelog_test/pull/21",
|
||||
"diff_url" => "https://github.com/skywinder/changelog_test/pull/21.diff",
|
||||
"patch_url" => "https://github.com/skywinder/changelog_test/pull/21.patch" },
|
||||
"body" =>
|
||||
"to test https://github.com/skywinder/github-changelog-generator/pull/305\r\nshould appear in change log with #20" }
|
||||
|
||||
# Convert times to Time
|
||||
expected_pr.each_pair do |k, v|
|
||||
expected_pr[k] = Time.parse(v) if v =~ /^2016-01/
|
||||
end
|
||||
|
||||
expect(pull_requests.first).to eq(expected_pr)
|
||||
end
|
||||
|
||||
it "returns issues with labels" do
|
||||
issues, _pull_requests = fetcher.fetch_closed_issues_and_pr
|
||||
expected = [[], [], ["Bug"], [], ["enhancement"], ["some label"], []]
|
||||
expect(issues.map { |i| i["labels"].map { |l| l["name"] } }).to eq(expected)
|
||||
end
|
||||
|
||||
it "returns pull_requests with labels" do
|
||||
_issues, pull_requests = fetcher.fetch_closed_issues_and_pr
|
||||
expected = [[], [], [], [], [], ["enhancement"], [], [], ["invalid"], [], [], [], [], ["invalid"]]
|
||||
expect(pull_requests.map { |i| i["labels"].map { |l| l["name"] } }).to eq(expected)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe "#fetch_closed_pull_requests" do
|
||||
context "when API call is valid", :vcr do
|
||||
it "returns pull requests" do
|
||||
pull_requests = fetcher.fetch_closed_pull_requests
|
||||
expect(pull_requests.size).to eq(14)
|
||||
end
|
||||
|
||||
it "returns correct pull request keys" do
|
||||
pull_requests = fetcher.fetch_closed_pull_requests
|
||||
|
||||
pr = pull_requests.first
|
||||
expect(pr.keys).to eq(%w(url id html_url diff_url patch_url issue_url number state locked title user body created_at updated_at closed_at merged_at merge_commit_sha assignee assignees milestone commits_url review_comments_url review_comment_url comments_url statuses_url head base _links))
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe "#fetch_events_async" do
|
||||
context "when API call is valid", :vcr do
|
||||
it "populates issues" do
|
||||
issues = [{ "url" => "https://api.github.com/repos/skywinder/changelog_test/issues/14",
|
||||
"repository_url" => "https://api.github.com/repos/skywinder/changelog_test",
|
||||
"labels_url" =>
|
||||
"https://api.github.com/repos/skywinder/changelog_test/issues/14/labels{/name}",
|
||||
"comments_url" =>
|
||||
"https://api.github.com/repos/skywinder/changelog_test/issues/14/comments",
|
||||
"events_url" =>
|
||||
"https://api.github.com/repos/skywinder/changelog_test/issues/14/events",
|
||||
"html_url" => "https://github.com/skywinder/changelog_test/issues/14",
|
||||
"id" => 95_419_412,
|
||||
"number" => 14,
|
||||
"title" => "Issue closed from commit from PR",
|
||||
"user" =>
|
||||
{ "login" => "skywinder",
|
||||
"id" => 3_356_474,
|
||||
"avatar_url" => "https://avatars.githubusercontent.com/u/3356474?v=3",
|
||||
"gravatar_id" => "",
|
||||
"url" => "https://api.github.com/users/skywinder",
|
||||
"html_url" => "https://github.com/skywinder",
|
||||
"followers_url" => "https://api.github.com/users/skywinder/followers",
|
||||
"following_url" =>
|
||||
"https://api.github.com/users/skywinder/following{/other_user}",
|
||||
"gists_url" => "https://api.github.com/users/skywinder/gists{/gist_id}",
|
||||
"starred_url" =>
|
||||
"https://api.github.com/users/skywinder/starred{/owner}{/repo}",
|
||||
"subscriptions_url" =>
|
||||
"https://api.github.com/users/skywinder/subscriptions",
|
||||
"organizations_url" => "https://api.github.com/users/skywinder/orgs",
|
||||
"repos_url" => "https://api.github.com/users/skywinder/repos",
|
||||
"events_url" => "https://api.github.com/users/skywinder/events{/privacy}",
|
||||
"received_events_url" =>
|
||||
"https://api.github.com/users/skywinder/received_events",
|
||||
"type" => "User",
|
||||
"site_admin" => false },
|
||||
"labels" => [],
|
||||
"state" => "closed",
|
||||
"locked" => false,
|
||||
"assignee" => nil,
|
||||
"assignees" => [],
|
||||
"milestone" => nil,
|
||||
"comments" => 0,
|
||||
"created_at" => "2015-07-16T12:06:08Z",
|
||||
"updated_at" => "2015-07-16T12:21:42Z",
|
||||
"closed_at" => "2015-07-16T12:21:42Z",
|
||||
"body" => "" }]
|
||||
|
||||
# Check that they are blank to begin with
|
||||
expect(issues.first["events"]).to be_nil
|
||||
|
||||
fetcher.fetch_events_async(issues)
|
||||
issue_events = issues.first["events"]
|
||||
|
||||
expected_events = [{ "id" => 357_462_189,
|
||||
"url" =>
|
||||
"https://api.github.com/repos/skywinder/changelog_test/issues/events/357462189",
|
||||
"actor" =>
|
||||
{ "login" => "skywinder",
|
||||
"id" => 3_356_474,
|
||||
"avatar_url" => "https://avatars.githubusercontent.com/u/3356474?v=3",
|
||||
"gravatar_id" => "",
|
||||
"url" => "https://api.github.com/users/skywinder",
|
||||
"html_url" => "https://github.com/skywinder",
|
||||
"followers_url" => "https://api.github.com/users/skywinder/followers",
|
||||
"following_url" =>
|
||||
"https://api.github.com/users/skywinder/following{/other_user}",
|
||||
"gists_url" => "https://api.github.com/users/skywinder/gists{/gist_id}",
|
||||
"starred_url" =>
|
||||
"https://api.github.com/users/skywinder/starred{/owner}{/repo}",
|
||||
"subscriptions_url" =>
|
||||
"https://api.github.com/users/skywinder/subscriptions",
|
||||
"organizations_url" => "https://api.github.com/users/skywinder/orgs",
|
||||
"repos_url" => "https://api.github.com/users/skywinder/repos",
|
||||
"events_url" => "https://api.github.com/users/skywinder/events{/privacy}",
|
||||
"received_events_url" =>
|
||||
"https://api.github.com/users/skywinder/received_events",
|
||||
"type" => "User",
|
||||
"site_admin" => false },
|
||||
"event" => "referenced",
|
||||
"commit_id" => "decfe840d1a1b86e0c28700de5362d3365a29555",
|
||||
"commit_url" =>
|
||||
"https://api.github.com/repos/skywinder/changelog_test/commits/decfe840d1a1b86e0c28700de5362d3365a29555",
|
||||
"created_at" => "2015-07-16T12:21:16Z" },
|
||||
{ "id" => 357_462_542,
|
||||
"url" =>
|
||||
"https://api.github.com/repos/skywinder/changelog_test/issues/events/357462542",
|
||||
"actor" =>
|
||||
{ "login" => "skywinder",
|
||||
"id" => 3_356_474,
|
||||
"avatar_url" => "https://avatars.githubusercontent.com/u/3356474?v=3",
|
||||
"gravatar_id" => "",
|
||||
"url" => "https://api.github.com/users/skywinder",
|
||||
"html_url" => "https://github.com/skywinder",
|
||||
"followers_url" => "https://api.github.com/users/skywinder/followers",
|
||||
"following_url" =>
|
||||
"https://api.github.com/users/skywinder/following{/other_user}",
|
||||
"gists_url" => "https://api.github.com/users/skywinder/gists{/gist_id}",
|
||||
"starred_url" =>
|
||||
"https://api.github.com/users/skywinder/starred{/owner}{/repo}",
|
||||
"subscriptions_url" =>
|
||||
"https://api.github.com/users/skywinder/subscriptions",
|
||||
"organizations_url" => "https://api.github.com/users/skywinder/orgs",
|
||||
"repos_url" => "https://api.github.com/users/skywinder/repos",
|
||||
"events_url" => "https://api.github.com/users/skywinder/events{/privacy}",
|
||||
"received_events_url" =>
|
||||
"https://api.github.com/users/skywinder/received_events",
|
||||
"type" => "User",
|
||||
"site_admin" => false },
|
||||
"event" => "closed",
|
||||
"commit_id" => "decfe840d1a1b86e0c28700de5362d3365a29555",
|
||||
"commit_url" =>
|
||||
"https://api.github.com/repos/skywinder/changelog_test/commits/decfe840d1a1b86e0c28700de5362d3365a29555",
|
||||
"created_at" => "2015-07-16T12:21:42Z" }]
|
||||
|
||||
# Convert times to Time
|
||||
expected_events.map! do |event|
|
||||
event.each_pair do |k, v|
|
||||
event[k] = Time.parse(v) if v =~ /^201[56]-/
|
||||
end
|
||||
end
|
||||
|
||||
expect(issue_events).to eq(expected_events)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe "#fetch_date_of_tag" do
|
||||
context "when API call is valid", :vcr do
|
||||
it "returns date" do
|
||||
tag = { "name" => "v0.0.3",
|
||||
"zipball_url" =>
|
||||
"https://api.github.com/repos/skywinder/changelog_test/zipball/v0.0.3",
|
||||
"tarball_url" =>
|
||||
"https://api.github.com/repos/skywinder/changelog_test/tarball/v0.0.3",
|
||||
"commit" =>
|
||||
{ "sha" => "a0cba2b1a1ea9011ab07ee1ac140ba5a5eb8bd90",
|
||||
"url" =>
|
||||
"https://api.github.com/repos/skywinder/changelog_test/commits/a0cba2b1a1ea9011ab07ee1ac140ba5a5eb8bd90" } }
|
||||
|
||||
dt = fetcher.fetch_date_of_tag(tag)
|
||||
expect(dt).to eq(Time.parse("2015-03-04 19:01:48 UTC"))
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe "#fetch_commit" do
|
||||
context "when API call is valid", :vcr do
|
||||
it "returns commit" do
|
||||
event = { "id" => 357_462_189,
|
||||
"url" =>
|
||||
"https://api.github.com/repos/skywinder/changelog_test/issues/events/357462189",
|
||||
"actor" =>
|
||||
{ "login" => "skywinder",
|
||||
"id" => 3_356_474,
|
||||
"avatar_url" => "https://avatars.githubusercontent.com/u/3356474?v=3",
|
||||
"gravatar_id" => "",
|
||||
"url" => "https://api.github.com/users/skywinder",
|
||||
"html_url" => "https://github.com/skywinder",
|
||||
"followers_url" => "https://api.github.com/users/skywinder/followers",
|
||||
"following_url" =>
|
||||
"https://api.github.com/users/skywinder/following{/other_user}",
|
||||
"gists_url" => "https://api.github.com/users/skywinder/gists{/gist_id}",
|
||||
"starred_url" =>
|
||||
"https://api.github.com/users/skywinder/starred{/owner}{/repo}",
|
||||
"subscriptions_url" => "https://api.github.com/users/skywinder/subscriptions",
|
||||
"organizations_url" => "https://api.github.com/users/skywinder/orgs",
|
||||
"repos_url" => "https://api.github.com/users/skywinder/repos",
|
||||
"events_url" => "https://api.github.com/users/skywinder/events{/privacy}",
|
||||
"received_events_url" =>
|
||||
"https://api.github.com/users/skywinder/received_events",
|
||||
"type" => "User",
|
||||
"site_admin" => false },
|
||||
"event" => "referenced",
|
||||
"commit_id" => "decfe840d1a1b86e0c28700de5362d3365a29555",
|
||||
"commit_url" =>
|
||||
"https://api.github.com/repos/skywinder/changelog_test/commits/decfe840d1a1b86e0c28700de5362d3365a29555",
|
||||
"created_at" => "2015-07-16T12:21:16Z" }
|
||||
commit = fetcher.fetch_commit(event)
|
||||
|
||||
expectations = [
|
||||
%w(sha decfe840d1a1b86e0c28700de5362d3365a29555),
|
||||
["url",
|
||||
"https://api.github.com/repos/skywinder/changelog_test/commits/decfe840d1a1b86e0c28700de5362d3365a29555"],
|
||||
# OLD API: "https://api.github.com/repos/skywinder/changelog_test/git/commits/decfe840d1a1b86e0c28700de5362d3365a29555"],
|
||||
["html_url",
|
||||
"https://github.com/skywinder/changelog_test/commit/decfe840d1a1b86e0c28700de5362d3365a29555"],
|
||||
["author",
|
||||
{ "login" => "skywinder", "id" => 3_356_474, "avatar_url" => "https://avatars.githubusercontent.com/u/3356474?v=3", "gravatar_id" => "", "url" => "https://api.github.com/users/skywinder", "html_url" => "https://github.com/skywinder", "followers_url" => "https://api.github.com/users/skywinder/followers", "following_url" => "https://api.github.com/users/skywinder/following{/other_user}", "gists_url" => "https://api.github.com/users/skywinder/gists{/gist_id}", "starred_url" => "https://api.github.com/users/skywinder/starred{/owner}{/repo}", "subscriptions_url" => "https://api.github.com/users/skywinder/subscriptions", "organizations_url" => "https://api.github.com/users/skywinder/orgs", "repos_url" => "https://api.github.com/users/skywinder/repos", "events_url" => "https://api.github.com/users/skywinder/events{/privacy}", "received_events_url" => "https://api.github.com/users/skywinder/received_events", "type" => "User", "site_admin" => false }],
|
||||
["committer",
|
||||
{ "login" => "skywinder", "id" => 3_356_474, "avatar_url" => "https://avatars.githubusercontent.com/u/3356474?v=3", "gravatar_id" => "", "url" => "https://api.github.com/users/skywinder", "html_url" => "https://github.com/skywinder", "followers_url" => "https://api.github.com/users/skywinder/followers", "following_url" => "https://api.github.com/users/skywinder/following{/other_user}", "gists_url" => "https://api.github.com/users/skywinder/gists{/gist_id}", "starred_url" => "https://api.github.com/users/skywinder/starred{/owner}{/repo}", "subscriptions_url" => "https://api.github.com/users/skywinder/subscriptions", "organizations_url" => "https://api.github.com/users/skywinder/orgs", "repos_url" => "https://api.github.com/users/skywinder/repos", "events_url" => "https://api.github.com/users/skywinder/events{/privacy}", "received_events_url" => "https://api.github.com/users/skywinder/received_events", "type" => "User", "site_admin" => false }],
|
||||
["parents",
|
||||
[{ "sha" => "7ec095e5e3caceacedabf44d0b9b10da17c92e51",
|
||||
"url" =>
|
||||
"https://api.github.com/repos/skywinder/changelog_test/commits/7ec095e5e3caceacedabf44d0b9b10da17c92e51",
|
||||
# OLD API: "https://api.github.com/repos/skywinder/changelog_test/git/commits/7ec095e5e3caceacedabf44d0b9b10da17c92e51",
|
||||
"html_url" =>
|
||||
"https://github.com/skywinder/changelog_test/commit/7ec095e5e3caceacedabf44d0b9b10da17c92e51" }]]
|
||||
]
|
||||
|
||||
expectations.each do |property, val|
|
||||
expect(commit[property]).to eq(val)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
|
@ -0,0 +1 @@
|
|||
{"http_interactions":[{"request":{"method":"get","uri":"https://api.github.com/repos/skywinder/changelog_test/issues/14/events?per_page=100","body":{"encoding":"US-ASCII","base64_string":""},"headers":{"Accept":["application/vnd.github.v3+json"],"User-Agent":["Octokit Ruby Gem 4.3.0"],"Content-Type":["application/json"],"Accept-Encoding":["gzip;q=1.0,deflate;q=0.6,identity;q=0.3"]}},"response":{"status":{"code":200,"message":"OK"},"headers":{"Server":["GitHub.com"],"Date":["Fri, 20 May 2016 06:13:06 GMT"],"Content-Type":["application/json; charset=utf-8"],"Transfer-Encoding":["chunked"],"Status":["200 OK"],"X-Ratelimit-Limit":["60"],"X-Ratelimit-Remaining":["37"],"X-Ratelimit-Reset":["1463724861"],"Cache-Control":["public, max-age=60, s-maxage=60"],"Vary":["Accept","Accept-Encoding"],"Etag":["W/\"4d408c4e8053c706d7255563141ccb80\""],"X-Github-Media-Type":["github.v3; format=json"],"Access-Control-Expose-Headers":["ETag, Link, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, X-RateLimit-Reset, X-OAuth-Scopes, X-Accepted-OAuth-Scopes, X-Poll-Interval"],"Access-Control-Allow-Origin":["*"],"Content-Security-Policy":["default-src 'none'"],"Strict-Transport-Security":["max-age=31536000; includeSubdomains; preload"],"X-Content-Type-Options":["nosniff"],"X-Frame-Options":["deny"],"X-Xss-Protection":["1; mode=block"],"X-Served-By":["2d7a5e35115884240089368322196939"],"X-Github-Request-Id":["6C2F0F69:7F8B:2084B01:573EAAF2"]},"body":{"encoding":"ASCII-8BIT","base64_string":"W3siaWQiOjM1NzQ2MjE4OSwidXJsIjoiaHR0cHM6Ly9hcGkuZ2l0aHViLmNv\nbS9yZXBvcy9za3l3aW5kZXIvY2hhbmdlbG9nX3Rlc3QvaXNzdWVzL2V2ZW50\ncy8zNTc0NjIxODkiLCJhY3RvciI6eyJsb2dpbiI6InNreXdpbmRlciIsImlk\nIjozMzU2NDc0LCJhdmF0YXJfdXJsIjoiaHR0cHM6Ly9hdmF0YXJzLmdpdGh1\nYnVzZXJjb250ZW50LmNvbS91LzMzNTY0NzQ/dj0zIiwiZ3JhdmF0YXJfaWQi\nOiIiLCJ1cmwiOiJodHRwczovL2FwaS5naXRodWIuY29tL3VzZXJzL3NreXdp\nbmRlciIsImh0bWxfdXJsIjoiaHR0cHM6Ly9naXRodWIuY29tL3NreXdpbmRl\nciIsImZvbGxvd2Vyc191cmwiOiJodHRwczovL2FwaS5naXRodWIuY29tL3Vz\nZXJzL3NreXdpbmRlci9mb2xsb3dlcnMiLCJmb2xsb3dpbmdfdXJsIjoiaHR0\ncHM6Ly9hcGkuZ2l0aHViLmNvbS91c2Vycy9za3l3aW5kZXIvZm9sbG93aW5n\ney9vdGhlcl91c2VyfSIsImdpc3RzX3VybCI6Imh0dHBzOi8vYXBpLmdpdGh1\nYi5jb20vdXNlcnMvc2t5d2luZGVyL2dpc3Rzey9naXN0X2lkfSIsInN0YXJy\nZWRfdXJsIjoiaHR0cHM6Ly9hcGkuZ2l0aHViLmNvbS91c2Vycy9za3l3aW5k\nZXIvc3RhcnJlZHsvb3duZXJ9ey9yZXBvfSIsInN1YnNjcmlwdGlvbnNfdXJs\nIjoiaHR0cHM6Ly9hcGkuZ2l0aHViLmNvbS91c2Vycy9za3l3aW5kZXIvc3Vi\nc2NyaXB0aW9ucyIsIm9yZ2FuaXphdGlvbnNfdXJsIjoiaHR0cHM6Ly9hcGku\nZ2l0aHViLmNvbS91c2Vycy9za3l3aW5kZXIvb3JncyIsInJlcG9zX3VybCI6\nImh0dHBzOi8vYXBpLmdpdGh1Yi5jb20vdXNlcnMvc2t5d2luZGVyL3JlcG9z\nIiwiZXZlbnRzX3VybCI6Imh0dHBzOi8vYXBpLmdpdGh1Yi5jb20vdXNlcnMv\nc2t5d2luZGVyL2V2ZW50c3svcHJpdmFjeX0iLCJyZWNlaXZlZF9ldmVudHNf\ndXJsIjoiaHR0cHM6Ly9hcGkuZ2l0aHViLmNvbS91c2Vycy9za3l3aW5kZXIv\ncmVjZWl2ZWRfZXZlbnRzIiwidHlwZSI6IlVzZXIiLCJzaXRlX2FkbWluIjpm\nYWxzZX0sImV2ZW50IjoicmVmZXJlbmNlZCIsImNvbW1pdF9pZCI6ImRlY2Zl\nODQwZDFhMWI4NmUwYzI4NzAwZGU1MzYyZDMzNjVhMjk1NTUiLCJjb21taXRf\ndXJsIjoiaHR0cHM6Ly9hcGkuZ2l0aHViLmNvbS9yZXBvcy9za3l3aW5kZXIv\nY2hhbmdlbG9nX3Rlc3QvY29tbWl0cy9kZWNmZTg0MGQxYTFiODZlMGMyODcw\nMGRlNTM2MmQzMzY1YTI5NTU1IiwiY3JlYXRlZF9hdCI6IjIwMTUtMDctMTZU\nMTI6MjE6MTZaIn0seyJpZCI6MzU3NDYyNTQyLCJ1cmwiOiJodHRwczovL2Fw\naS5naXRodWIuY29tL3JlcG9zL3NreXdpbmRlci9jaGFuZ2Vsb2dfdGVzdC9p\nc3N1ZXMvZXZlbnRzLzM1NzQ2MjU0MiIsImFjdG9yIjp7ImxvZ2luIjoic2t5\nd2luZGVyIiwiaWQiOjMzNTY0NzQsImF2YXRhcl91cmwiOiJodHRwczovL2F2\nYXRhcnMuZ2l0aHVidXNlcmNvbnRlbnQuY29tL3UvMzM1NjQ3ND92PTMiLCJn\ncmF2YXRhcl9pZCI6IiIsInVybCI6Imh0dHBzOi8vYXBpLmdpdGh1Yi5jb20v\ndXNlcnMvc2t5d2
luZGVyIiwiaHRtbF91cmwiOiJodHRwczovL2dpdGh1Yi5j\nb20vc2t5d2luZGVyIiwiZm9sbG93ZXJzX3VybCI6Imh0dHBzOi8vYXBpLmdp\ndGh1Yi5jb20vdXNlcnMvc2t5d2luZGVyL2ZvbGxvd2VycyIsImZvbGxvd2lu\nZ191cmwiOiJodHRwczovL2FwaS5naXRodWIuY29tL3VzZXJzL3NreXdpbmRl\nci9mb2xsb3dpbmd7L290aGVyX3VzZXJ9IiwiZ2lzdHNfdXJsIjoiaHR0cHM6\nLy9hcGkuZ2l0aHViLmNvbS91c2Vycy9za3l3aW5kZXIvZ2lzdHN7L2dpc3Rf\naWR9Iiwic3RhcnJlZF91cmwiOiJodHRwczovL2FwaS5naXRodWIuY29tL3Vz\nZXJzL3NreXdpbmRlci9zdGFycmVkey9vd25lcn17L3JlcG99Iiwic3Vic2Ny\naXB0aW9uc191cmwiOiJodHRwczovL2FwaS5naXRodWIuY29tL3VzZXJzL3Nr\neXdpbmRlci9zdWJzY3JpcHRpb25zIiwib3JnYW5pemF0aW9uc191cmwiOiJo\ndHRwczovL2FwaS5naXRodWIuY29tL3VzZXJzL3NreXdpbmRlci9vcmdzIiwi\ncmVwb3NfdXJsIjoiaHR0cHM6Ly9hcGkuZ2l0aHViLmNvbS91c2Vycy9za3l3\naW5kZXIvcmVwb3MiLCJldmVudHNfdXJsIjoiaHR0cHM6Ly9hcGkuZ2l0aHVi\nLmNvbS91c2Vycy9za3l3aW5kZXIvZXZlbnRzey9wcml2YWN5fSIsInJlY2Vp\ndmVkX2V2ZW50c191cmwiOiJodHRwczovL2FwaS5naXRodWIuY29tL3VzZXJz\nL3NreXdpbmRlci9yZWNlaXZlZF9ldmVudHMiLCJ0eXBlIjoiVXNlciIsInNp\ndGVfYWRtaW4iOmZhbHNlfSwiZXZlbnQiOiJjbG9zZWQiLCJjb21taXRfaWQi\nOiJkZWNmZTg0MGQxYTFiODZlMGMyODcwMGRlNTM2MmQzMzY1YTI5NTU1Iiwi\nY29tbWl0X3VybCI6Imh0dHBzOi8vYXBpLmdpdGh1Yi5jb20vcmVwb3Mvc2t5\nd2luZGVyL2NoYW5nZWxvZ190ZXN0L2NvbW1pdHMvZGVjZmU4NDBkMWExYjg2\nZTBjMjg3MDBkZTUzNjJkMzM2NWEyOTU1NSIsImNyZWF0ZWRfYXQiOiIyMDE1\nLTA3LTE2VDEyOjIxOjQyWiJ9XQ==\n"},"http_version":null},"recorded_at":"Fri, 20 May 2016 06:13:10 GMT"}],"recorded_with":"VCR 3.0.1"}
|
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
|
@ -0,0 +1 @@
|
|||
{"http_interactions":[{"request":{"method":"get","uri":"https://api.github.com/repos/skywinder/changelog_test/tags?per_page=100","body":{"encoding":"US-ASCII","base64_string":""},"headers":{"Accept":["application/vnd.github.v3+json"],"User-Agent":["Octokit Ruby Gem 4.3.0"],"Content-Type":["application/json"],"Authorization":["token 0000000000000000"],"Accept-Encoding":["gzip;q=1.0,deflate;q=0.6,identity;q=0.3"]}},"response":{"status":{"code":401,"message":"Unauthorized"},"headers":{"Server":["GitHub.com"],"Date":["Fri, 20 May 2016 05:47:03 GMT"],"Content-Type":["application/json; charset=utf-8"],"Content-Length":["83"],"Status":["401 Unauthorized"],"X-Github-Media-Type":["github.v3; format=json"],"X-Ratelimit-Limit":["60"],"X-Ratelimit-Remaining":["42"],"X-Ratelimit-Reset":["1463724861"],"Access-Control-Expose-Headers":["ETag, Link, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, X-RateLimit-Reset, X-OAuth-Scopes, X-Accepted-OAuth-Scopes, X-Poll-Interval"],"Access-Control-Allow-Origin":["*"],"Content-Security-Policy":["default-src 'none'"],"Strict-Transport-Security":["max-age=31536000; includeSubdomains; preload"],"X-Content-Type-Options":["nosniff"],"X-Frame-Options":["deny"],"X-Xss-Protection":["1; mode=block"],"X-Github-Request-Id":["6C2F0F69:1271A:B8980B7:573EA4D6"]},"body":{"encoding":"UTF-8","base64_string":"eyJtZXNzYWdlIjoiQmFkIGNyZWRlbnRpYWxzIiwiZG9jdW1lbnRhdGlvbl91\ncmwiOiJodHRwczovL2RldmVsb3Blci5naXRodWIuY29tL3YzIn0=\n"},"http_version":null},"recorded_at":"Fri, 20 May 2016 05:47:07 GMT"}],"recorded_with":"VCR 3.0.1"}
|
|
@ -0,0 +1 @@
|
|||
{"http_interactions":[{"request":{"method":"get","uri":"https://api.github.com/repos/skywinder/changelog_test/tags?per_page=100","body":{"encoding":"US-ASCII","base64_string":""},"headers":{"Accept":["application/vnd.github.v3+json"],"User-Agent":["Octokit Ruby Gem 4.3.0"],"Content-Type":["application/json"],"Authorization":["token 0000000000000000"],"Accept-Encoding":["gzip;q=1.0,deflate;q=0.6,identity;q=0.3"]}},"response":{"status":{"code":401,"message":"Unauthorized"},"headers":{"Server":["GitHub.com"],"Date":["Sat, 02 Jul 2016 14:13:57 GMT"],"Content-Type":["application/json; charset=utf-8"],"Content-Length":["83"],"Status":["401 Unauthorized"],"X-Github-Media-Type":["github.v3; format=json"],"X-Ratelimit-Limit":["60"],"X-Ratelimit-Remaining":["59"],"X-Ratelimit-Reset":["1467472437"],"Access-Control-Expose-Headers":["ETag, Link, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, X-RateLimit-Reset, X-OAuth-Scopes, X-Accepted-OAuth-Scopes, X-Poll-Interval"],"Access-Control-Allow-Origin":["*"],"Content-Security-Policy":["default-src 'none'"],"Strict-Transport-Security":["max-age=31536000; includeSubdomains; preload"],"X-Content-Type-Options":["nosniff"],"X-Frame-Options":["deny"],"X-Xss-Protection":["1; mode=block"],"X-Github-Request-Id":["54DBAB83:300E:76DBC85:5777CC25"]},"body":{"encoding":"UTF-8","base64_string":"eyJtZXNzYWdlIjoiQmFkIGNyZWRlbnRpYWxzIiwiZG9jdW1lbnRhdGlvbl91\ncmwiOiJodHRwczovL2RldmVsb3Blci5naXRodWIuY29tL3YzIn0=\n"},"http_version":null},"recorded_at":"Sat, 02 Jul 2016 14:13:58 GMT"}],"recorded_with":"VCR 3.0.1"}