2014-11-06 13:51:15 +00:00
|
|
|
#!/usr/bin/env ruby
|
|
|
|
|
|
|
|
require 'github_api'
|
|
|
|
require 'json'
|
2014-11-12 15:59:40 +00:00
|
|
|
require 'colorize'
|
2014-12-15 12:15:39 +00:00
|
|
|
require 'benchmark'
|
|
|
|
|
2014-11-06 14:11:45 +00:00
|
|
|
require_relative 'github_changelog_generator/parser'
|
2014-12-03 13:38:05 +00:00
|
|
|
require_relative 'github_changelog_generator/generator'
|
2014-11-17 16:01:10 +00:00
|
|
|
require_relative 'github_changelog_generator/version'
|
2015-03-26 04:58:16 +00:00
|
|
|
require_relative 'github_changelog_generator/reader'
|
2014-11-06 13:51:15 +00:00
|
|
|
|
2014-11-17 15:54:13 +00:00
|
|
|
module GitHubChangelogGenerator
|
|
|
|
class ChangelogGenerator
|
2014-11-06 13:51:15 +00:00
|
|
|
|
2014-11-17 15:54:13 +00:00
|
|
|
# Parsed CLI options, the full tag list and the GitHub API client,
# exposed for collaborators (e.g. Generator) and tests.
attr_accessor :options, :all_tags, :github

# Page size used for every paginated GitHub API request.
PER_PAGE_NUMBER = 30

# Warning printed whenever a GitHub API call raises (most commonly rate limiting).
GH_RATE_LIMIT_EXCEEDED_MSG = 'Warning: GitHub API rate limit exceed (5000 per hour), change log may not ' +
    'contain some issues. You can limit the number of issues fetched using the `--max-issues NUM` argument'
|
2014-12-15 13:19:56 +00:00
|
|
|
|
2014-11-18 13:20:57 +00:00
|
|
|
# Builds a fully primed generator: parses CLI options, authenticates
# against the GitHub API, fetches tags, issues and pull requests, and
# pre-filters them according to the parsed options.
def initialize
  @options = Parser.parse_options

  # resolves @github_token from -t or $CHANGELOG_GITHUB_TOKEN
  fetch_github_token

  github_options = {per_page: PER_PAGE_NUMBER}
  github_options[:oauth_token] = @github_token unless @github_token.nil?
  github_options[:endpoint] = options[:github_endpoint] unless options[:github_endpoint].nil?
  # NOTE(review): assigns options[:github_endpoint] while guarding on
  # options[:github_site] — looks like it should read :github_site; confirm.
  github_options[:site] = options[:github_endpoint] unless options[:github_site].nil?

  begin
    @github = Github.new github_options
  rescue
    puts GH_RATE_LIMIT_EXCEEDED_MSG.yellow
  end

  @generator = Generator.new(@options)

  # Tags plus closed issues/PRs (fetched in one pass, then split by
  # the presence of pull_request on each item).
  @all_tags = self.get_all_tags
  @issues, @pull_requests = self.fetch_issues_and_pull_requests

  if @options[:pulls]
    @pull_requests = self.get_filtered_pull_requests
  else
    @pull_requests = []
  end

  if @options[:issues]
    @issues = self.get_filtered_issues
  else
    @issues = []
  end

  # enrich each item with its event stream, then resolve real closing dates
  fetch_event_for_issues_and_pr
  detect_actual_closed_dates

  # Cache of tag name -> Time, filled lazily by get_time_of_tag.
  @tag_times_hash = {}
end
|
2014-11-06 13:51:15 +00:00
|
|
|
|
2015-02-17 20:39:37 +00:00
|
|
|
# Resolves the "actual" closed date of every fetched issue and pull
# request by inspecting its events (see find_closed_date_by_commit).
# One thread per item parallelizes the API round-trips.
def detect_actual_closed_dates
  if @options[:verbose]
    print "Fetching closed dates for issues...\r"
  end

  threads = []

  @issues.each { |issue|
    threads << Thread.new {
      find_closed_date_by_commit(issue)
    }
  }

  @pull_requests.each { |pull_request|
    threads << Thread.new {
      find_closed_date_by_commit(pull_request)
    }
  }
  # wait for all workers before moving on
  threads.each { |thr| thr.join }

  if @options[:verbose]
    puts 'Fetching closed dates for issues: Done!'
  end
end
|
|
|
|
|
|
|
|
# Determines the real closing date of one issue/PR and stores it under
# issue[:actual_date]. For PRs the latest 'merged' event wins, for plain
# issues the latest 'closed' event; when the event carries a commit id,
# that commit's author date is used instead of the issue's closed_at.
def find_closed_date_by_commit(issue)
  unless issue['events'].nil?
    #if it's PR -> then find "merged event", in case of usual issue -> fond closed date
    compare_string = issue[:merged_at].nil? ? 'closed' : 'merged'
    # reverse! - to find latest closed event. (event goes in date order)
    issue['events'].reverse!.each { |event|
      if event[:event].eql? compare_string
        if event[:commit_id].nil?
          # closing event with no commit attached -> fall back to closed_at
          issue[:actual_date] = issue[:closed_at]
        else
          begin
            begin
              commit = @github.git_data.commits.get @options[:user], @options[:project], event[:commit_id]
            rescue
              # NOTE(review): when this rescue fires, `commit` is nil and the
              # access below raises; the outer rescue then falls back to closed_at.
              puts GH_RATE_LIMIT_EXCEEDED_MSG.yellow
            end
            issue[:actual_date] = commit[:author][:date]
          rescue
            # commit may live in another repository (e.g. closed via a fork)
            puts "Warning: can't fetch commit #{event[:commit_id]} probably it referenced from another repo."
            issue[:actual_date] = issue[:closed_at]
          end
        end
        # only the most recent matching event counts
        break
      end
    }
  end
  #TODO: assert issues, that remain without 'actual_date' hash for some reason.
end
|
|
|
|
|
2014-11-17 15:54:13 +00:00
|
|
|
# Pretty-prints any JSON-serializable structure to stdout (debug helper).
def print_json(json)
  formatted = JSON.pretty_generate(json)
  puts formatted
end
|
2014-11-06 13:51:15 +00:00
|
|
|
|
2015-02-18 21:32:40 +00:00
|
|
|
# Fetches all closed pull requests via the pull-requests API (which,
# unlike the issues API, exposes :merged_at) and copies :merged_at onto
# the matching entries of @pull_requests.
def fetch_merged_at_pull_requests
  if @options[:verbose]
    print "Fetching merged dates...\r"
  end
  pull_requests = []
  begin
    response = @github.pull_requests.list @options[:user], @options[:project], :state => 'closed'
    page_i = 0
    response.each_page do |page|
      page_i += PER_PAGE_NUMBER
      count_pages = response.count_pages
      print "Fetching merged dates... #{page_i}/#{count_pages * PER_PAGE_NUMBER}\r"
      pull_requests.concat(page)
    end
  rescue
    puts GH_RATE_LIMIT_EXCEEDED_MSG.yellow
  end

  # clear the progress line
  print "                                                \r"

  # Match each known PR by number and copy its merge date across.
  # NOTE(review): if a PR is absent from the freshly fetched list (e.g.
  # after the rescue above), fetched_pr is nil and the next line raises —
  # confirm whether crashing here is acceptable.
  @pull_requests.each { |pr|
    fetched_pr = pull_requests.find { |fpr|
      fpr.number == pr.number }
    pr[:merged_at] = fetched_pr[:merged_at]
    # shrink the search space for subsequent iterations
    pull_requests.delete(fetched_pr)
  }

  if @options[:verbose]
    puts 'Fetching merged dates... Done!'
  end
end
|
|
|
|
|
|
|
|
# Returns the pull requests that should appear in the change log:
# only merged PRs, additionally narrowed by the include/exclude label
# options, optionally re-adding merged PRs that carry no labels at all.
#
# @return [Array] filtered pull requests
def get_filtered_pull_requests
  # fill in :merged_at on @pull_requests (the issues API doesn't provide it)
  fetch_merged_at_pull_requests

  # only merged pull requests belong in the log
  filtered_pull_requests = @pull_requests.select { |pr| pr[:merged_at] != nil }

  unless @options[:include_labels].nil?
    # BUGFIX: filter the merged subset, not the raw @pull_requests list —
    # previously the merged_at filter above was silently discarded here.
    filtered_pull_requests = filtered_pull_requests.select { |issue|
      # keep only PRs carrying at least one of the requested labels
      (issue.labels.map { |label| label.name } & @options[:include_labels]).any?
    }
  end

  unless @options[:exclude_labels].nil?
    filtered_pull_requests = filtered_pull_requests.select { |issue|
      # drop PRs carrying any of the excluded labels
      !(issue.labels.map { |label| label.name } & @options[:exclude_labels]).any?
    }
  end

  if @options[:add_issues_wo_labels]
    # Re-add merged PRs without any labels (BUGFIX: restricted to merged
    # PRs so unmerged ones can no longer sneak back in).
    issues_wo_labels = @pull_requests.select { |issue|
      issue[:merged_at] != nil && !issue.labels.map { |label| label.name }.any?
    }
    filtered_pull_requests |= issues_wo_labels
  end

  if @options[:verbose]
    puts "Filtered pull requests: #{filtered_pull_requests.count}"
  end

  filtered_pull_requests
end
|
|
|
|
|
2014-11-17 15:54:13 +00:00
|
|
|
# Entry point: composes the whole change log according to the parsed
# options and writes it to the output file. (Method name keeps its
# historical typo — external callers invoke it as compund_changelog.)
def compund_changelog
  log = "# Change Log\n\n"

  if @options[:unreleased_only]
    # only the section between the latest tag and HEAD
    log += self.generate_log_between_tags(self.all_tags[0], nil)
  elsif @options[:tag1] and @options[:tag2]
    # only the section between two explicitly named tags
    tag1 = @options[:tag1]
    tag2 = @options[:tag2]
    tags_strings = []
    self.all_tags.each { |x| tags_strings.push(x['name']) }

    if tags_strings.include?(tag1)
      if tags_strings.include?(tag2)
        # map each tag name back to its index in all_tags
        to_a = tags_strings.map.with_index.to_a
        hash = Hash[to_a]
        index1 = hash[tag1]
        index2 = hash[tag2]
        log += self.generate_log_between_tags(self.all_tags[index1], self.all_tags[index2])
      else
        puts "Can't find tag #{tag2} -> exit"
        exit
      end
    else
      puts "Can't find tag #{tag1} -> exit"
      exit
    end
  else
    # default: every tag pair, plus the unreleased section if requested
    log += self.generate_log_for_all_tags
  end

  log += "\n\n\\* *This Change Log was automatically generated by [github_changelog_generator](https://github.com/skywinder/Github-Changelog-Generator)*"

  output_filename = "#{@options[:output]}"
  File.open(output_filename, 'w') { |file| file.write(log) }
  puts 'Done!'
  puts "Generated log placed in #{`pwd`.strip!}/#{output_filename}"
end
|
2014-11-06 13:51:15 +00:00
|
|
|
|
2014-11-17 15:54:13 +00:00
|
|
|
# Builds the full log body: resolves and sorts tag dates, optionally
# prepends the unreleased section, renders one section per pair of
# consecutive tags, and finally the section up to the oldest tag.
def generate_log_for_all_tags
  fetch_tags_dates

  if @options[:verbose]
    puts "Sorting tags.."
  end

  # newest tag first
  @all_tags.sort_by! { |x| self.get_time_of_tag(x) }.reverse!

  if @options[:verbose]
    puts "Generating log.."
  end

  log = ''

  if @options[:unreleased] && @all_tags.count != 0
    # section between the newest tag and HEAD (may be empty -> nil-safe)
    unreleased_log = self.generate_log_between_tags(self.all_tags[0], nil)
    if unreleased_log
      log += unreleased_log
    end
  end

  # one section per consecutive tag pair (older at index, newer at index-1)
  (1 ... self.all_tags.size).each { |index|
    log += self.generate_log_between_tags(self.all_tags[index], self.all_tags[index-1])
  }
  if @all_tags.count != 0
    # everything before (and including) the oldest tag
    log += generate_log_between_tags(nil, self.all_tags.last)
  end

  log
end
|
|
|
|
|
|
|
|
# Pre-fills @tag_times_hash with the commit date of every tag, using one
# thread per tag to parallelize the API round-trips.
def fetch_tags_dates
  if @options[:verbose]
    print "Fetching tags dates..\r"
  end

  # Async fetching tags:
  threads = []
  i = 0
  all = @all_tags.count
  @all_tags.each { |tag|
    # explicit set @tag_times_hash to write data safety.
    threads << Thread.new {
      self.get_time_of_tag(tag, @tag_times_hash)
      if @options[:verbose]
        # NOTE(review): i is incremented from multiple threads without a
        # mutex — the progress counter is best-effort only.
        print "Fetching tags dates: #{i+1}/#{all}\r"
        i+=1
      end
    }
  }

  # clear the progress line
  print "                                \r"

  threads.each { |thr| thr.join }

  if @options[:verbose]
    puts 'Fetching tags: Done!'
  end
end
|
|
|
|
|
2014-11-17 15:54:13 +00:00
|
|
|
# Checks whether pull request +number+ was merged. (Method name keeps its
# historical typo because callers may rely on it.) Returns nil — after
# printing a warning — when the API call fails, e.g. on rate limiting.
def is_megred(number)
  @github.pull_requests.merged? @options[:user], @options[:project], number
rescue
  puts GH_RATE_LIMIT_EXCEEDED_MSG.yellow
end
|
2014-11-07 09:45:01 +00:00
|
|
|
|
2014-11-17 15:54:13 +00:00
|
|
|
# Fetches every tag of the repository, following pagination.
#
# @return [Array] tag hashes as returned by the repos API (empty on failure)
def get_all_tags
  if @options[:verbose]
    print "Fetching tags...\r"
  end

  tags = []

  begin
    response = @github.repos.tags @options[:user], @options[:project]
    page_i = 0
    count_pages = response.count_pages
    response.each_page do |page|
      page_i += PER_PAGE_NUMBER
      print "Fetching tags... #{page_i}/#{count_pages * PER_PAGE_NUMBER}\r"
      tags.concat(page)
    end
    # clear the progress line
    print "                               \r"
    if @options[:verbose]
      puts "Found #{tags.count} tags"
    end
  rescue
    puts GH_RATE_LIMIT_EXCEEDED_MSG.yellow
  end

  tags
end
|
2014-11-06 13:51:15 +00:00
|
|
|
|
2015-03-05 08:33:54 +00:00
|
|
|
# Resolves the GitHub OAuth token: the -t CLI option wins, otherwise the
# CHANGELOG_GITHUB_TOKEN environment variable. Prints a warning when no
# token is available (unauthenticated requests are limited to 50/hour).
# The result is memoized into @github_token.
def fetch_github_token
  # idiom: `x ? x : y` collapsed to `x || y`
  env_var = @options[:token] || ENV.fetch('CHANGELOG_GITHUB_TOKEN', nil)

  unless env_var
    puts "Warning: No token provided (-t option) and variable $CHANGELOG_GITHUB_TOKEN was not found.".yellow
    puts "This script can make only 50 requests to GitHub API per hour without token!".yellow
  end

  @github_token ||= env_var
end
|
|
|
|
|
2014-11-17 15:54:13 +00:00
|
|
|
# Renders the log section between two tags.
# older_tag nil - means it's first tag, newer_tag nil - means it unreleased section
def generate_log_between_tags(older_tag, newer_tag)
  # restrict issues/PRs to the time window between the two tags
  filtered_pull_requests = delete_by_time(@pull_requests, :actual_date, older_tag, newer_tag)
  filtered_issues = delete_by_time(@issues, :actual_date, older_tag, newer_tag)

  newer_tag_name = newer_tag.nil? ? nil : newer_tag['name']
  older_tag_name = older_tag.nil? ? nil : older_tag['name']

  if @options[:filter_issues_by_milestone]
    #delete excess irrelevant issues (according milestones)
    filtered_issues = filter_by_milestone(filtered_issues, newer_tag_name, @issues)
    filtered_pull_requests = filter_by_milestone(filtered_pull_requests, newer_tag_name, @pull_requests)
  end

  if filtered_issues.empty? && filtered_pull_requests.empty? && newer_tag.nil?
    # do not generate empty unreleased section
    return ''
  end

  self.create_log(filtered_pull_requests, filtered_issues, newer_tag, older_tag_name)
end
|
2014-12-22 13:41:20 +00:00
|
|
|
|
2015-02-17 23:24:51 +00:00
|
|
|
# Drops issues whose milestone corresponds to some tag (they belong to
# that tag's section instead), then — when a target tag name is given —
# pulls in the issues from +src_array+ whose milestone names exactly
# that tag. Mutates the passed-in +filtered_issues+ array (as before).
def filter_by_milestone(filtered_issues, newer_tag_name, src_array)
  # keep issues without a milestone, and issues whose milestone does not
  # correspond to any known tag
  filtered_issues.select! { |issue|
    issue.milestone.nil? || @all_tags.find { |tag| tag.name == issue.milestone.title }.nil?
  }

  unless newer_tag_name.nil?
    # add missed issues: milestone must be a real tag AND name this tag
    issues_to_add = src_array.select { |issue|
      milestone = issue.milestone
      next false if milestone.nil?
      milestone_is_tag = @all_tags.find { |tag| tag.name == milestone.title }
      !milestone_is_tag.nil? && milestone.title == newer_tag_name
    }
    filtered_issues |= issues_to_add
  end

  filtered_issues
end
|
2014-11-06 13:51:15 +00:00
|
|
|
|
2015-02-18 16:27:57 +00:00
|
|
|
# Selects the elements of +array+ whose +hash_key+ timestamp falls in the
# half-open interval (older_tag time, newer_tag time]. A nil tag on either
# side leaves that side unbounded; elements missing the timestamp are dropped.
#
# @param [Array] array issues or pull requests carrying a date under hash_key
# @param [Symbol] hash_key e.g. :actual_date
# @param [Hash, nil] older_tag lower (exclusive) boundary tag
# @param [Hash, nil] newer_tag upper (inclusive) boundary tag
# @raise [RuntimeError] when both tags are nil (interval would be unbounded)
# @return [Array] the elements inside the interval
def delete_by_time(array, hash_key, older_tag = nil, newer_tag = nil)
  raise 'At least one of the tags should be not nil!' if older_tag.nil? && newer_tag.nil?

  newer_tag_time = get_time_of_tag(newer_tag)
  older_tag_time = get_time_of_tag(older_tag)

  array.select { |req|
    # drop elements that never got a timestamp assigned
    next false unless req[hash_key]

    t = Time.parse(req[hash_key]).utc
    # idiom: nil-check + comparison collapsed into single boolean expressions
    in_range_old = older_tag_time.nil? || t > older_tag_time
    in_range_new = newer_tag_time.nil? || t <= newer_tag_time
    in_range_old && in_range_new
  }
end
|
2014-11-06 13:51:15 +00:00
|
|
|
|
2015-01-24 12:44:34 +00:00
|
|
|
# Renders one complete section of the log: the header, the categorized
# issues (enhancements / bugs / everything else) and the merged PRs.
#
# @param [Array] pull_requests PRs belonging to this section
# @param [Array] issues issues belonging to this section
# @param [Hash, nil] newer_tag tag this section leads up to (nil => unreleased)
# @param [String] older_tag_name lower boundary tag name (for the compare link)
# @return [String] the rendered section
def create_log(pull_requests, issues, newer_tag, older_tag_name = nil)
  newer_tag_time = newer_tag.nil? ? nil : self.get_time_of_tag(newer_tag)
  newer_tag_name = newer_tag.nil? ? nil : newer_tag['name']

  github_site = options[:github_site] || 'https://github.com'
  project_url = "#{github_site}/#{@options[:user]}/#{@options[:project]}"

  if newer_tag.nil?
    # unreleased section: labelled per options and linked against HEAD
    newer_tag_name = @options[:unreleased_label]
    newer_tag_link = 'HEAD'
    newer_tag_time = Time.new
  else
    newer_tag_link = newer_tag_name
  end

  log = ''
  log += generate_header(log, newer_tag_name, newer_tag_link, newer_tag_time, older_tag_name, project_url)

  if @options[:issues]
    # Generate issues:
    # bucket each issue by its labels
    issues_a = []
    enhancement_a = []
    bugs_a =[]

    issues.each { |dict|
      added = false
      # NOTE(review): an issue labelled both 'bug' and 'enhancement' lands
      # in both buckets (the inner `next` only skips to the next label).
      dict.labels.each { |label|
        if label.name == 'bug'
          bugs_a.push dict
          added = true
          next
        end
        if label.name == 'enhancement'
          enhancement_a.push dict
          added = true
          next
        end
      }
      unless added
        issues_a.push dict
      end
    }

    log += generate_log_from_array(enhancement_a, @options[:enhancement_prefix])
    log += generate_log_from_array(bugs_a, @options[:bug_prefix])
    log += generate_log_from_array(issues_a, @options[:issue_prefix])
  end

  if @options[:pulls]
    # Generate pull requests:
    log += generate_log_from_array(pull_requests, @options[:merge_prefix])
  end

  log
end
|
2014-11-10 14:13:34 +00:00
|
|
|
|
2015-02-25 17:02:41 +00:00
|
|
|
# Renders one bullet-list section of the log: an optional +prefix+
# heading (suppressed in simple-list mode) followed by one bullet per
# issue, each formatted by the generator.
def generate_log_from_array(issues, prefix)
  return '' unless issues.any?

  section = ''
  section << "#{prefix}\n\n" if options[:simple_list].nil?
  issues.each { |issue|
    merge_string = @generator.get_string_for_issue(issue)
    section << "- #{merge_string}\n\n"
  }
  section
end
|
|
|
|
|
2015-02-19 15:30:15 +00:00
|
|
|
# Builds the "## [tag](...) (date)" heading plus the optional compare
# link, appended to +log+ and returned. The unreleased section is
# detected by object identity (equal?) with options[:unreleased_label],
# which holds because create_log assigns that very object.
def generate_header(log, newer_tag_name, newer_tag_name2, newer_tag_time, older_tag_name, project_url)
  #Generate date string:
  time_string = newer_tag_time.strftime @options[:format]

  # Generate tag name and link
  heading =
    if newer_tag_name.equal? @options[:unreleased_label]
      # unreleased section carries no date
      "## [#{newer_tag_name}](#{project_url}/tree/#{newer_tag_name2})\n\n"
    else
      "## [#{newer_tag_name}](#{project_url}/tree/#{newer_tag_name2}) (#{time_string})\n\n"
    end
  log += heading

  if @options[:compare_link] && older_tag_name
    # Generate compare link
    log += "[Full Changelog](#{project_url}/compare/#{older_tag_name}...#{newer_tag_name2})\n\n"
  end

  log
end
|
|
|
|
|
2015-02-17 19:18:52 +00:00
|
|
|
# Returns the commit Time of +tag_name+ (a tag hash from the tags API),
# memoized in +tag_times_hash+. Returns nil for a nil tag; falls through
# to the git-data commits API on a cache miss.
#
# The cache parameter exists so concurrent callers (fetch_tags_dates)
# can write through an explicitly shared hash.
#
# @param [Hash, nil] tag_name tag hash (despite the name, not a string)
# @param [Hash] tag_times_hash cache of tag name -> Time
# @return [Time, nil]
def get_time_of_tag(tag_name, tag_times_hash = @tag_times_hash)
  if tag_name.nil?
    return nil
  end

  # CONSISTENCY FIX: read and write the same hash that was passed in —
  # previously the parameter was checked but @tag_times_hash was
  # returned/written, which only worked because callers passed that ivar.
  cached = tag_times_hash[tag_name['name']]
  if cached
    return cached
  end

  begin
    github_git_data_commits_get = @github.git_data.commits.get @options[:user], @options[:project], tag_name['commit']['sha']
  rescue
    puts GH_RATE_LIMIT_EXCEEDED_MSG.yellow
  end
  time_string = github_git_data_commits_get['committer']['date']
  tag_times_hash[tag_name['name']] = Time.parse(time_string)
end
|
|
|
|
|
2015-02-18 20:21:00 +00:00
|
|
|
# Applies the label-based filters to @issues: include/exclude label
# lists, plus optional re-inclusion of completely unlabeled issues.
#
# @return [Array] the issues to show in the log
def get_filtered_issues
  issues = @issues
  filtered_issues = issues

  unless @options[:include_labels].nil?
    wanted = @options[:include_labels]
    # keep only issues carrying at least one of the requested labels
    filtered_issues = issues.select { |issue|
      (issue.labels.map { |label| label.name } & wanted).any?
    }
  end

  unless @options[:exclude_labels].nil?
    banned = @options[:exclude_labels]
    # drop issues carrying any of the excluded labels
    filtered_issues = filtered_issues.reject { |issue|
      (issue.labels.map { |label| label.name } & banned).any?
    }
  end

  if @options[:add_issues_wo_labels]
    # issues with no labels at all are re-added regardless of the filters
    issues_wo_labels = issues.select { |issue|
      issue.labels.map { |label| label.name }.empty?
    }
    filtered_issues |= issues_wo_labels
  end

  if @options[:verbose]
    puts "Filtered issues: #{filtered_issues.count}"
  end

  filtered_issues
end
|
|
|
|
|
2015-02-18 21:32:40 +00:00
|
|
|
# Fetches all closed issues (which, on GitHub, includes pull requests),
# honoring --max-issues, and splits them into plain issues vs PRs.
#
# @return [Array(Array, Array)] [issues_without_prs, pull_requests]
def fetch_issues_and_pull_requests
  if @options[:verbose]
    print "Fetching closed issues...\r"
  end
  issues = []

  begin
    response = @github.issues.list user: @options[:user], repo: @options[:project], state: 'closed', filter: 'all', labels: nil
    page_i = 0
    count_pages = response.count_pages
    response.each_page do |page|
      page_i += PER_PAGE_NUMBER
      print "Fetching issues... #{page_i}/#{count_pages * PER_PAGE_NUMBER}\r"
      issues.concat(page)
      # stop paging once the --max-issues cap is reached
      break if @options[:max_issues] && issues.length >= @options[:max_issues]
    end
  rescue
    puts GH_RATE_LIMIT_EXCEEDED_MSG.yellow
  end

  # clear the progress line
  print "                                \r"

  if @options[:verbose]
    puts "Received issues: #{issues.count}"
  end

  # remove pull request from issues:
  issues_wo_pr = issues.select { |x|
    x.pull_request == nil
  }
  pull_requests = issues.select { |x|
    x.pull_request != nil
  }
  return issues_wo_pr, pull_requests
end
|
|
|
|
|
2015-03-04 19:25:21 +00:00
|
|
|
# Downloads the event stream of every fetched issue and pull request
# (each item gets its [:events] populated) via fetch_events_async.
def fetch_event_for_issues_and_pr
  total = @issues.count + @pull_requests.count
  print "Fetching events for issues and PR: 0/#{total}\r" if @options[:verbose]

  # Async fetching events:
  fetch_events_async(@issues + @pull_requests)

  #to clear line from prev print
  print "                                                \r"

  puts 'Fetching events for issues and PR: Done!' if @options[:verbose]
end
|
|
|
|
|
2015-03-06 12:32:02 +00:00
|
|
|
# Downloads the event list for each given issue/PR and stores it under
# issue[:events], running up to 50 concurrent threads per batch.
def fetch_events_async(issues)
  i = 0
  max_thread_number = 50
  threads = []
  issues.each_slice(max_thread_number) { |issues_slice|
    issues_slice.each { |issue|
      threads << Thread.new {
        begin
          obj = @github.issues.events.list user: @options[:user], repo: @options[:project], issue_number: issue['number']
        rescue
          # NOTE(review): on failure obj stays nil and obj.body below raises
          # inside this thread — confirm whether that is the intended outcome.
          puts GH_RATE_LIMIT_EXCEEDED_MSG.yellow
        end
        issue[:events] = obj.body
        # NOTE(review): i is shared across threads without a mutex — the
        # progress counter is best-effort only.
        print "Fetching events for issues and PR: #{i+1}/#{@issues.count + @pull_requests.count}\r"
        i +=1
      }
    }
    # drain the batch before starting the next slice
    threads.each { |thr| thr.join }
    threads = []
  }
end
|
|
|
|
|
2014-11-07 15:04:03 +00:00
|
|
|
end
|
|
|
|
|
2014-11-17 15:54:13 +00:00
|
|
|
# Allow running this file directly as a script.
if __FILE__ == $0
  GitHubChangelogGenerator::ChangelogGenerator.new.compund_changelog
end
|
2014-11-06 13:51:15 +00:00
|
|
|
|
2015-01-24 12:42:11 +00:00
|
|
|
end
|