#!/usr/bin/env ruby
# github-changelog-generator/lib/github_changelog_generator.rb
require "github_api"
require "json"
require "colorize"
require "benchmark"
require_relative "github_changelog_generator/parser"
require_relative "github_changelog_generator/generator"
require_relative "github_changelog_generator/version"
require_relative "github_changelog_generator/reader"
require_relative "github_changelog_generator/fetcher"
module GitHubChangelogGenerator
# Default error for ChangelogGenerator
class ChangelogGeneratorError < StandardError
end
# Main class and entry point for this script.
class ChangelogGenerator
attr_accessor :options, :all_tags, :github
PER_PAGE_NUMBER = 30
GH_RATE_LIMIT_EXCEEDED_MSG = "Warning: GitHub API rate limit (5000 per hour) exceeded, change log may be " \
"missing some issues. You can limit the number of issues fetched using the `--max-issues NUM` argument."
# Class responsible for the whole change log generation cycle
# @return [ChangelogGenerator] initialised instance of ChangelogGenerator
def initialize
@options = Parser.parse_options
@fetcher = GitHubChangelogGenerator::Fetcher.new @options
github_options = { per_page: PER_PAGE_NUMBER }
github_options[:oauth_token] = @github_token unless @github_token.nil?
github_options[:endpoint] = options[:github_endpoint] unless options[:github_endpoint].nil?
github_options[:site] = options[:github_site] unless options[:github_site].nil?
begin
@github = Github.new github_options
rescue
puts GH_RATE_LIMIT_EXCEEDED_MSG.yellow
end
@generator = Generator.new(@options)
@all_tags = get_filtered_tags
@issues, @pull_requests = fetch_issues_and_pull_requests
@pull_requests = @options[:pulls] ? get_filtered_pull_requests : []
@issues = @options[:issues] ? get_filtered_issues : []
fetch_event_for_issues_and_pr
detect_actual_closed_dates
@tag_times_hash = {}
end
# Return tags after filtering them by the lists provided via the --between-tags and --exclude-tags options
#
# @return [Array]
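# @example A minimal usage sketch (the tag names below are illustrative assumptions, not taken from this repo):
#   # With @options[:between_tags] = ["v1.0.0", "v2.0.0"], only tags whose names
#   # appear in that list are kept; a listed tag missing from the repo only
#   # triggers a warning.
#   @all_tags = get_filtered_tags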
def get_filtered_tags
all_tags = get_all_tags
filtered_tags = []
if @options[:between_tags]
@options[:between_tags].each do |tag|
unless all_tags.map { |t| t["name"] }.include? tag
puts "Warning: can't find tag #{tag}, specified with --between-tags option.".yellow
end
end
filtered_tags = all_tags.select { |tag| @options[:between_tags].include? tag["name"] }
end
filtered_tags
end
def detect_actual_closed_dates
if @options[:verbose]
print "Fetching closed dates for issues...\r"
end
threads = []
@issues.each { |issue|
threads << Thread.new {
find_closed_date_by_commit(issue)
}
}
@pull_requests.each { |pull_request|
threads << Thread.new {
find_closed_date_by_commit(pull_request)
}
}
threads.each(&:join)
if @options[:verbose]
puts "Fetching closed dates for issues: Done!"
end
end
def find_closed_date_by_commit(issue)
unless issue["events"].nil?
2015-03-26 13:43:47 +00:00
# if it's PR -> then find "merged event", in case of usual issue -> fond closed date
compare_string = issue[:merged_at].nil? ? "closed" : "merged"
2015-02-17 21:09:07 +00:00
# reverse! - to find latest closed event. (event goes in date order)
issue["events"].reverse!.each { |event|
2015-03-04 19:25:21 +00:00
if event[:event].eql? compare_string
2015-02-17 21:09:07 +00:00
if event[:commit_id].nil?
issue[:actual_date] = issue[:closed_at]
else
2015-03-03 13:34:19 +00:00
begin
2015-03-24 11:49:37 +00:00
commit = @github.git_data.commits.get @options[:user], @options[:project], event[:commit_id]
2015-03-03 13:34:19 +00:00
issue[:actual_date] = commit[:author][:date]
rescue
puts "Warning: Can't fetch commit #{event[:commit_id]}. It is probably referenced from another repo.".yellow
2015-03-03 13:34:19 +00:00
issue[:actual_date] = issue[:closed_at]
end
2015-02-17 21:09:07 +00:00
end
break
end
2015-02-17 20:39:37 +00:00
}
end
2015-03-26 13:43:47 +00:00
# TODO: assert issues, that remain without 'actual_date' hash for some reason.
2015-02-17 20:39:37 +00:00
end
def print_json(json)
puts JSON.pretty_generate(json)
end
# Fetches the missing :merged_at attribute for pull requests.
# :merged_at is the date when a PR was merged; it is more accurate to use
# this date than the closed date.
def fetch_merged_at_pull_requests
if @options[:verbose]
print "Fetching merged dates...\r"
end
pull_requests = []
begin
response = @github.pull_requests.list @options[:user], @options[:project], state: "closed"
page_i = 0
response.each_page do |page|
page_i += PER_PAGE_NUMBER
count_pages = response.count_pages
print "Fetching merged dates... #{page_i}/#{count_pages * PER_PAGE_NUMBER}\r"
pull_requests.concat(page)
end
rescue
puts GH_RATE_LIMIT_EXCEEDED_MSG.yellow
end
print " \r"
@pull_requests.each { |pr|
fetched_pr = pull_requests.find { |fpr|
fpr.number == pr.number
}
pr[:merged_at] = fetched_pr[:merged_at]
pull_requests.delete(fetched_pr)
}
if @options[:verbose]
puts "Fetching merged dates: Done!"
end
end
# Fetches missing params for pull requests and filters them by the specified options:
# includes all PRs with labels from the @options[:include_labels] array
# and excludes all PRs with labels from the @options[:exclude_labels] array.
# @return [Array] filtered PRs
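# @example Rough sketch of the filtering flow (the label names are illustrative assumptions):
#   # With @options[:include_labels] = ["enhancement"] and
#   # @options[:exclude_labels] = ["wontfix"], only PRs carrying "enhancement"
#   # and not carrying "wontfix" survive the filtering.
#   @pull_requests = get_filtered_pull_requests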
def get_filtered_pull_requests
fetch_merged_at_pull_requests
filtered_pull_requests = include_issues_by_labels(@pull_requests)
filtered_pull_requests = exclude_issues_by_labels(filtered_pull_requests)
if @options[:verbose]
puts "Filtered pull requests: #{filtered_pull_requests.count}"
end
filtered_pull_requests
end
# Include issues with labels specified in :include_labels
# @param [Array] issues to filter
# @return [Array] filtered array of issues
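# @example Behaviour sketch (the issue objects and label names below are illustrative assumptions):
#   # issue_a has labels ["bug"], issue_b has labels ["question"], issue_c has no labels.
#   # With @options[:include_labels] = ["bug"] and @options[:add_issues_wo_labels] = true:
#   # include_issues_by_labels([issue_a, issue_b, issue_c]) # => [issue_a, issue_c]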
def include_issues_by_labels(issues)
filtered_issues = @options[:include_labels].nil? ? issues : issues.select { |issue| (issue.labels.map(&:name) & @options[:include_labels]).any? }
if @options[:add_issues_wo_labels]
issues_wo_labels = issues.select { |issue|
!issue.labels.map(&:name).any?
}
filtered_issues |= issues_wo_labels
end
filtered_issues
end
# Delete all issues that have labels from the @options[:exclude_labels] array
# @param [Array] issues
# @return [Array] filtered array
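# @example Behaviour sketch (the label names are illustrative assumptions):
#   # With @options[:exclude_labels] = ["duplicate", "wontfix"], any issue
#   # carrying at least one of those labels is dropped:
#   # exclude_issues_by_labels([issue_with_wontfix, plain_issue]) # => [plain_issue]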
def exclude_issues_by_labels(issues)
unless @options[:exclude_labels].nil?
issues = issues.select { |issue|
!(issue.labels.map(&:name) & @options[:exclude_labels]).any?
}
end
issues
end
# The entry point of this script to generate the change log
# @raise [ChangelogGeneratorError] raised when one of the specified tags is not found in the list of tags.
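# @example Typical invocation (a sketch; the CLI options are parsed in #initialize):
#   generator = GitHubChangelogGenerator::ChangelogGenerator.new
#   generator.compound_changelog # writes the log to @options[:output] and prints its location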
def compound_changelog
log = "# Change Log\n\n"
if @options[:unreleased_only]
log += generate_log_between_tags(all_tags[0], nil)
elsif @options[:tag1] && @options[:tag2]
tag1 = @options[:tag1]
tag2 = @options[:tag2]
tags_strings = []
all_tags.each { |x| tags_strings.push(x["name"]) }
if tags_strings.include?(tag1)
if tags_strings.include?(tag2)
to_a = tags_strings.map.with_index.to_a
hash = Hash[to_a]
index1 = hash[tag1]
index2 = hash[tag2]
log += generate_log_between_tags(all_tags[index1], all_tags[index2])
else
fail ChangelogGeneratorError, "Can't find tag #{tag2} -> exit".red
end
else
fail ChangelogGeneratorError, "Can't find tag #{tag1} -> exit".red
end
else
log += generate_log_for_all_tags
end
log += "\n\n\\* *This Change Log was automatically generated by [github_changelog_generator](https://github.com/skywinder/Github-Changelog-Generator)*"
output_filename = "#{@options[:output]}"
File.open(output_filename, "w") { |file| file.write(log) }
puts "Done!"
puts "Generated log placed in #{`pwd`.strip!}/#{output_filename}"
end
# The full generation cycle for the whole project
# @return [String] The complete change log
def generate_log_for_all_tags
fetch_tags_dates
if @options[:verbose]
puts "Sorting tags..."
end
@all_tags.sort_by! { |x| get_time_of_tag(x) }.reverse!
if @options[:verbose]
puts "Generating log..."
end
log = ""
if @options[:unreleased] && @all_tags.count != 0
unreleased_log = generate_log_between_tags(all_tags[0], nil)
if unreleased_log
log += unreleased_log
end
end
(1...all_tags.size).each { |index|
log += generate_log_between_tags(all_tags[index], all_tags[index - 1])
}
if @all_tags.count != 0
log += generate_log_between_tags(nil, all_tags.last)
end
log
end
# Asynchronously fetch the dates of all tags
def fetch_tags_dates
if @options[:verbose]
print "Fetching tag dates...\r"
end
# Async fetching tags:
threads = []
i = 0
all = @all_tags.count
@all_tags.each { |tag|
# pass @tag_times_hash explicitly so data is written safely
threads << Thread.new {
get_time_of_tag(tag, @tag_times_hash)
if @options[:verbose]
print "Fetching tags dates: #{i + 1}/#{all}\r"
i += 1
end
}
}
print " \r"
threads.each(&:join)
if @options[:verbose]
puts "Fetching tags dates: #{i} Done!"
end
end
def get_all_tags
if @options[:verbose]
print "Fetching tags...\r"
end
tags = []
begin
response = @github.repos.tags @options[:user], @options[:project]
page_i = 0
count_pages = response.count_pages
response.each_page do |page|
page_i += PER_PAGE_NUMBER
print "Fetching tags... #{page_i}/#{count_pages * PER_PAGE_NUMBER}\r"
tags.concat(page)
end
print " \r"
if tags.count == 0
puts "Warning: Can't find any tags in repo. Make sure, that you push tags to remote repo via 'git push --tags'".yellow
elsif @options[:verbose]
puts "Found #{tags.count} tags"
end
rescue
puts GH_RATE_LIMIT_EXCEEDED_MSG.yellow
end
tags
end
def fetch_github_token
env_var = @options[:token] ? @options[:token] : (ENV.fetch "CHANGELOG_GITHUB_TOKEN", nil)
unless env_var
puts "Warning: No token provided (-t option) and variable $CHANGELOG_GITHUB_TOKEN was not found.".yellow
puts "This script can make only 50 requests per hour to GitHub API without a token!".yellow
end
@github_token ||= env_var
end
# Generate the log only between 2 specified tags
# @param [Hash] older_tag all issues before this tag's date will be excluded. May be nil if it is the first tag
# @param [Hash] newer_tag all issues after this tag will be excluded. May be nil for the unreleased section
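# @example Usage sketch with tag hashes as returned by the GitHub API (tag positions are illustrative assumptions):
#   # all_tags is sorted newest-first, so this renders the section between the
#   # two most recent tags:
#   section = generate_log_between_tags(all_tags[1], all_tags[0])
#   # and this renders the unreleased section:
#   unreleased = generate_log_between_tags(all_tags[0], nil)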
def generate_log_between_tags(older_tag, newer_tag)
filtered_pull_requests = delete_by_time(@pull_requests, :actual_date, older_tag, newer_tag)
filtered_issues = delete_by_time(@issues, :actual_date, older_tag, newer_tag)
newer_tag_name = newer_tag.nil? ? nil : newer_tag["name"]
older_tag_name = older_tag.nil? ? nil : older_tag["name"]
if @options[:filter_issues_by_milestone]
# delete excess irrelevant issues (according to milestones)
filtered_issues = filter_by_milestone(filtered_issues, newer_tag_name, @issues)
filtered_pull_requests = filter_by_milestone(filtered_pull_requests, newer_tag_name, @pull_requests)
end
if filtered_issues.empty? && filtered_pull_requests.empty? && newer_tag.nil?
# do not generate empty unreleased section
return ""
end
create_log(filtered_pull_requests, filtered_issues, newer_tag, older_tag_name)
end
def filter_by_milestone(filtered_issues, newer_tag_name, src_array)
filtered_issues.select! { |issue|
# leave issues without milestones
if issue.milestone.nil?
true
else
# check that this milestone is in the tag list:
@all_tags.find { |tag| tag.name == issue.milestone.title }.nil?
end
}
unless newer_tag_name.nil?
# add missed issues (according to milestones)
issues_to_add = src_array.select { |issue|
if issue.milestone.nil?
false
else
# check that this milestone is in the tag list:
milestone_is_tag = @all_tags.find { |tag|
tag.name == issue.milestone.title
}
if milestone_is_tag.nil?
false
else
issue.milestone.title == newer_tag_name
end
end
}
filtered_issues |= issues_to_add
end
filtered_issues
end
# Filter issues to those that belong to the specified tag range
# @param [Array] array the issues to filter
# @param [Symbol] hash_key key of the date value; default is :actual_date
# @param [Hash] older_tag all issues before this tag's date will be excluded. May be nil if it is the first tag
# @param [Hash] newer_tag all issues after this tag will be excluded. May be nil for the unreleased section
# @return [Array] filtered issues
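# @example Sketch of the date-range filtering (the issue and tag objects are illustrative assumptions):
#   # Keeps only issues whose :actual_date lies after older_tag's date and
#   # at or before newer_tag's date:
#   in_range = delete_by_time(@issues, :actual_date, older_tag, newer_tag)
#   # Issues without the given date key are always dropped.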
def delete_by_time(array, hash_key = :actual_date, older_tag = nil, newer_tag = nil)
fail ChangelogGeneratorError, "At least one of the tags should be not nil!".red if older_tag.nil? && newer_tag.nil?
newer_tag_time = newer_tag && get_time_of_tag(newer_tag)
older_tag_time = older_tag && get_time_of_tag(older_tag)
array.select { |req|
if req[hash_key]
t = Time.parse(req[hash_key]).utc
if older_tag_time.nil?
tag_in_range_old = true
else
tag_in_range_old = t > older_tag_time
end
if newer_tag_time.nil?
tag_in_range_new = true
else
tag_in_range_new = t <= newer_tag_time
end
tag_in_range = tag_in_range_old && tag_in_range_new
tag_in_range
else
false
end
}
end
# Generates log for section with header and body
#
# @param [Array] pull_requests List of PRs in the new section
# @param [Array] issues List of issues in the new section
# @param [String] newer_tag Name of the newer tag. Could be nil for `Unreleased` section
# @param [String] older_tag_name Older tag, used for the links. Could be nil for last tag.
# @return [String] Ready and parsed section
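# @example Shape of the generated section (a sketch; the tag names and date are illustrative assumptions):
#   # With newer_tag being the API hash for "v1.0.0" and older_tag_name "v0.9.0",
#   # the section starts roughly like:
#   #   ## [v1.0.0](<project_url>/tree/v1.0.0) (2015-04-01)
#   #   [Full Changelog](<project_url>/compare/v0.9.0...v1.0.0)
#   # followed by the enhancement, bug, issue and merged-PR sub-sections
#   # (depending on which options are enabled).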
def create_log(pull_requests, issues, newer_tag, older_tag_name = nil)
newer_tag_time = newer_tag.nil? ? Time.new : get_time_of_tag(newer_tag)
newer_tag_name = newer_tag.nil? ? @options[:unreleased_label] : newer_tag["name"]
newer_tag_link = newer_tag.nil? ? "HEAD" : newer_tag_name
github_site = options[:github_site] || "https://github.com"
project_url = "#{github_site}/#{@options[:user]}/#{@options[:project]}"
log = generate_header(newer_tag_name, newer_tag_link, newer_tag_time, older_tag_name, project_url)
if @options[:issues]
# Generate issues:
issues_a = []
enhancement_a = []
bugs_a = []
issues.each { |dict|
added = false
dict.labels.each { |label|
if label.name == "bug"
bugs_a.push dict
added = true
next
end
if label.name == "enhancement"
enhancement_a.push dict
added = true
next
end
}
unless added
issues_a.push dict
end
}
log += generate_sub_section(enhancement_a, @options[:enhancement_prefix])
log += generate_sub_section(bugs_a, @options[:bug_prefix])
log += generate_sub_section(issues_a, @options[:issue_prefix])
end
if @options[:pulls]
# Generate pull requests:
log += generate_sub_section(pull_requests, @options[:merge_prefix])
2014-11-17 15:54:13 +00:00
end
log
end
# @param [Array] issues List of issues in the sub-section
# @param [String] prefix Name of the sub-section
# @return [String] Generated ready-to-go sub-section
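# @example Output sketch (the prefix value is an illustrative assumption; item text comes from Generator#get_string_for_issue):
#   generate_sub_section(bug_issues, "**Fixed bugs:**")
#   # => "**Fixed bugs:**\n\n- <issue line>\n\n- <issue line>\n\n"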
def generate_sub_section(issues, prefix)
log = ""
if options[:simple_list] != true && issues.any?
log += "#{prefix}\n\n"
end
if issues.any?
issues.each { |issue|
merge_string = @generator.get_string_for_issue(issue)
log += "- #{merge_string}\n\n"
}
end
log
end
# Generates one header for a section with the specified parameters.
#
# @param [String] newer_tag_name - name of the newer tag
# @param [String] newer_tag_link - used for links. Could be the same as #newer_tag_name or some specific value, like HEAD
# @param [Time] newer_tag_time - time when the newer tag was created
# @param [String] older_tag_link - tag name, used for links.
# @param [String] project_url - URL of the current project.
# @return [String] - Generated ready-to-add section header.
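# @example Header sketch (the tag names, date and project URL are illustrative assumptions; the date string depends on @options[:dateformat]):
#   generate_header("v1.0.0", "v1.0.0", Time.new(2015, 4, 1), "v0.9.0", "https://github.com/user/project")
#   # => "## [v1.0.0](https://github.com/user/project/tree/v1.0.0) (2015-04-01)\n\n" \
#   #    "[Full Changelog](https://github.com/user/project/compare/v0.9.0...v1.0.0)\n\n"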
def generate_header(newer_tag_name, newer_tag_link, newer_tag_time, older_tag_link, project_url)
log = ""
# Generate date string:
time_string = newer_tag_time.strftime @options[:dateformat]
# Generate tag name and link
if newer_tag_name.equal? @options[:unreleased_label]
log += "## [#{newer_tag_name}](#{project_url}/tree/#{newer_tag_link})\n\n"
else
log += "## [#{newer_tag_name}](#{project_url}/tree/#{newer_tag_link}) (#{time_string})\n\n"
end
if @options[:compare_link] && older_tag_link
# Generate compare link
log += "[Full Changelog](#{project_url}/compare/#{older_tag_link}...#{newer_tag_link})\n\n"
end
log
end
# Try to find the tag date in the local hash.
# Otherwise fetch the tag time and store it in the local hash.
# @param [Hash] tag_name the tag (a hash containing the tag name and commit sha)
# @param [Hash] tag_times_hash the hash of tag times
# @return [Time] time of specified tag
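# @example Caching sketch (the tag hash below is an illustrative assumption):
#   # The first call fetches the tagged commit from the API and caches its date;
#   # later calls for the same tag name return the cached Time.
#   get_time_of_tag(all_tags.first) # => 2015-04-01 12:00:00 +0000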
def get_time_of_tag(tag_name, tag_times_hash = @tag_times_hash)
fail ChangelogGeneratorError, "tag_name is nil".red if tag_name.nil?
if tag_times_hash[tag_name["name"]]
return @tag_times_hash[tag_name["name"]]
end
begin
github_git_data_commits_get = @github.git_data.commits.get @options[:user], @options[:project], tag_name["commit"]["sha"]
rescue
puts GH_RATE_LIMIT_EXCEEDED_MSG.yellow
end
time_string = github_git_data_commits_get["committer"]["date"]
@tag_times_hash[tag_name["name"]] = Time.parse(time_string)
end
def get_filtered_issues
filtered_issues = include_issues_by_labels(@issues)
filtered_issues = exclude_issues_by_labels(filtered_issues)
if @options[:verbose]
puts "Filtered issues: #{filtered_issues.count}"
end
filtered_issues
end
# Fetches all closed issues and separates them into pull requests and plain issues
# (a pull request is a kind of issue in GitHub terms)
# @return [Array] a two-element array: [issues, pull_requests]
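# @example Result shape (a sketch):
#   issues, pull_requests = fetch_issues_and_pull_requests
#   # issues        -> closed issues without a :pull_request key
#   # pull_requests -> closed issues that are pull requests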
def fetch_issues_and_pull_requests
if @options[:verbose]
print "Fetching closed issues...\r"
end
issues = []
begin
response = @github.issues.list user: @options[:user], repo: @options[:project], state: "closed", filter: "all", labels: nil
page_i = 0
count_pages = response.count_pages
response.each_page do |page|
page_i += PER_PAGE_NUMBER
print "Fetching issues... #{page_i}/#{count_pages * PER_PAGE_NUMBER}\r"
issues.concat(page)
break if @options[:max_issues] && issues.length >= @options[:max_issues]
end
rescue
puts GH_RATE_LIMIT_EXCEEDED_MSG.yellow
end
print " \r"
if @options[:verbose]
puts "Received issues: #{issues.count}"
end
# separate pull requests from plain issues:
issues.partition { |x|
x[:pull_request].nil?
}
end
def fetch_event_for_issues_and_pr
if @options[:verbose]
print "Fetching events for issues and PR: 0/#{@issues.count + @pull_requests.count}\r"
end
# Async fetching events:
2015-03-06 12:32:02 +00:00
fetch_events_async(@issues + @pull_requests)
end
def fetch_events_async(issues)
i = 0
max_thread_number = 50
threads = []
issues.each_slice(max_thread_number) { |issues_slice|
issues_slice.each { |issue|
threads << Thread.new {
begin
obj = @github.issues.events.list user: @options[:user], repo: @options[:project], issue_number: issue["number"]
rescue
puts GH_RATE_LIMIT_EXCEEDED_MSG.yellow
end
issue[:events] = obj.body
print "Fetching events for issues and PR: #{i + 1}/#{@issues.count + @pull_requests.count}\r"
i += 1
}
}
threads.each(&:join)
threads = []
}
# clear the line from the previous print
print " \r"
if @options[:verbose]
puts "Fetching events for issues and PR: #{i} Done!"
end
end
end
if __FILE__ == $PROGRAM_NAME
GitHubChangelogGenerator::ChangelogGenerator.new.compound_changelog
end
end