Merge pull request #833 from warpc/823-publication-groups-of-build-lists
[refs #823] Group build lists' packages together to speed up publication
Commit 7b5af80d20
@@ -151,8 +151,6 @@ class BuildList < ActiveRecord::Base
     after_transition :on => :published, :do => [:set_version_and_tag, :actualize_packages]
     after_transition :on => :cancel, :do => [:cancel_job],
                      :if => lambda { |build_list| build_list.new_core? }
-    after_transition :on => :publish, :do => [:publish_container],
-                     :if => lambda { |build_list| build_list.new_core? }
-
     after_transition :on => [:published, :fail_publish, :build_error], :do => :notify_users
     after_transition :on => :build_success, :do => :notify_users,
@@ -271,44 +269,6 @@ class BuildList < ActiveRecord::Base
     can_publish? and not save_to_repository.publish_without_qa
   end

-  def publish_container
-    type = build_for_platform.distrib_type
-    archive = results.find{ |r| r['file_name'] =~ /.*\.tar\.gz$/ }
-
-    platform_path = "#{save_to_platform.path}/repository"
-    if save_to_platform.personal?
-      platform_path << '/'
-      platform_path << build_for_platform.name
-      Dir.mkdir(platform_path) unless File.exists?(platform_path)
-    end
-
-    packages = last_published.includes(:packages).limit(5).map{ |bl| bl.packages }.flatten
-    sources = packages.map{ |p| p.fullname if p.package_type == 'source' }.compact
-    binaries = packages.map{ |p| p.fullname if p.package_type == 'binary' }.compact
-
-    Resque.push(
-      worker_queue_with_priority("publish_#{type}_worker"),
-      'class' => worker_queue_class("AbfWorker::Publish#{type.capitalize}Worker"),
-      'args' => [{
-        :id => id,
-        :arch => arch.name,
-        :distrib_type => type,
-        :container_sha1 => archive['sha1'],
-        :packages => { :sources => sources, :binaries => binaries },
-        :platform => {
-          :platform_path => platform_path,
-          :released => save_to_platform.released
-        },
-        :repository => {
-          :name => save_to_repository.name,
-          :id => save_to_repository.id
-        },
-        :type => :publish,
-        :time_living => 2400 # 40 min
-      }]
-    )
-  end
-
   def add_to_queue
     if new_core?
       # TODO: Investigate: why 2 tasks will be created without checking @state
@@ -423,6 +383,15 @@ class BuildList < ActiveRecord::Base
     end
   end

+  def last_published
+    BuildList.where(:project_id => self.project_id,
+                    :save_to_repository_id => self.save_to_repository_id)
+             .for_platform(self.build_for_platform_id)
+             .scoped_to_arch(self.arch_id)
+             .for_status(BUILD_PUBLISHED)
+             .recent
+  end
+
   protected

   def abf_worker_priority
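Note on the hunk above: last_published becomes public because the new publish task manager (further down in this diff) uses it to collect previously published packages of the same project, repository, platform and arch. A minimal usage sketch, with an illustrative id and the includes/limit chaining taken from the task manager:

    bl = BuildList.find(123) # illustrative id

    # Up to five most recently published build lists of the same project,
    # repository, platform and arch, with their packages eager-loaded:
    bl.last_published.includes(:packages).limit(5).each do |old_bl|
      old_bl.packages.each { |package| puts package.fullname }
    end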
@@ -508,12 +477,4 @@ class BuildList < ActiveRecord::Base
     end
   end

-  def last_published
-    BuildList.where(:project_id => self.project_id,
-                    :save_to_repository_id => self.save_to_repository_id)
-             .for_platform(self.build_for_platform_id)
-             .scoped_to_arch(self.arch_id)
-             .for_status(BUILD_PUBLISHED)
-             .recent
-  end
 end
@@ -11,6 +11,7 @@ class BuildList::Package < ActiveRecord::Base
             :package_type, :name, :release, :version,
             :presence => true
   validates :package_type, :inclusion => PACKAGE_TYPES
+  validates :sha1, :presence => true, :if => Proc.new { |p| p.build_list.new_core? }

   default_scope order('lower(name) ASC, length(name) ASC')

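In effect, a package that belongs to a new-core build list must now carry a sha1, while packages of legacy build lists are unaffected. A rough sketch of the behaviour (attribute assignment is simplified for illustration; real records come from the worker import path):

    build_list = BuildList.new
    build_list.new_core = true            # plain attribute writer on the AR column

    package = BuildList::Package.new
    package.build_list = build_list

    package.valid?
    package.errors.include?(:sha1)        # => true until package.sha1 is assigned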
@@ -167,7 +167,10 @@
   %tbody
     - @build_list.packages.each do |package|
       %tr
-        %td= package.fullname
+        - if package.sha1.present?
+          %td= link_to package.fullname, "#{APP_CONFIG['file_store_url']}/api/v1/file_stores/#{package.sha1}"
+        - else
+          %td= package.fullname
         %td= package.name
         %td= package.version
         %td= package.release
@@ -22,3 +22,7 @@ end
 every 1.day, :at => '3:00 am' do
   rake "activity_feeds:clear", :output => 'log/activity_feeds.log'
 end
+
+every 3.minute do
+  runner 'AbfWorker::BuildListsPublishTaskManager.new.run', :output => 'log/task_manager.log'
+end
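The new entry uses the same whenever DSL as the rest of this schedule file; `runner` boots the Rails environment and evaluates the given snippet. For debugging, the same pass can be triggered by hand, e.g. from a Rails console (a sketch, assuming the class is autoloaded):

    # One pass of the publish task manager, i.e. what the cron job runs every 3 minutes:
    AbfWorker::BuildListsPublishTaskManager.new.run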
@@ -0,0 +1,5 @@
+class AbbSha1ToBuildListPackage < ActiveRecord::Migration
+  def change
+    add_column :build_list_packages, :sha1, :string
+  end
+end
@@ -11,7 +11,7 @@
 #
 # It's strongly recommended to check this file into your version control system.

-ActiveRecord::Schema.define(:version => 20121225103309) do
+ActiveRecord::Schema.define(:version => 20130110104600) do

   create_table "activity_feeds", :force => true do |t|
     t.integer "user_id", :null => false
@@ -94,6 +94,7 @@ ActiveRecord::Schema.define(:version => 20121225103309) do
     t.datetime "created_at", :null => false
     t.datetime "updated_at", :null => false
     t.boolean "actual", :default => false
+    t.string "sha1"
   end

   add_index "build_list_packages", ["actual", "platform_id"], :name => "index_build_list_packages_on_actual_and_platform_id"
@@ -0,0 +1,119 @@
+# -*- encoding : utf-8 -*-
+module AbfWorker
+  class BuildListsPublishTaskManager
+    REDIS_MAIN_KEY = 'abf-worker::build-lists-publish-task-manager::'
+    LOCKED_REP_AND_PLATFORMS = "#{REDIS_MAIN_KEY}locked-repositories-and-platforms"
+    LOCKED_BUILD_LISTS = "#{REDIS_MAIN_KEY}locked-build-lists"
+
+    def initialize
+      @redis = Resque.redis
+      @workers_count = APP_CONFIG['abf_worker']['publish_workers_count']
+    end
+
+    def run
+      available_repos = BuildList.
+        select('MIN(updated_at) as min_updated_at, save_to_repository_id, build_for_platform_id').
+        where(:new_core => true, :status => BuildList::BUILD_PUBLISH).
+        group(:save_to_repository_id, :build_for_platform_id).
+        order(:min_updated_at).
+        limit(@workers_count * 2) # because some repos may be locked
+
+      counter = 1
+
+      # looks like:
+      # ['save_to_repository_id-build_for_platform_id', ...]
+      locked_rep_and_pl = @redis.lrange(LOCKED_REP_AND_PLATFORMS, 0, -1)
+      available_repos.each do |el|
+        key = "#{el.save_to_repository_id}-#{el.build_for_platform_id}"
+        next if locked_rep_and_pl.include?(key)
+        break if counter > @workers_count
+        if create_task(el.save_to_repository_id, el.build_for_platform_id)
+          @redis.lpush(LOCKED_REP_AND_PLATFORMS, key)
+          counter += 1
+        end
+      end
+    end
+
+    def self.unlock_build_list(build_list)
+      Resque.redis.lrem(LOCKED_BUILD_LISTS, 0, build_list.id)
+    end
+
+    def self.unlock_rep_and_platform(build_list)
+      key = "#{build_list.save_to_repository_id}-#{build_list.build_for_platform_id}"
+      Resque.redis.lrem(LOCKED_REP_AND_PLATFORMS, 0, key)
+    end
+
+    private
+
+    def create_task(save_to_repository_id, build_for_platform_id)
+      build_lists = BuildList.
+        where(:new_core => true, :status => BuildList::BUILD_PUBLISH).
+        where(:save_to_repository_id => save_to_repository_id).
+        where(:build_for_platform_id => build_for_platform_id).
+        order(:updated_at)
+      locked_ids = @redis.lrange(LOCKED_BUILD_LISTS, 0, -1)
+      build_lists = build_lists.where('build_lists.id NOT IN (?)', locked_ids) unless locked_ids.empty?
+
+      bl = build_lists.first
+      return false unless bl
+
+      platform_path = "#{bl.save_to_platform.path}/repository"
+      if bl.save_to_platform.personal?
+        platform_path << '/' << bl.build_for_platform.name
+        system "mkdir -p #{platform_path}"
+      end
+      worker_queue = bl.worker_queue_with_priority("publish_worker")
+      worker_class = bl.worker_queue_class("AbfWorker::PublishWorker")
+
+      options = {
+        :id => bl.id,
+        :arch => bl.arch.name,
+        :distrib_type => bl.build_for_platform.distrib_type,
+        :platform => {
+          :platform_path => platform_path,
+          :released => bl.save_to_platform.released
+        },
+        :repository => {
+          :name => bl.save_to_repository.name,
+          :id => bl.save_to_repository.id
+        },
+        :type => :publish,
+        :time_living => 2400 # 40 min
+      }
+
+      packages = {:sources => [], :binaries => {:x86_64 => [], :i586 => []}}
+      old_packages = {:sources => [], :binaries => {:x86_64 => [], :i586 => []}}
+      build_list_ids = []
+
+      new_sources = {}
+      build_lists.each do |bl|
+        # remove duplicates of sources for different arches
+        bl.packages.by_package_type('source').each{ |s| new_sources["#{s.fullname}"] = s.sha1 }
+        fill_packages(bl, packages)
+        bl.last_published.includes(:packages).limit(5).each{ |old_bl|
+          fill_packages(old_bl, old_packages, :fullname)
+        }
+        build_list_ids << bl.id
+        @redis.lpush(LOCKED_BUILD_LISTS, bl.id)
+      end
+
+      packages[:sources] = new_sources.values
+      Resque.push(
+        worker_queue,
+        'class' => worker_class,
+        'args' => [options.merge({
+          :packages => packages,
+          :old_packages => old_packages,
+          :build_list_ids => build_list_ids
+        })]
+      )
+      return true
+    end
+
+    def fill_packages(bl, results_map, field = :sha1)
+      results_map[:sources] |= bl.packages.by_package_type('source').pluck(field) if field != :sha1
+      results_map[:binaries][bl.arch.name.to_sym] |= bl.packages.by_package_type('binary').pluck(field)
+    end
+
+  end
+end
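The new class coordinates workers through two Redis lists: one holding "save_to_repository_id-build_for_platform_id" pairs that already have a publish task in flight, and one holding the ids of build lists included in such a task. A standalone sketch of the locking scheme, assuming the redis gem and the same Redis server Resque uses (the pair value is illustrative):

    require 'redis'

    redis  = Redis.new  # assumption: same server as Resque.redis
    locked = 'abf-worker::build-lists-publish-task-manager::locked-repositories-and-platforms'
    pair   = '12-3'     # illustrative save_to_repository_id-build_for_platform_id

    redis.lpush(locked, pair)                    # taken by #run before Resque.push
    redis.lrange(locked, 0, -1).include?(pair)   # => true, so the next #run skips this pair
    redis.lrem(locked, 0, pair)                  # released later via unlock_rep_and_platform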
@@ -30,8 +30,6 @@ module AbfWorker
       true
     end

-    protected
-
     def worker_queue_with_priority(queue = nil)
       queue ||= abf_worker_base_queue
       queue << '_' << abf_worker_priority if abf_worker_priority.present?
@@ -3,26 +3,26 @@ module AbfWorker
     @queue = :publish_observer

     def self.perform(options)
-      bl = BuildList.find options['id']
       status = options['status'].to_i
-      case status
-      when COMPLETED
-        bl.published
-        update_results(bl, options)
-      when FAILED
-        bl.fail_publish
-        update_results(bl, options)
-      when CANCELED
-        bl.fail_publish
-        update_results(bl, options)
+      return if status == STARTED # do nothing when publication started
+      build_lists = BuildList.where(:id => options['build_list_ids'])
+      build_lists.each do |bl|
+        update_results(bl, options)
+        case status
+        when COMPLETED
+          bl.published
+        when FAILED, CANCELED
+          bl.fail_publish
+        end
+        AbfWorker::BuildListsPublishTaskManager.unlock_build_list bl
       end
+      AbfWorker::BuildListsPublishTaskManager.unlock_rep_and_platform build_lists.first
     end

     def self.update_results(subject, options)
       results = (subject.results || []).
-        map{ |r| r if r['file_name'] !~ /^abfworker\:\:publish\-worker.*\.log$/ }.
-        compact
-      results += options['results']
+        select{ |r| r['file_name'] !~ /^abfworker\:\:publish\-worker.*\.log$/ }
+      results |= options['results']
       sort_results_and_save(subject, results)
     end

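The switch from `+=` to `|=` on the results array looks intentional: if a build list already carries some of the incoming result entries (for example after a retried publish), `Array#|` keeps each equal hash only once, while `+` would append duplicates. A small illustration:

    old_results = [{ 'file_name' => 'container.tar.gz', 'sha1' => 'abc' }]
    new_results = [{ 'file_name' => 'container.tar.gz', 'sha1' => 'abc' },
                   { 'file_name' => 'repodata.tar.gz',  'sha1' => 'def' }]

    old_results + new_results   # => 3 entries, the container entry is duplicated
    old_results | new_results   # => 2 entries, equal hashes are kept only once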
@@ -46,14 +46,6 @@ module AbfWorker
         package.platform_id = bl.save_to_platform_id
         package.save!
       end
-
-      container = (options['results'] || []).
-        select{ |r| r['file_name'] !~ /.*\.log$/ }.first
-      sha1 = container ? container['sha1'] : nil
-      if sha1
-        bl.container_path = "#{APP_CONFIG['file_store_url']}/api/v1/file_stores/#{sha1}"
-        bl.save!
-      end
       update_results(bl, options)
     end
   end
@@ -34,7 +34,7 @@ namespace :new_core do

       sha1 = bl.results.find{ |r| r['file_name'] =~ /.*\.tar\.gz$/ }['sha1']

-      system "cd #{platform_repository_folder} && curl -L -O http://file-store.rosalinux.ru/api/v1/file_stores/#{sha1}"
+      system "cd #{platform_repository_folder} && curl -L -O #{APP_CONFIG['file_store_url']}/api/v1/file_stores/#{sha1}"
       system "cd #{platform_repository_folder} && tar -xzf #{sha1}"
       system "rm -f #{platform_repository_folder}/#{sha1}"

@@ -54,4 +54,55 @@ namespace :new_core do
     say "[#{Time.zone.now}] done"
   end

+  desc 'Extracts all rpms from BuildList container and updates BuildList::Package#sha1 field'
+  task :update_packages => :environment do
+    say "[#{Time.zone.now}] Starting to extract rpms..."
+
+    token = User.find_by_uname('rosa_system').authentication_token
+    BuildList.where(:new_core => true).
+      where(:status => [
+        BuildServer::SUCCESS,
+        BuildList::FAILED_PUBLISH,
+        BuildList::BUILD_PUBLISHED,
+        BuildList::BUILD_PUBLISH
+      ]).
+      order(:id).
+      find_in_batches(:batch_size => 100) do | build_lists |
+
+      build_lists.each do | bl |
+        puts "[#{Time.zone.now}] - where build_lists.id #{bl.id}"
+
+        sha1 = (bl.results.find{ |r| r['file_name'] =~ /.*\.tar\.gz$/ } || {})['sha1']
+        next unless sha1
+        dir = Dir.mktmpdir('update-packages-', "#{APP_CONFIG['root_path']}")
+        begin
+          system "cd #{dir} && curl -L -O #{APP_CONFIG['file_store_url']}/api/v1/file_stores/#{sha1}; tar -xzf #{sha1}"
+          system "rm -f #{dir}/#{sha1}"
+
+          extract_rpms_and_update_packages("#{dir}/archives/SRC_RPM", bl, 'source', token)
+          extract_rpms_and_update_packages("#{dir}/archives/RPM", bl, 'binary', token)
+        ensure
+          # remove the directory.
+          FileUtils.remove_entry_secure dir
+        end
+      end
+    end
+
+    say "[#{Time.zone.now}] done"
+  end
+
+  def extract_rpms_and_update_packages(dir, bl, package_type, token)
+    Dir.glob("#{dir}/*.rpm") do |rpm_file|
+      fullname = File.basename rpm_file
+      package = bl.packages.by_package_type(package_type).find{ |p| p.fullname == fullname }
+      next unless package
+
+      package.sha1 = Digest::SHA1.file(rpm_file).hexdigest
+      if %x[ curl #{APP_CONFIG['file_store_url']}/api/v1/file_stores.json?hash=#{package.sha1} ] == '[]'
+        system "curl --user #{token}: -POST -F 'file_store[file]=@#{rpm_file}' #{APP_CONFIG['file_store_url']}/api/v1/upload"
+      end
+      package.save!
+    end
+  end
+
 end
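The interesting part of the task is the per-RPM bookkeeping: each package gets the SHA1 of its rpm file, and the rpm is uploaded to the file store only when that hash is not known there yet. A condensed sketch of that step; the path, URL and token below are placeholders, not values from the diff:

    require 'digest'

    rpm_file       = '/tmp/archives/RPM/foo-1.0-1.x86_64.rpm'  # placeholder path
    file_store_url = 'http://file-store.example.com'           # placeholder for APP_CONFIG['file_store_url']
    token          = 'SECRET'                                  # placeholder for the rosa_system token

    sha1 = Digest::SHA1.file(rpm_file).hexdigest

    # An empty JSON array from the lookup means the store has no file with this hash yet.
    if %x[ curl #{file_store_url}/api/v1/file_stores.json?hash=#{sha1} ] == '[]'
      system "curl --user #{token}: -POST -F 'file_store[file]=@#{rpm_file}' #{file_store_url}/api/v1/upload"
    end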