def execute(sync=false)
# create a thread for the job
- @thread = Thread.new {
+ @thread = Thread.new do
begin
job_main()
@log.error e.message
@log.error e.backtrace.inspect
end
- }
+ end
if sync then
@thread.join
# distributions
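+ # rebuild the distributions table from the in-memory @distributions list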
db.do "DELETE FROM distributions"
- @distributions.each { |dist|
+ @distributions.each do |dist|
dist_addr = dist.pkgsvr_ip + ":" + dist.pkgsvr_port.to_s
db.do "INSERT INTO distributions VALUES('#{dist.name}','#{dist.pkgsvr_url}','#{dist_addr}')"
- }
+ end
rescue DBI::DatabaseError => e
puts "DB update failed!"
puts e.errstr
# distributions
rs = db.execute "SELECT * FROM distributions"
- rs.fetch_hash { |row|
+ rs.fetch_hash do |row|
pkgsvr_ip = row['pkgsvr_addr'].split(":")[0]
pkgsvr_port = row['pkgsvr_addr'].split(":")[1].to_i
add_distribution_internal(row['name'], row['pkgsvr_url'], pkgsvr_ip, pkgsvr_port)
- }
+ end
rs.finish
rescue DBI::DatabaseError => e
puts "DB loading failed!"
file_path = "#{BuildServer::CONFIG_ROOT}/#{@parent.id}/latest_job"
if File.exist? file_path then
latest_idx = -1
- File.open( file_path, "r" ) { |f|
- f.each_line { |l|
+ File.open( file_path, "r" ) do |f|
+ f.each_line do |l|
latest_idx = l.strip.to_i
break
- }
- }
+ end
+ end
if latest_idx < 0 then latest_idx = -1 end
@new_job_index = latest_idx + 1
else
# get new id
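+ # hands out @new_job_index under the @latest_job_touch mutex, writing it to the
+ # latest_job file before incrementing so the counter survives a restart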
def get_new_job_id
new_idx = 0
- @latest_job_touch.synchronize {
+ @latest_job_touch.synchronize do
new_idx = @new_job_index
file_path = "#{BuildServer::CONFIG_ROOT}/#{@parent.id}/latest_job"
- File.open( file_path, "w" ) { |f|
+ File.open( file_path, "w" ) do |f|
f.puts "#{@new_job_index}"
- }
+ end
@new_job_index += 1
- }
+ end
return new_idx
end
# initialize a normal job
def initialize_job ( job )
job.status = "INITIALIZING"
- Thread.new {
+ Thread.new do
begin
# init
if not job.init or job.status == "ERROR" then
@parent.log.error e.message
@parent.log.error e.backtrace.inspect
end
- }
+ end
@parent.log.info "Job \"#{job.id}\" entered INITIALIZING status"
end
def cancel_job( job)
job.cancel_state = "WORKING"
@parent.log.info "Creating thread for canceling the job \"#{job.id}\""
- Thread.new {
+ Thread.new do
begin
# terminate the job thread
if not job.thread.nil? then
@parent.log.error e.message
@parent.log.error e.backtrace.inspect
end
- }
+ end
end
# handle
if not selected_job.nil? then return selected_job end
# if no reverse-build job exists
- @internal_job_schedule.synchronize {
+ @internal_job_schedule.synchronize do
# internal job first
ret = nil
if @internal_jobs.count > 0 then
end
return ret
- }
+ end
end
# get project from git repository
def get_git_project( repos, dist_name )
- @projects.each { |prj|
+ @projects.each do |prj|
# check project's distribution
if prj.dist_name != dist_name then next end
if prj.type == "GIT" and prj.repository == repos then
return prj
end
- }
+ end
return nil
end
db.do "DELETE FROM project_os"
# insert project info
- @projects.each { |prj|
+ @projects.each do |prj|
# COMMON
db.do "INSERT INTO projects (name,type,password,dist_name) VALUES('#{prj.name}', '#{prj.type}', '#{prj.passwd}', '#{prj.dist_name}')"
# OS
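+ # last_insert_rowid() (SQLite) returns the id of the project row just inserted;
+ # it keys the project_os rows below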
prj_id = db.select_one("select last_insert_rowid()")[0]
- prj.os_list.each { |os|
+ prj.os_list.each do |os|
db.do "INSERT INTO project_os VALUES('#{prj_id}','#{os}')"
- }
+ end
if not prj.save_db() then raise RuntimeError, "Saving project DB failed!" end
- }
+ end
rescue DBI::DatabaseError => e
puts "DB update failed!"
puts e.errstr
# projects
rs = db.execute "SELECT * FROM projects"
- rs.fetch_hash { |row|
+ rs.fetch_hash do |row|
prj_id = row['id']
prj_name = row['name']
prj_type = row['type']
# os
prj_os_list = []
rs2 = db.execute "SELECT os_name FROM project_os WHERE prj_id = #{prj_id}"
- rs2.fetch { |row2|
+ rs2.fetch do |row2|
prj_os_list.push row2[0]
- }
+ end
rs2.finish
if prj_type == "GIT" then
new_project = GitBuildProject.new(prj_name, @server, prj_os_list, prj_dist)
if not new_project.load_db() then raise RuntimeError, "Project DB load failed!" end
@projects.push new_project
- }
+ end
rs.finish
rescue DBI::DatabaseError => e
puts "DB loading failed!"
# start listening
def start()
- @thread = Thread.new {
+ @thread = Thread.new do
# make the loop recover when an unhandled exception occurs
while not @finish_loop
begin
@log.error e.backtrace.inspect
end
end
- }
+ end
end
handle_cmd_register( req_line, req )
@log.info "Handled REQ: #{req_line}"
when "DOWNLOAD"
- Thread.new {
+ Thread.new do
begin
handle_cmd_download( req_line, req )
rescue => e
@log.error e.message
@log.error e.backtrace.inspect
end
- }
+ end
when "UPLOAD"
- Thread.new {
+ Thread.new do
begin
handle_cmd_upload( req_line, req )
rescue => e
@log.error e.message
@log.error e.backtrace.inspect
end
- }
+ end
else
@log.info "Received Unknown REQ: #{req_line}"
raise "Unknown request: #{req_line}"
if project_name_list.count > 1 or os_list.count > 1 then
new_job_list = []
i = 0
- project_name_list.each { |pname|
+ project_name_list.each do |pname|
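+ # use the password at the matching index when one was supplied,
+ # otherwise fall back to the first password in the list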
if not passwd_list[i].nil? then passwd = passwd_list[i]
else passwd = passwd_list[0] end
check_build_project(pname,passwd,dist_name,req)
- os_list.each { |os|
+ os_list.each do |os|
new_job = create_new_job( pname, os, dist_name )
if new_job.nil? then
@log.warn "\"#{pname}\" does not support #{os}"
end
new_job_list.push new_job
@log.info "Received a request for building this project : #{pname}, #{os}"
- }
+ end
i = i + 1
- }
+ end
if new_job_list.count > 1 then
new_job = @parent_server.prjmgr.create_new_multi_build_job( new_job_list )
new_job.set_internal_job( dock_num )
new_job.git_commit = git_commit
incoming_dir = "#{@parent_server.transport_path}/#{dock_num}"
- pkg_files.each { |file|
+ pkg_files.each do |file|
new_job.add_external_package( file )
- }
+ end
return new_job
end
result += win_pids
# gather MinGW/MSYS process ids
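+ # assuming "descendants" defaults each pid's entry to an array containing that pid,
+ # appending a child's array (by reference) under its parent means flattening one
+ # entry later yields that process's entire subtree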
- Hash[*`ps -e`.scan(/^[\t\s]*(\d+)[\t\s]+(\d+)/).flatten.map{|x| x.to_i}].each {|pid,ppid|
+ Hash[*`ps -e`.scan(/^\s*(\d+)\s+(\d+)/).flatten.map{|x| x.to_i}].each do |pid,ppid|
    descendants[ppid] << descendants[pid]
- }
+ end
@parent_server.reload_dist_package()
snapshot_name = @parent_server.register( file_path_list, dist_name, true, false, true)
BuildCommServer.send(req,"SUCC|#{snapshot_name}")
- rescue => e
- @log.error "register failed"
- @log.error e.message
- @log.error e.backtrace.inspect
- BuildCommServer.send(req, "ERROR|#{e.message}")
- @parent_server.release_lock_file
- return
- end
+ rescue => e
+ @log.error "register failed"
+ @log.error e.message
+ @log.error e.backtrace.inspect
+ BuildCommServer.send(req, "ERROR|#{e.message}")
+ @parent_server.release_lock_file
+ return
+ end
}
if not dock_name.empty? then
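+ # move the uploaded file into place under $filemove_mutex; if the destination
+ # already exists (presumably an identical file placed by another upload),
+ # discard this copy and reuse the existing one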
$filemove_mutex.synchronize {
if not File.exist? distfile then
Utils.execute_shell("mv #{filepath} #{distfile}")
- else
- Utils.execute_shell("rm -f #{filepath}")
- return distfile
- end
+ else
+ Utils.execute_shell("rm -f #{filepath}")
+ return distfile
+ end
}
if File.exist? distfile then return distfile
# version check: is the existing version higher than the uploaded one?
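+ # Utils.compare_version(existing, incoming) apparently returns 1 only when the
+ # incoming version is newer; anything else (equal or older) rejects the upload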
if (not exist_pkg.nil?) and (not internal_flag) then
- if not ( Utils.compare_version( exist_pkg.version, pkg.version ) == 1 ) then
+ if Utils.compare_version( exist_pkg.version, pkg.version ) != 1 then
raise RuntimeError, "existing package's version is higher then register package : [#{pkg.package_name}] in [#{pkg.os}]"
end
end
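+ # drop each requested package from every supported OS's package hash; unsupported
+ # OS names are only logged and skipped, then the per-OS pkg_list files are rewritten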
pkg_name_list.each do |package_name|
removed_flag = false
- os_list.each do |os|
- if not @support_os_list.include? os then
- @log.error( "package server does not support input os : #{os}")
+ os_list.each do |o|
+ if not @support_os_list.include? o then
+ @log.error( "package server does not support input os : #{o}")
next
end
- if @pkg_hash_os[os].key?(package_name) then
- @log.info( "remove package [#{package_name}] in #{os}", Log::LV_USER)
- @pkg_hash_os[os].delete(package_name)
+ if @pkg_hash_os[o].key?(package_name) then
+ @log.info( "remove package [#{package_name}] in #{o}", Log::LV_USER)
+ @pkg_hash_os[o].delete(package_name)
removed_flag = true
end
end
# update the pkg_list files
- os_list.each do |os|
- write_pkg_list(os)
+ os_list.each do |o|
+ write_pkg_list(o)
end
write_archive_pkg_list
end