client.send "QUERY|OS"
result0 = client.receive_data()
if not result0.nil? then
- for item in result0
+ result0.each do |item|
puts "#{item.strip}"
end
end
client.terminate
exit 1
end
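Every hunk in this patch applies the same mechanical rewrite: a "for x in collection" loop becomes "collection.each do |x|". A minimal, self-contained sketch of the two forms (the data below is made up for illustration); both visit the same elements in the same order, but the block parameter of each stays local to the block, whereas the for variable leaks into the enclosing scope:

items = ["osp_2.0", "emulator_1.1"]

for item in items          # old form: item is still defined after the loop
  puts item.strip
end

items.each do |item|       # new form: item is scoped to the block
  puts item.strip
end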
- for item in result1
+ result1.each do |item|
tok = item.split(",").map { |x| x.strip }
type = (tok[0]=="G" ? "NORMAL":"REMOTE")
printf("%-25s %s\n",tok[1],type)
client.terminate
exit 1
end
- for item in result1
+ result1.each do |item|
tok = item.split(",").map { |x| x.strip }
if tok[3].nil? then
puts "#{tok[1]} #{tok[0]} #{tok[2]}"
# upload to ftp server
ftp_filepath = nil
- for attempt in ATTEMPTS
+ ATTEMPTS.each do |attempt|
ftp_filepath = FileTransfer.putfile(ip, port, username, passwd, src_file, @log)
if !ftp_filepath.nil? then break;
else @log.info "Server is the #{attempt} upload attempt fails" end
# download from ftp server
dst_filepath = nil
- for attempt in ATTEMPTS
+ ATTEMPTS.each do |attempt|
dst_filepath = FileTransfer.getfile(ip, port, username, passwd, filepath, dst_file, @log)
if not dst_filepath.nil? then break
else "Server is the #{attempt} download attempt fails" end
if line.strip == "READY" then
@log.info "Server is ready to receive file"
ftp_filepath = nil
- for attempt in ATTEMPTS
+ ATTEMPTS.each do |attempt|
ftp_filepath = FileTransfer.putfile(ip, port, username, passwd, src_file, @log)
if !ftp_filepath.nil? then break;
else @log.info "Client is the #{attempt} upload attempt fails" end
ftp_filepath = tok[1].strip
@log.info "Server uploaded file sucessfully"
dst_filepath = nil
- for attempt in ATTEMPTS
+ ATTEMPTS.each do |attempt|
dst_filepath = FileTransfer.getfile(ip, port, username, passwd, ftp_filepath, dst_file, @log)
if not dst_filepath.nil? then break
else "Client is the #{attempt} download attempt fails" end
#cancel
def cancel()
# cancel all its reverse job
- for job in @server.jobmgr.reverse_build_jobs
+ @server.jobmgr.reverse_build_jobs.each do |job|
if job.get_parent_job() == self and job.cancel_state == "NONE" then
job.cancel_state = "INIT"
end
def can_be_built_on?(host_os)
if @pkginfo.nil? then return false end
- for pkg in @pkginfo.packages
+ @pkginfo.packages.each do |pkg|
if pkg.os_list.include? @os and pkg.build_host_os.include? host_os then
return true
end
end
# check package name
- for pkg in get_packages()
- for wpkg in wjob.get_packages()
+ get_packages().each do |pkg|
+ wjob.get_packages().each do |wpkg|
if pkg.package_name == wpkg.package_name then
#puts "Removed from candiated... A == B"
return true
def does_depend_on?( wjob )
# compare build dependency
- for dep in get_build_dependencies(@os)
- for wpkg in wjob.get_packages()
+ get_build_dependencies(@os).each do |dep|
+ wjob.get_packages().each do |wpkg|
# dep packages of my job must have same name and target os
# with packages in working job
if dep.package_name == wpkg.package_name and
def does_depended_by?( wjob )
- for pkg in get_packages()
- for dep in wjob.get_build_dependencies(wjob.os)
+ get_packages().each do |pkg|
+ wjob.get_build_dependencies(wjob.os).each do |dep|
# dep package of working job must have same name and target os
# with packages in my job
if dep.package_name == pkg.package_name and
# check [project,os] is in reverse fail project list
def is_rev_fail_project( prj, os )
# check the project already exist
- for p in @rev_fail_projects
+ @rev_fail_projects.each do |p|
if p[0] == prj and p[1] == os then
return true
end
# add [project,os] to reverse fail project list
def add_rev_fail_project( prj, os )
# check the project already exist
- for p in @rev_fail_projects
+ @rev_fail_projects.each do |p|
if p[0] == prj and p[1] == os then
return
end
remove_list = []
# check project and os name
- for p in @rev_fail_projects
+ @rev_fail_projects.each do |p|
if p[0] == prj and p[1] == os then
remove_list.push p
end
end
# remove
- for r in remove_list
+ remove_list.each do |r|
@rev_fail_projects.delete r
end
end
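The two loops above collect matching [project, os] pairs into remove_list and then delete them one by one. Array#delete_if performs the same in-place removal in a single pass, should a later cleanup want it; a self-contained sketch with made-up data:

rev_fail_projects = [["alpha", "ubuntu-32"], ["beta", "windows-32"], ["alpha", "ubuntu-32"]]
prj, os = "alpha", "ubuntu-32"

rev_fail_projects.delete_if { |p| p[0] == prj and p[1] == os }
# rev_fail_projects is now [["beta", "windows-32"]]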
# check if the project is my dependent project
def is_build_dependent_project( prj, os )
dep_list = get_build_dependent_projects()
- for dep in dep_list
+ dep_list.each do |dep|
if dep[0] == prj and dep[1] == os then
return true
end
end
# compare with package version in package server
- for pkg in @pkginfo.packages
+ @pkginfo.packages.each do |pkg|
# check all supported os
ver_svr = @pkgsvr_client.get_attr_from_pkg( pkg.package_name, @os, "version")
# ignore if package does not exist
@log.info( "Checking build dependency ...", Log::LV_USER)
@pkgsvr_client.update
unmet_bdeps = []
- for dep in @pkginfo.get_build_dependencies( @os )
+ @pkginfo.get_build_dependencies( @os ).each do |dep|
# if parent exist, search parent source path first
# if not found, check package server
ver_svr = nil
@log.info( "Checking install dependency ...", Log::LV_USER)
unmet_ideps = []
- for dep in @pkginfo.get_install_dependencies( @os )
+ @pkginfo.get_install_dependencies( @os ).each do |dep|
# if parent exist, search pkginfos for all sub jobs
# if not found, check package server
found = false
# get local packages to install
src_path = @parent.source_path
deps = @pkginfo.get_build_dependencies(@os)
- for dep in deps
+ deps.each do |dep|
pkgs = get_local_required_packages( dep, src_path )
local_pkgs += pkgs
end
local_pkgs += @external_pkgs
src_path = @pending_ancestor.source_path
ver = @pending_ancestor.pkginfo.get_version()
- for pkg in @pending_ancestor.pkginfo.get_target_packages(@os)
+ @pending_ancestor.pkginfo.get_target_packages(@os).each do |pkg|
local_pkgs.push "#{src_path}/#{pkg.package_name}_#{ver}_#{@os}.zip"
end
@pending_ancestor.rev_success_jobs.each do |job|
src_path = job.source_path
ver = job.pkginfo.get_version()
- for pkg in job.pkginfo.get_target_packages(@os)
+ job.pkginfo.get_target_packages(@os).each do |pkg|
local_pkgs.push "#{src_path}/#{pkg.package_name}_#{ver}_#{@os}.zip"
end
end
# get package path list
binpkg_path_list = Dir.glob("#{@source_path}/*_*_#{@os}.zip")
- for file in binpkg_path_list
+ binpkg_path_list.each do |file|
@log.info( " * #{file}", Log::LV_USER)
FileUtils.cp(file,"#{dst_path}/")
end
while( not @finish )
# update friend server status
- for server in @friend_servers
+ @friend_servers.each do |server|
# update state
server.update_state
end
def add_remote_server( ip, port )
# if already exit, return false
- for svr in @friend_servers
+ @friend_servers.each do |svr|
if svr.ip.eql? ip and svr.port == port then
return false
end
def add_remote_package_server( url, proxy )
# if already exit, return false
- for entry in @remote_pkg_servers
+ @remote_pkg_servers.each do |entry|
u = entry[0]
if u == url then
def add_target_os( os_name )
# if already exit, return false
- for os in @supported_os_list
+ @supported_os_list.each do |os|
if os.eql? os_name then
return false
end
# get availables server
# but, job must not be "REGISTER" and "MULTIBUILD" job
if job.type != "REGISTER" and job.type != "MULTIBUILD" then
- for server in @friend_servers
+ @friend_servers.each do |server|
if ( server.status == "RUNNING" and server.can_build?( job ) and
not server.has_waiting_jobs and
server.get_number_of_empty_room > 0 )
# get best
# it is better if working jobs count is less
max_empty_room = best_server.get_number_of_empty_room
- for server in candidates
+ candidates.each do |server|
# check whether idle, use it
if not server.has_working_jobs then return server end
if can_build? job then return true end
#if not found, check friends
- for server in @friend_servers
+ @friend_servers.each do |server|
if server.status == "RUNNING" and
job.can_be_built_on? server.host_os then
return true
end
# check OS name
- for os in os_list
+ os_list.each do |os|
if not server.supported_os_list.include? os then
puts "Unsupported OS name \"#{os}\" is used!"
puts "Check the following supported OS list:"
- for s_os in server.supported_os_list
+ server.supported_os_list.each do |s_os|
puts " * #{s_os}"
end
# create sub jobs
build_jobs = []
- for prj in @server.prjmgr.projects
+ @server.prjmgr.projects.each do |prj|
if prj.type != "GIT" then next end
build_jobs += @server.prjmgr.create_new_jobs_for_all_os( prj.name )
end
# set full build job flag
- for job in build_jobs
+ build_jobs.each do |job|
job.is_fullbuild_job = true
job.set_parent_job( self )
end
# add all jobs to jobmanager
job_status_map = {} # for tracking job status changes
- for job in build_jobs
+ build_jobs.each do |job|
@server.jobmgr.add_internal_job( job )
@log.info( "Added new job \"#{job.id}\"(#{job.get_project().name}) for #{job.os}!",
error_exist = false
while not all_jobs_finished
all_jobs_finished = true
- for job in build_jobs
+ build_jobs.each do |job|
# check status chanaged, if then print
if job_status_map[ job.id ] != job.status then
contents.push " "
contents.push "%-30s| %10s | %10s" % ["package name", "version", "os"]
contents.push "---------------------------------------------------------------"
- for pkg in @pkginfo.packages
+ @pkginfo.packages.each do |pkg|
if not pkg.os.eql? @os then next end
mail_list = mail_list | Mail.parse_email( pkg.maintainer )
contents.push("%-30s| %10s | %10s" % [ pkg.package_name, pkg.version, pkg.os] )
# check name and version
pkginfo=@package_infos[version]
pkg_list = os.nil? ? pkginfo.packages : pkginfo.get_target_packages(os)
- for pkg in pkg_list
+ pkg_list.each do |pkg|
if pkg.package_name.eql? name then return true end
end
# write the line except my job_id
File.open(@list_file,"w") do |f|
- for l in lines
+ lines.each do |l|
if l.split(",")[0].eql? @job_id then next end
f.puts l
end
# scan all jobs
- for id in Dir.new(jobs_path).entries
+ Dir.new(jobs_path).entries.each do |id|
# skip . or ..
if id.eql? "." or id.eql? ".." then next end
# for reverse build jobs
job_list = @reverse_build_jobs
- for job in job_list
+ job_list.each do |job|
# if "ERROR", "FINISHED", "CANCELED" remove it from list
if job.status == "ERROR"
@parent.log.info "Job \"#{job.id}\" is stopped by ERROR"
# for internal jobs
job_list = @internal_jobs
- for job in job_list
+ job_list.each do |job|
# if "ERROR", "FINISHED", "CANCELED" remove it from list
if job.status == "ERROR"
@parent.log.info "Job \"#{job.id}\" is stopped by ERROR"
# for normal job
job_list = @jobs
- for job in job_list
+ job_list.each do |job|
# if "ERROR", "FINISHED", "CANCELED" remove it from list
if job.status == "ERROR"
@parent.log.info "Job \"#{job.id}\" is stopped by ERROR"
def get_available_job
# check reverse build job first
selected_job = nil
- for job in @reverse_build_jobs
+ @reverse_build_jobs.each do |job|
if job.status == "WAITING" then
selected_job = job
break
def get_number_of_empty_room
working_cnt = 0
parent_list = []
- for job in @jobs + @internal_jobs + @reverse_build_jobs
+ (@jobs + @internal_jobs + @reverse_build_jobs).each do |job|
if job.status == "WORKING" then
working_cnt = working_cnt + 1
end
# check there are working jobs
def has_working_jobs
- for job in @jobs + @internal_jobs + @reverse_build_jobs
+ (@jobs + @internal_jobs + @reverse_build_jobs).each do |job|
if job.status == "WORKING" then
return true
end
# check there are waiting jobs
def has_waiting_jobs
- for job in @jobs + @internal_jobs + @reverse_build_jobs
+ (@jobs + @internal_jobs + @reverse_build_jobs).each do |job|
if job.status == "WAITING" then
return true
end
def get_working_jobs
result = []
- for job in @jobs + @internal_jobs + @reverse_build_jobs
+ (@jobs + @internal_jobs + @reverse_build_jobs).each do |job|
if job.status == "WORKING" then
result.push job
end
def get_waiting_jobs
result = []
- for job in @jobs + @internal_jobs + @reverse_build_jobs
+ (@jobs + @internal_jobs + @reverse_build_jobs).each do |job|
if job.status == "WAITING" then
result.push job
end
def get_remote_jobs
result = []
- for job in @jobs + @internal_jobs + @reverse_build_jobs
+ (@jobs + @internal_jobs + @reverse_build_jobs).each do |job|
if job.status == "REMOTE_WORKING" then
result.push job
end
def get_pending_jobs
result = []
- for job in @jobs
+ @jobs.each do |job|
if job.status == "PENDING" then
result.push job
end
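The job-manager scans above (has_working_jobs, has_waiting_jobs, get_working_jobs, get_waiting_jobs, get_remote_jobs, get_pending_jobs) all share one of two shapes: return true on the first status match, or collect every match. Enumerable covers both shapes directly; a self-contained sketch with a hypothetical Job struct, not taken from the patch:

Job = Struct.new(:id, :status)
jobs = [Job.new(1, "WAITING"), Job.new(2, "WORKING"), Job.new(3, "PENDING")]

jobs.any?   { |job| job.status == "WORKING" }   # true/false, the has_*_jobs shape
jobs.select { |job| job.status == "PENDING" }   # matching jobs, the get_*_jobs shape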
# gather all working jobs and full-build jobs
check_dep_jobs = []
- for job in jobs
+ jobs.each do |job|
if job.cancel_state != "NONE" then next end
if job.status == "WORKING" or job.status == "REMOTE_WORKING" or job.status == "PENDING" then
end
# for waiting jobs
- for job in jobs
+ jobs.each do |job|
if job.cancel_state != "NONE" then next end
if job.status != "WAITING" then next end
# check build dependency against working job
pre_jobs = []
- for cjob in check_dep_jobs
+ check_dep_jobs.each do |cjob|
if job == cjob then next end
# In case that "WORKING/REMOTE_WORKING" job has build dependency on me
if (cjob.status == "WORKING" or cjob.status == "REMOTE_WORKING" ) and
if pre_jobs.count != job.pre_jobs.count then
is_changed=true
else
- for pjob in pre_jobs
+ pre_jobs.each do |pjob|
if not job.pre_jobs.include? pjob then
is_changed = true
break
end
if pre_jobs.count > 0 and is_changed then
job.log.info( "Waiting for finishing following jobs:", Log::LV_USER)
- for bjob in pre_jobs
+ pre_jobs.each do |bjob|
if bjob.type == "BUILD" then
job.log.info( " * #{bjob.id} #{bjob.pkginfo.packages[0].source}", Log::LV_USER)
elsif bjob.type == "MULTIBUILD" then
end
# initialize all sub jobs and add them to "internal_jobs"
- for job in @sub_jobs
+ @sub_jobs.each do |job|
# initialize job
if not job.init or job.status == "ERROR" then
job.status = "ERROR"
# report error
if @status == "ERROR" then
# register delayed clean action for sub jobs
- for job in @sub_jobs
+ @sub_jobs.each do |job|
@server.cleaner.clean_afterwards(job.id)
end
elsif @status == "CANCELED" then
# register delayed clean action for sub jobs
- for job in @sub_jobs
+ @sub_jobs.each do |job|
@server.cleaner.clean_afterwards(job.id)
end
else
# terminate all sub jobs
- for job in @sub_jobs
+ @sub_jobs.each do |job|
if not job.log.nil? then job.terminate() end
end
def get_packages()
packages = []
- for job in @sub_jobs
+ @sub_jobs.each do |job|
packages = packages + job.get_packages()
end
packages.uniq!
def get_build_dependencies(target_os)
deps = []
- for job in @sub_jobs
+ @sub_jobs.each do |job|
deps = deps + job.get_build_dependencies(target_os)
end
deps.uniq!
def get_source_dependencies(target_os, host_os)
deps = []
- for job in @sub_jobs
+ @sub_jobs.each do |job|
deps = deps + job.get_source_dependencies(target_os,host_os)
end
deps.uniq!
end
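The three aggregators above (get_packages, get_build_dependencies, get_source_dependencies) concatenate each sub-job's list and then call uniq!. Enumerable#flat_map plus uniq expresses the same aggregation in one pass; a self-contained sketch with hypothetical sub-job objects:

SubJob = Struct.new(:packages)
sub_jobs = [SubJob.new(["pkg-a", "pkg-b"]), SubJob.new(["pkg-b", "pkg-c"])]

packages = sub_jobs.flat_map { |job| job.packages }.uniq
# packages == ["pkg-a", "pkg-b", "pkg-c"]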
# check package name
- for pkg in get_packages
- for wpkg in wjob.get_packages()
+ get_packages.each do |pkg|
+ wjob.get_packages().each do |wpkg|
if pkg.package_name == wpkg.package_name then
#puts "Removed from candiated... A == B"
return true
def does_depend_on?( wjob )
# compare build dependency
- for dep in get_build_dependencies(@os)
- for wpkg in wjob.get_packages()
+ get_build_dependencies(@os).each do |dep|
+ wjob.get_packages().each do |wpkg|
# dep packages of my job must have same name and target os
# with packages in working job
if dep.package_name == wpkg.package_name and
def does_depended_by?( wjob )
- for pkg in get_packages()
- for dep in wjob.get_build_dependencies(wjob.os)
+ get_packages().each do |pkg|
+ wjob.get_build_dependencies(wjob.os).each do |dep|
# dep package of working job must have same name and target os
# with packages in my job
if dep.package_name == pkg.package_name and
# initialize status map
job_status_map = {}
- for job in @sub_jobs
+ @sub_jobs.each do |job|
job_status_map[job.id] = job.status
end
# add to internal job
@server.jobmgr.internal_job_schedule.synchronize {
- for job in @sub_jobs
+ @sub_jobs.each do |job|
# init finished, add internal_jobs
@server.jobmgr.add_internal_job(job)
@log.info( "Added new job \"#{job.get_project().name}\" for #{job.os}! (#{job.id})",
stop_status = "FINISHED"
while not all_jobs_finished
all_jobs_finished = true
- for job in @sub_jobs
+ @sub_jobs.each do |job|
# check status chanaged, if then print
if job_status_map[ job.id ] != job.status then
@log.info(" * Sub-Job \"#{job.get_project().name}(#{job.os})\" has entered \"#{job.status}\" state. (#{job.id})", Log::LV_USER)
end
# scan all projects
- for name in Dir.new(@project_root).entries
+ Dir.new(@project_root).entries.each do |name|
# skip . or ..
if name.eql? "." or name.eql? ".." then next end
# get_project of the name
def get_project ( name )
- for prj in @projects
+ @projects.each do |prj|
if prj.name.eql? name then return prj end
end
def create_new_multi_build_job( sub_job_list )
result = MultiBuildJob.new( @server )
- for job in sub_job_list
+ sub_job_list.each do |job|
result.add_sub_job( job )
end
result = MultiBuildJob.new( @server )
# create sub jobs
- for prj in @projects
+ @projects.each do |prj|
if prj.type != "GIT" then next end
- for os in prj.os_list
+ prj.os_list.each do |os|
if not @server.supported_os_list.include? os then next end
new_job = create_new_job( prj.name, os )
# will return [project,os,ver] list
def get_projects_from_pkgs(pkgs)
result = []
- for prj in @projects
- for pkg in pkgs
+ @projects.each do |prj|
+ pkgs.each do |pkg|
name = pkg.package_name
ver = pkg.version
os = pkg.os
def get_project_from_package_name(pkg_name)
- for prj in @projects
+ @projects.each do |prj|
# check project provide target package
if prj.include_package?(pkg_name) then
return prj
# read pkginfo
pkginfo_dir = "#{@project_root}/#{name}/pkginfos"
if not File.exist? pkginfo_dir then FileUtils.mkdir_p pkginfo_dir end
- for file in Dir.new(pkginfo_dir).entries
+ Dir.new(pkginfo_dir).entries.each do |file|
if file.eql? "." or file.eql? ".." then next end
vlen = file.length - ".manifest".length
def does_depended_by?( wjob )
if @pkg_type == "BINARY" then
- for dep in wjob.get_build_dependencies(wjob.os)
+ wjob.get_build_dependencies(wjob.os).each do |dep|
# dep package of working job must have same name and target os
# with packages in my job
if dep.package_name == @pkg_name and
end
end
else
- for dep in wjob.get_source_dependencies(wjob.os,@host_os)
+ wjob.get_source_dependencies(wjob.os,@host_os).each do |dep|
if dep.package_name == @pkg_name then
return true
end
@pkginfo.packages[0].os_list.count > 1 then
pkg = @pkginfo.packages[0]
- for os in pkg.os_list
+ pkg.os_list.each do |os|
if @os == os then next end
# skip when the os does not exist in project's supported os list
# create reverse build job
rev_build_jobs = []
- for p in rev_projects
+ rev_projects.each do |p|
prj = p[0]
os = p[1]
version = p[2]
rev_prjs_txt = rev_build_jobs.map {|j| "#{j.get_project().name}(#{j.os})"}.join(", ")
@log.info( " * Will check reverse-build for next projects: #{rev_prjs_txt}", Log::LV_USER)
end
- for new_job in rev_build_jobs
+ rev_build_jobs.each do |new_job|
@log.info( " * Checking reverse-build ... #{new_job.get_project().name}(#{new_job.id})", Log::LV_USER)
# job init
result = new_job.init()
# package update
@pkgsvr_client.update
- for pkg in @pkginfo.packages
+ @pkginfo.packages.each do |pkg|
ver_local = pkg.version
#ver_svr = @pkgsvr_client.get_package_version( pkg.package_name, @os )
ver_svr = @pkgsvr_client.get_attr_from_pkg( pkg.package_name, @os, "version")
def build( git_repos, source_path, os, is_rev_build, srcinfo, local_pkgs )
@log.info( "Start to build on remote server...", Log::LV_USER )
# send local packages
- for pkg_path in local_pkgs
+ local_pkgs.each do |pkg_path|
@log.info( "Sending file... : #{pkg_path}", Log::LV_USER )
result = send_file_to_remote( pkg_path )
if not result then
end
# receive binary package
- for file_name in result_files
+ result_files.each do |file_name|
@log.info( "Receiving file from remote server : #{file_name}", Log::LV_USER )
result = receive_file_from_remote( "#{source_path}/#{file_name}" )
if not result then
src_pkg_name_list = []
case job.type
when "BUILD"
- for pkg in job.pkginfo.get_target_packages(job_os)
+ job.pkginfo.get_target_packages(job_os).each do |pkg|
bin_pkg_name_list.push pkg.package_name
end
when "REGISTER"
# get reverse projects from build dependency
rev_pkgs = []
- for pkg_name in bin_pkg_name_list
+ bin_pkg_name_list.each do |pkg_name|
rev_pkgs += job.pkgsvr_client.get_reverse_build_dependent_packages(pkg_name, job_os)
end
- for pkg_name in src_pkg_name_list
+ src_pkg_name_list.each do |pkg_name|
rev_pkgs += job.pkgsvr_client.get_reverse_source_dependent_packages(pkg_name)
end
rev_pkgs.uniq!
# create reverse build job
rev_build_jobs = []
- for p in rev_projects
+ rev_projects.each do |p|
rev_prj = p[0]
rev_os = p[1]
rev_ver = p[2]
# if this is sub job, all other sibling job must be excluded
if job.is_sub_job? then
- for sub_job in job.get_parent_job().get_sub_jobs
+ job.get_parent_job().get_sub_jobs.each do |sub_job|
sub_prj = sub_job.get_project()
sub_os = sub_job.os
if rev_prj == sub_prj and rev_os == sub_os then
end
# for all reverse job
- for rev_job in rev_build_jobs
+ rev_build_jobs.each do |rev_job|
# add to job manager
job.server.jobmgr.add_reverse_build_job(rev_job)
log.info( " * Added new job for reverse-build ... \
cancel_other_jobs = false
while not rev_build_finished
rev_build_finished = true
- for rev_job in rev_build_jobs
+ rev_build_jobs.each do |rev_job|
rev_prj = rev_job.get_project()
rev_os = rev_job.os
# if "exist on error" cancel all other jobs
if exit_on_error then
cancel_other_jobs = true
- for j in rev_build_jobs
+ rev_build_jobs.each do |j|
if j.status != "ERROR" and j.status != "FINISHED" and
j.status != "CANCELED" and j.cancel_state == "NONE" then
end
# clean up all reverse build jobs
- for rev_job in rev_build_jobs
+ rev_build_jobs.each do |rev_job|
if rev_job.status == "ERROR" or rev_job.status == "CANCELED" then
rev_job.server.cleaner.clean_afterwards(rev_job.id)
else
private
def self.is_project_included?( prj_list, prj, os )
- for p in prj_list
+ prj_list.each do |p|
if p[0] == prj and p[1] == os then return true end
end
# send the status
BuildCommServer.send_begin(req)
- for job in job_list
+ job_list.each do |job|
status = job.status
if status == "REMOTE_WORKING" then status = "REMOTE" end
if job.cancel_state != "NONE" then status = "CANCEL" end
BuildCommServer.send_begin(req)
# print GIT projects
sorted_list = @parent_server.prjmgr.projects.sort { |x,y| x.name <=> y.name }
- for prj in sorted_list
+ sorted_list.each do |prj|
if prj.type != "GIT" then next end
BuildCommServer.send(req,"G,#{prj.name},#{prj.repository},#{prj.branch}")
end
# print BINARY projects
- for prj in sorted_list
+ sorted_list.each do |prj|
if prj.type != "BINARY" then next end
BuildCommServer.send(req,"B,#{prj.name},#{prj.pkg_name}")
end
# print REMOTE project
- for prj in sorted_list
+ sorted_list.each do |prj|
if prj.type != "REMOTE" then next end
BuildCommServer.send(req,"R,#{prj.name}")
end
when "OS"
BuildCommServer.send_begin(req)
# print GIT projects
- for os_name in @parent_server.supported_os_list
+ @parent_server.supported_os_list.each do |os_name|
BuildCommServer.send(req,"#{os_name}")
end
BuildCommServer.send_end(req)
end
result = []
- for os in os_list
+ os_list.each do |os|
if os == "all" or os == "*" then
result = result + @parent_server.supported_os_list
elsif os.include? "*" then
reg_os = os.gsub("*","[a-zA-Z0-9.]*")
- for svr_os in @parent_server.supported_os_list
+ @parent_server.supported_os_list.each do |svr_os|
matches = svr_os.match("#{reg_os}")
if not matches.nil? and matches.size == 1 and
matches[0] == svr_os then
BuildCommServer.send_begin(req)
req.puts "Error: Unsupported OS name \"#{os}\" is used!"
req.puts "Error: Check the following supported OS list. "
- for os_name in @parent_server.supported_os_list
+ @parent_server.supported_os_list.each do |os_name|
req.puts " * #{os_name}"
end
BuildCommServer.send_end(req)
return false
end
else
- for l in local_dep_pkgs
+ local_dep_pkgs.each do |l|
@log.info( "Installing local pacakge...#{l}", Log::LV_USER)
cl.install_local_pkg(l,true,false,File.dirname(l))
end
src_archive_list.push dep.package_name
end
src_archive_list.uniq!
- for archive_name in src_archive_list
+ src_archive_list.each do |archive_name|
@log.info( " * #{archive_name}", Log::LV_USER)
if cl.download_dep_source(archive_name).nil? then
@log.error( "Downloading \"#{archive_name}\" failed!", Log::LV_USER)
# generate shell script
File.open( "#{src_path}/.build.sh", "w" ) do |f|
f.puts "#!/bin/sh -xe"
- for l in contents
+ contents.each do |l|
f.puts l
end
# get category
os_category = Utils.get_os_category( os )
- for pkg in pkginfo.packages
+ pkginfo.packages.each do |pkg|
# skip if not support the target os
if not pkg.os_list.include? os
next
# get category
os_category = Utils.get_os_category( os )
- for pkg in pkginfo.packages
+ pkginfo.packages.each do |pkg|
# skip if not support the target os
if not pkg.os_list.include? os
next
# get category
# make clean
- for pkg in pkginfo.packages
+ pkginfo.packages.each do |pkg|
os = pkg.os
os_category = Utils.get_os_category( os )
@log.error( e.message, Log::LV_USER)
return false
end
- for pkg in pkginfo.packages
+ pkginfo.packages.each do |pkg|
os = pkg.os
os_category = Utils.get_os_category( os )
def get_build_dependencies( target_os )
# for all
list = []
- for pkg in @packages
+ @packages.each do |pkg|
# package that has the target os
if not pkg.os_list.include?(target_os)
next
end
# package that has the target os
- for dep in pkg.build_dep_list
+ pkg.build_dep_list.each do |dep|
list.push dep
end
end
def get_source_dependencies( target_os, host_os )
# for all
list = []
- for pkg in @packages
+ @packages.each do |pkg|
# only package that used in target os
if not pkg.os_list.include?(target_os)
next
end
# package that has the target os
- for dep in pkg.source_dep_list
+ pkg.source_dep_list.each do |dep|
# if dep.target_os_list.include? target_os
list.push dep
# end
def get_install_dependencies( target_os, pkg_name=nil )
# for all
list = []
- for pkg in @packages
+ @packages.each do |pkg|
if not pkg_name.nil? and pkg.package_name != pkg_name then next end
# only package that used in target os
if not pkg.os_list.include?(target_os)
end
# package that has the target os
- for dep in pkg.install_dep_list
+ pkg.install_dep_list.each do |dep|
list.push dep
end
end
def package_exist?(target_os, host_os)
- for pkg in @packages
+ @packages.each do |pkg|
# only package that used in target os
if pkg.os_list.include?(target_os) and
pkg.build_host_os.include?(host_os)
def get_target_packages(target_os)
pkgs = []
- for pkg in @packages
+ @packages.each do |pkg|
if pkg.os_list.include?(target_os) then
pkgs.push pkg
end
def pkg_exist?(name,ver,os)
- for pkg in @packages
+ @packages.each do |pkg|
if pkg.package_name != name then next end
if not ver.nil? and pkg.version != ver then next end
if not os.nil? and not pkg.os_list.include?(os) then next end
# get list
action_list = Array.new(@actions)
- for action in action_list
+ action_list.each do |action|
# if its time is reached, execute action
if not action.time.nil? and current_time > action.time then
action.execute
ftp.chdir(dirname)
list = ftp.ls
# TODO: if list is directory?
- for l in list
+ list.each do |l|
file = l.split(" ")[-1].strip
ftp.delete(file)
end
def get_reverse_source_dependent_packages(pkg_name)
result = []
- for os in @support_os_list
+ @support_os_list.each do |os|
pkg_hash = @pkg_hash_os[os]
pkg_list = pkg_hash.values
pkg_list.each do |pkg|
end
def read_remote_pkg_list(list_path)
- for os in @support_os_list
+ @support_os_list.each do |os|
filename = PKG_LIST_FILE_PREFIX + os
local_file_path = File.join(list_path, filename)
if File.exist? local_file_path then
def download_pkg_list(from_server, dist = nil)
if dist.nil? then dist = get_pkglist_path end
- for os in @support_os_list
+ @support_os_list.each do |os|
filename = PKG_LIST_FILE_PREFIX + os
file_url = File.join(@server_addr, @snapshot_path, filename)
if from_server then
end
# copy package list
- for os in @support_os_list
+ @support_os_list.each do |os|
FileUtils.copy_file( "#{snapshot_path}/#{PKG_LIST_FILE_PREFIX}#{os}",
"#{@location}/snapshots/#{name}/#{PKG_LIST_FILE_PREFIX}#{os}" )
end
used_archive_list = []
# collect remaining file's name from current package server version
- for os in @support_os_list
+ @support_os_list.each do |os|
@pkg_hash_os[os].each_value{ |pkg|
file_list.push(pkg.path.sub("/binary/",""))
os_list = @support_os_list
end
- for os in os_list
+ os_list.each do |os|
begin
info_file = "#{@location}/snapshots/#{snapshot}/#{PKG_LIST_FILE_PREFIX}#{os}"
if not File.exist? info_file then
end
def write_all_pkg_list
- for os in @support_os_list
+ @support_os_list.each do |os|
write_pkg_list(os)
end
end
else os_list = [ os ]
end
- for package_name in pkg_name_list
+ pkg_name_list.each do |package_name|
removed_flag = false
- for os in os_list
+ os_list.each do |os|
if not @support_os_list.include? os then
@log.error( "package server does not support input os : #{os}")
next
# update pkg_list file
- for os in os_list
+ os_list.each do |os|
write_pkg_list(os)
end
write_archive_pkg_list
def check_integrity
@log.info "check server pkg's install dependency integrity"
- for os in @support_os_list
- for pkg in @pkg_hash_os[os].each_value
+ @support_os_list.each do |os|
+ @pkg_hash_os[os].each_value do |pkg|
check_package_integrity(pkg)
end
end
error_msg = "[[#{pkg.package_name}] in #{pkg.os}]'s install dependency not matched in "
os = pkg.os
- for dep in pkg.install_dep_list
+ pkg.install_dep_list.each do |dep|
if @pkg_hash_os[os].has_key? dep.package_name then
target_pkg = @pkg_hash_os[os][dep.package_name]
else
end
error_msg = "[[#{pkg.package_name}] in #{pkg.os}]'s build dependency not matched in "
- for dep in pkg.build_dep_list
+ pkg.build_dep_list.each do |dep|
if dep.target_os_list.length == 0 then
build_dep_os = os
else
end
error_msg = "[[#{pkg.package_name}] in #{pkg.os}]'s source dependency not matched in "
- for dep in pkg.source_dep_list
+ pkg.source_dep_list.each do |dep|
if not @archive_pkg_list.include? dep.package_name then
raise RuntimeError,(error_msg + dep.to_s)
end
end
# read package_list file
- for os in @support_os_list
+ @support_os_list.each do |os|
@pkg_hash_os[os] = {}
pkg_list_file = "#{@location}/#{PKG_LIST_FILE_PREFIX}#{os}"
def get_all_reverse_depends_pkgs(pkg, checked_list)
depends_list = []
- for os in @support_os_list
+ @support_os_list.each do |os|
@pkg_hash_os[os].each_value{ |dpkg|
if dpkg.install_dep_list.include? pkg or \
dpkg.build_dep_list.include? pkg then
end
# read binary package_list file
- for os in @support_os_list
+ @support_os_list.each do |os|
@pkg_hash_os[os] = {}
pkg_list_file = "#{@location}/#{PKG_LIST_FILE_PREFIX}#{os}"