i = i + 1
info = item.split(",").map { |x| x.strip }
if info[0] == "DISCONNECTED" then
- puts "#{i}. #{info[0]}"
+ puts "#{i}. #{info[0]}"
else
puts "#{i}. #{info[0]} #{info[1]} server (#{info[2]}|#{info[3]}/#{info[4]}) [#{info[5]}]"
end
# if "--os" is not specified, use pe
-if option[:os].nil? then
+if option[:os].nil? then
option[:os] = "default"
end
end
begin
- case option[:cmd]
- when "build"
+ case option[:cmd]
+ when "build"
result = Utils.parse_server_addr(option[:domain])
if result.nil? then
puts "Server address is incorrect. (#{option[:domain]})"
end
client = BuildCommClient.create( result[0], result[1], nil, 0 )
if not client.nil? then
- client.send "RESOLVE|GIT|#{option[:project]}|#{option[:passwd]}|#{option[:os]}|#{option[:async]}|#{option[:dist]}"
+ client.send "RESOLVE|GIT|#{option[:project]}|#{option[:passwd]}|#{option[:os]}|#{option[:async]}|#{option[:dist]}"
client.print_stream
client.terminate
end
end
when "register"
- # check file exist
+ # check file exist
if not File.exist? option[:package] then
puts "The file does not exist!.. #{option[:package]}"
exit(-1)
-#!/usr/bin/ruby
+#!/usr/bin/ruby
=begin
#set global variable
@WORKING_DIR = nil
-#option parsing
+#option parsing
begin
option = option_parse
rescue => e
option[:os] = Utils::HOST_OS
end
-case option[:cmd]
+case option[:cmd]
when "update" then
client = Client.new( option[:url], nil, nil )
#client.update()
#if not option[:url].nil? then
# client.update()
#end
- client.install( option[:pkg], option[:os], option[:t], option[:f] )
+ client.install( option[:pkg], option[:os], option[:t], option[:f] )
when "install-file" then
client = Client.new( option[:url], option[:loc], nil )
- client.install_local_pkg( option[:pkg], option[:t], option[:f] )
+ client.install_local_pkg( option[:pkg], option[:t], option[:f] )
when "uninstall" then
client = Client.new( nil, option[:loc], nil )
client.uninstall( option[:pkg], option[:t] )
#if not option[:url].nil? then
# client.update()
#end
- puts client.show_pkg_info( option[:pkg], option[:os] )
+ puts client.show_pkg_info( option[:pkg], option[:os] )
when "list-rpkg" then
client = Client.new( option[:url], nil, nil )
#if not option[:url].nil? then
desc = i[2].strip
puts name + " (" + version + ")"
end
- else
+ else
puts "Info: There is no any package."
end
when "build-dep" then
-#!/usr/bin/ruby
+#!/usr/bin/ruby
=begin
require "packageServer"
require "serverOptParser"
-#option parsing
+#option parsing
begin
option = option_parse
rescue => e
exit 0
end
-begin
+begin
if option[:cmd].eql? "list" then
if option[:id].empty? then
- PackageServer.list_id
+ PackageServer.list_id
else
PackageServer.list_dist option[:id]
- end
- exit
- end
+ end
+ exit
+ end
server = PackageServer.new( option[:id] )
- if server.nil?
+ if server.nil?
raise RuntimeError, "server class creation fail"
end
- case option[:cmd]
- when "create"
+ case option[:cmd]
+ when "create"
server.create( option[:id], option[:dist], option[:url], option[:loc] )
when "register"
- server.register( option[:pkgs], option[:dist], option[:gensnap], option[:test], false )
+ server.register( option[:pkgs], option[:dist], option[:gensnap], option[:test], false )
when "gen-snapshot"
- server.generate_snapshot( option[:snaps][0], option[:dist], option[:bsnap] )
+ server.generate_snapshot( option[:snaps][0], option[:dist], option[:bsnap] )
when "sync"
server.sync( option[:dist], option[:force], option[:snaps][0] )
when "add-dist"
if not option[:force] then
puts "Do you want to really? then input \"YES\""
input = $stdin.gets.strip
- if input.eql? "YES" then
+ if input.eql? "YES" then
puts "Remove server!"
else
puts "Remove is canceled by user input"
if not option[:force] then
puts "Do you want to really? then input \"YES\""
input = $stdin.gets.strip
- if input.eql? "YES" then
+ if input.eql? "YES" then
puts "Remove server!"
else
puts "Remove is canceled by user input"
else
raise RuntimeError, "input option incorrect : #{option[:cmd]}"
end
-rescue => e
+rescue => e
puts e.message
end
config_file = "#{@path}/build"
pkg_name=nil
File.open( config_file, "r" ) do |f|
- f.each_line do |l|
+ f.each_line do |l|
idx = l.index("=") + 1
- length = l.length - idx
+ length = l.length - idx
if l.start_with?("PACKAGE_NAME=")
pkg_name = l[idx,length].strip
=begin
- BuildClientOptionParser.rb
+ BuildClientOptionParser.rb
Copyright (c) 2000 - 2011 Samsung Electronics Co., Ltd. All rights reserved.
require 'utils'
class BuildClientUsage
- BUILD="build-cli build -N <project name> -d <server address> [-o <os>] [-w <password>] [--async] [-D <distribution name>]"
- RESOLVE="build-cli resolve -N <project name> -d <server address> [-o <os>] [-w <password>] [--async] [-D <distribution name>]"
- QUERY="build-cli query -d <server address>"
- QUERY_SYSTEM="build-cli query-system -d <server address>"
- QUERY_PROJECT="build-cli query-project -d <server address>"
- QUERY_JOB="build-cli query-job -d <server address>"
- CANCEL="build-cli cancel -j <job number> -d <server address> [-w <password>]"
- REGISTER="build-cli register -P <package file> -d <server address> [-t <ftp server url>] [-w <password>] [-D <distribution name>]"
+ BUILD="build-cli build -N <project name> -d <server address> [-o <os>] [-w <password>] [--async] [-D <distribution name>]"
+ RESOLVE="build-cli resolve -N <project name> -d <server address> [-o <os>] [-w <password>] [--async] [-D <distribution name>]"
+ QUERY="build-cli query -d <server address>"
+ QUERY_SYSTEM="build-cli query-system -d <server address>"
+ QUERY_PROJECT="build-cli query-project -d <server address>"
+ QUERY_JOB="build-cli query-job -d <server address>"
+ CANCEL="build-cli cancel -j <job number> -d <server address> [-w <password>]"
+ REGISTER="build-cli register -P <package file> -d <server address> [-t <ftp server url>] [-w <password>] [-D <distribution name>]"
end
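# Example invocations for the commands above (project name, OS name and
# package file are placeholders; 2222 is the default build-server port):
#   build-cli build -N myproject -d 127.0.0.1:2222 -o ubuntu-32 --async
#   build-cli query-job -d 127.0.0.1:2222
#   build-cli register -P ./mypkg_0.0.1_ubuntu-32.zip -d 127.0.0.1:2222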
when "query" then
if options[:domain].nil? or options[:domain].empty? then
raise ArgumentError, "Usage: " + BuildClientUsage::QUERY
- end
+ end
when "query-system" then
if options[:domain].nil? or options[:domain].empty? then
raise ArgumentError, "Usage: " + BuildClientUsage::QUERY_SYSTEM
- end
+ end
when "query-project" then
if options[:domain].nil? or options[:domain].empty? then
raise ArgumentError, "Usage: " + BuildClientUsage::QUERY_PROJECT
- end
+ end
when "query-job" then
if options[:domain].nil? or options[:domain].empty? then
raise ArgumentError, "Usage: " + BuildClientUsage::QUERY_JOB
- end
+ end
when "cancel" then
if options[:job].nil? or options[:job].empty? or
# Set a banner, displayed at the top
# of the help screen.
- opts.banner = banner
+ opts.banner = banner
opts.on( '-N', '--project <project name>', 'project name' ) do|project|
if not Utils.multi_argument_test( project, "," ) then
end
- cmd = ARGV[0]
+ cmd = ARGV[0]
if cmd.eql? "build" or cmd.eql? "resolve" or
cmd.eql? "query" or cmd.eql? "query-system" or
cmd.eql? "query-project" or cmd.eql? "query-job" or
cmd.eql? "cancel" or
cmd.eql? "register" or
- cmd =~ /(-v)|(--version)/ or
+ cmd =~ /(-v)|(--version)/ or
cmd =~ /(help)|(-h)|(--help)/ then
if cmd.eql? "help" then
- ARGV[0] = "-h"
+ ARGV[0] = "-h"
end
options[:cmd] = ARGV[0]
option_error_check options
return options
-end
+end
end
def set_no_reverse()
- @no_reverse = true
- end
+ @no_reverse = true
+ end
def set_internal_job( dock_num )
# set force rebuild
- # This make project to build
+ # This makes the project build
# even though there is a package of the same version on the pkg-server
def set_force_rebuild(value)
@force_rebuild = value
if job.get_parent_job() == self and job.cancel_state == "NONE" then
job.cancel_state = "INIT"
end
- end
+ end
# cancel log print
if not @log.nil? then
when "PENDING" then
if @pending_ancestor.nil? then
#resolve pending job
- pending_descendants = @server.jobmgr.jobs.select do |j|
+ pending_descendants = @server.jobmgr.jobs.select do |j|
(not j.pending_ancestor.nil?) and "#{j.pending_ancestor.id}" == "#{@id}"
end
pending_descendants.each do |pd|
# remove the project that depends on me if exist
# and add it into rev_fail_project list if not exist
- p_sub_jobs = @server.jobmgr.jobs.select do |j|
- ( not j.pending_ancestor.nil? and
+ p_sub_jobs = @server.jobmgr.jobs.select do |j|
+ ( not j.pending_ancestor.nil? and
"#{j.pending_ancestor.id}" == "#{@pending_ancestor.id}" and
- j.is_build_dependent_project(@project, @os) )
+ j.is_build_dependent_project(@project, @os) )
end
p_sub_jobs.each do |d|
@pending_ancestor.remove_rev_success_job(d)
other_project = o.get_project()
# check project name
- if my_project.nil? or other_project.nil? or
+ if my_project.nil? or other_project.nil? or
my_project.name != other_project.name then
return false
end
# check version
if @pkginfo.nil? or o.pkginfo.nil? or
not (Version.new(@pkginfo.get_version()) == Version.new(o.pkginfo.get_version())) then
- return false
+ return false
end
- # check compat os
+ # check compat os
@pkginfo.get_target_packages(@os).each do |p|
if not p.os_list.include?(o.os) then return false end
end
# compare build dependency
get_build_dependencies(@os).each do |dep|
wjob.get_packages().each do |wpkg|
- # dep packages of my job must have same name and target os
+ # dep packages of my job must have same name and target os
# with packages in working job
if dep.package_name == wpkg.package_name and
dep.target_os_list.include? wjob.os then
get_packages().each do |pkg|
wjob.get_build_dependencies(wjob.os).each do |dep|
- # dep package of working job must have same name and target os
+ # dep package of working job must have same name and target os
# with packages in my job
if dep.package_name == pkg.package_name and
dep.target_os_list.include? @os then
def get_build_dependent_projects()
if @build_dep_prjs.nil? then
deps = @pkginfo.get_build_dependencies(@os)
- pkgs = deps.map{|x|
+ pkgs = deps.map do |x|
# if "os" is not specified, use my "os"
if x.target_os_list.nil? or x.target_os_list.empty? then
os = @os
# package as item
@pkgsvr_client.get_pkg_from_list(x.package_name, os)
- }
+ end
prjs = @server.prjmgr.get_projects_from_pkgs(pkgs)
@build_dep_prjs = prjs
end
#
- # main module
+ # main module
protected
def job_main()
@log.info( "Invoking a thread for building Job #{@id}", Log::LV_USER)
# if not found, check package server
found = false
if not @parent.nil? and @parent.type == "MULTIBUILD" then
- @parent.get_sub_jobs().each { |j|
+ @parent.get_sub_jobs().each do |j|
os = (dep.target_os_list.empty?) ? @os : dep.target_os_list[0]
if j.pkginfo.pkg_exist?(dep.package_name, dep.base_version, os) then
found = true; break
end
- }
+ end
end
if found then next end
# unmet dependencies found , report the errors
if not unmet_bdeps.empty? or not unmet_ideps.empty? then
@log.error( "Unmet dependency found!", Log::LV_USER)
- unmet_bdeps.each { |d|
+ unmet_bdeps.each do |d|
os = (d.target_os_list.empty?) ? @os : d.target_os_list[0]
@log.error( " * #{d.package_name}(#{os}) for build-dependency", Log::LV_USER)
- }
- unmet_ideps.each { |d|
+ end
+ unmet_ideps.each do |d|
os = (d.target_os_list.empty?) ? @os : d.target_os_list[0]
@log.error( " * #{d.package_name}(#{os}) for install-dependency", Log::LV_USER)
- }
+ end
return false
else
# it will return nil if not exist
# this process must be skipped if it is a sub-job
if not @is_rev_build_check_job and not @is_internal_job then
- @server.cancel_lock.synchronize{
+ @server.cancel_lock.synchronize do
@pending_ancestor = get_pending_ancestor_job()
- }
+ end
end
if not @pending_ancestor.nil? then
lver = Utils.get_version_from_package_file(lp)
los = Utils.get_os_from_package_file( lp )
if lpname == p.package_name and o == los and lver == p.version then
- compat_pkgs.push [p.package_name,o,lp]
+ compat_pkgs.push [p.package_name,o,lp]
compat_found = true
break
end
# check other package already in package server
ver_svr = @pkgsvr_client.get_attr_from_pkg( p.package_name, o, "version")
if not ver_svr.nil? and p.version.eql? ver_svr then
- compat_pkgs.push [p.package_name,o,nil]
+ compat_pkgs.push [p.package_name,o,nil]
compat_found = true
break
end
else
@log.info( "Downloading compatible package:#{pkg_name}(#{cos})", Log::LV_USER)
loc = @pkgsvr_client.download(pkg_name, cos, false)
- if loc.nil? or loc.count != 1 then
+ if loc.nil? or loc.count != 1 then
@log.warn( "Downloading compatible package failed!:#{pkg_name}(#{cos})", Log::LV_USER)
compat_ok = false
- break
+ break
end
ext = File.extname(loc[0])
base_package_name= File.basename(loc[0], "#{cos}#{ext}")
@log.error( "Reverse-build-check failed!" )
return false
end
- end
+ end
return true
end
end
@log.info( " - Log Path : #{@log.path}" )
- # build
+ # build
if @is_remote_job then
result = builder.build_job(self, [])
else
return true
end
- # pending
+ # pending
@status = "PENDING"
@log.info( "Entered the PENDING state ...", Log::LV_USER)
old_msg = ""
old_msg = new_msg
end
sleep 1
- end
+ end
return true
end
old_msg = ""
wait_prjs = @pending_ancestor.rev_fail_projects.select {|p| is_build_dependent_project(p[0], p[1])}
@log.info("Checking build dependency before RESOLVE", Log::LV_USER)
- while not wait_prjs.empty?
+ while not wait_prjs.empty?
@status = "PENDING"
new_msg = wait_prjs.map {|p| "#{p[0].name}(#{p[1]})"}.join(", ")
if new_msg != old_msg then
ver = @pending_ancestor.pkginfo.get_version()
@pending_ancestor.pkginfo.get_target_packages(@os).each do |pkg|
local_pkgs.push "#{src_path}/#{pkg.package_name}_#{ver}_#{@os}.zip"
- end
+ end
@pending_ancestor.rev_success_jobs.each do |job|
src_path = job.source_path
ver = job.pkginfo.get_version()
job.pkginfo.get_target_packages(@os).each do |pkg|
local_pkgs.push "#{src_path}/#{pkg.package_name}_#{ver}_#{@os}.zip"
end
- end
+ end
# build
if @is_remote_job then
file = "#{@source_path}/../log"
FileUtils.copy_file(file, "#{outgoing_dir}/remote_log")
- # copy result files, if not reverse build
+ # copy result files, if not reverse build
if not @is_rev_build_check_job then
return copy_result_files( outgoing_dir )
else
def get_local_path_of_dependency( dep, parent )
dep_target_os = get_os_of_dependency(dep)
- # search my parent job and its parent job
+ # search my parent job and its parent job
binpkgs = Dir.glob("#{parent.source_path}/#{dep.package_name}_*_#{dep_target_os}.zip")
if binpkgs.count == 0 and not parent.get_parent_job().nil? then
binpkgs = Dir.glob("#{parent.get_parent_job().source_path}/#{dep.package_name}_*_#{dep_target_os}.zip")
chained_deps = get_local_chained_dependencies( deps, parent )
# get all local path of dependencies
- chained_deps.each { |dep|
+ chained_deps.each do |dep|
new_path = get_local_path_of_dependency(dep, parent)
if not new_path.nil? then
pkg_paths.push new_path
end
- }
+ end
# remove duplicates
pkg_paths.uniq!
chained_deps = []
chained_deps += deps
- # if parent is multi build job, gether all install dependency of dependency.
+ # if the parent is a multi build job, gather all install dependencies of the dependencies.
if parent.type == "MULTIBUILD" then
begin
old_deps_count = chained_deps.count
new_deps = []
- chained_deps.each { |dep|
+ chained_deps.each do |dep|
dep_target_os = get_os_of_dependency(dep)
- parent.get_sub_jobs().each { |j|
+ parent.get_sub_jobs().each do |j|
new_deps += j.pkginfo.get_install_dependencies(dep_target_os, dep.package_name)
- }
- }
+ end
+ end
chained_deps += new_deps
chained_deps.uniq! {|d| d.package_name }
end while chained_deps.count != old_deps_count
def remote_package_of_dependency_exist?(dep)
dep_target_os = get_os_of_dependency(dep)
- # search
+ # search
ver_svr = @pkgsvr_client.get_attr_from_pkg( dep.package_name, dep_target_os, "version")
if ver_svr.nil? then return false end
- if not dep.match? ver_svr then return false end
+ if not dep.match? ver_svr then return false end
return true
end
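# A rough sketch of the version test assumed behind "dep.match?" above;
# the real implementation may differ. A dependency without a base version
# matches anything, otherwise the server version must not be older.
def match_version_sketch?( base_version, server_version )
  if base_version.nil? then return true end
  if Version.new(server_version) < Version.new(base_version) then
    return false
  else
    return true
  end
end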
@cleaner=nil
@prjmgr = ProjectManager.new(self)
@distmgr = DistributionManager.new(self)
- #
+ #
@transport_path = "#{@path}/transport"
@cancel_lock = Mutex.new
@supported_os_list = []
curr_time = Time.now
if (curr_time - start_time).to_i > @test_time then
puts "Test time is elapsed!"
- break
+ break
end
else
sleep 1
@friend_servers.each do |svr|
if svr.ip.eql? ip and svr.port == port then
return false
- end
+ end
end
# create new one, and add it into list
if svr.ip.eql? ip and svr.port == port then
@friend_servers.delete svr
return true
- end
+ end
end
# if not exist, return false
@supported_os_list.each do |os|
if os.eql? os_name then
return false
- end
+ end
end
# add it into list
if os.eql? os_name then
@supported_os_list.delete os
return true
- end
+ end
end
return false
end
- # get remote server
+ # get remote server
def get_available_server ( job )
candidates = []
if job.type != "REGISTER" and job.type != "MULTIBUILD" then
@friend_servers.each do |server|
if ( server.status == "RUNNING" and server.can_build?( job ) and
- not server.has_waiting_jobs and
- server.get_file_transfer_cnt() == 0 and
+ not server.has_waiting_jobs and
+ server.get_file_transfer_cnt() == 0 and
server.get_number_of_empty_room > 0 )
candidates.push server
end
best_server = candidates[0]
if best_server.nil? or candidates.count == 1 then return best_server end
- # get best
+ # get best
# it is better if working jobs count is less
max_empty_room = best_server.get_number_of_empty_room
candidates.each do |server|
- # check whether idle, use it
+ # if the server is idle, use it right away
if not server.has_working_jobs then return server end
# skip
#if not found, check friends
@friend_servers.each do |server|
- if server.status == "RUNNING" and
+ if server.status == "RUNNING" and
job.can_be_built_on? server.host_os then
return true
end
# remote_build_servers
db.do "DELETE FROM remote_build_servers"
- @friend_servers.each { |svr|
+ @friend_servers.each do |svr|
db.do "INSERT INTO remote_build_servers (svr_addr) VALUES('#{svr.ip}:#{svr.port}')"
- }
-
- # sync_pkg_servers
- db.do "DELETE FROM sync_pkg_servers"
- @remote_pkg_servers.each { |svr|
- db.do "INSERT INTO sync_pkg_servers (pkgsvr_url,period,dist_name) VALUES('#{svr[0]}','#{@pkg_sync_period}','#{svr[1]}')"
- }
-
- # supported_os
- db.do "DELETE FROM supported_os"
- @supported_os_list.each { |os|
- db.do "INSERT INTO supported_os VALUES('#{os}')"
- }
-
- rescue DBI::DatabaseError => e
- puts "DB update failed!"
- puts e.errstr
- result = false
- ensure
- db.disconnect if db
end
- return result
- end
-
-
- # load
- def load_db()
-
- result = true
- # distribution
- @distmgr.load_db()
-
- sqlite_db_file = "DBI:SQLite3:#{BuildServer::CONFIG_ROOT}/#{@id}/server.db"
- begin
- # open DB
- db = DBI.connect(sqlite_db_file)
- #db.results_as_hash = true
-
- # remote_build_servers
- @friend_servers = []
- sth = db.execute "SELECT * FROM remote_build_servers"
- sth.fetch_hash { |row|
- add_remote_server(
- row['svr_addr'].split(":")[0],
- row['svr_addr'].split(":")[1].to_i)
- }
- sth.finish
- # sync_package_servers
- @remote_pkg_servers = []
- sth = db.execute "SELECT * FROM sync_pkg_servers"
- sth.fetch_hash { |row|
- add_sync_package_server( row['pkgsvr_url'], row['dist_name'] )
- @pkg_sync_period = row['period'].to_i
- }
- sth.finish
+ # sync_pkg_servers
+ db.do "DELETE FROM sync_pkg_servers"
+ @remote_pkg_servers.each do |svr|
+ db.do "INSERT INTO sync_pkg_servers (pkgsvr_url,period,dist_name) VALUES('#{svr[0]}','#{@pkg_sync_period}','#{svr[1]}')"
+ end
# supported_os
- @supported_os_list = []
- sth = db.execute "SELECT * FROM supported_os"
- sth.fetch_hash { |row|
- add_target_os( row['name'] )
- }
- sth.finish
- rescue DBI::DatabaseError => e
- puts "DB loading failed!"
- puts e.errstr
- result = false
- ensure
- sth.finish if not sth.finished?
- db.disconnect if db
- end
- return result
- end
+ db.do "DELETE FROM supported_os"
+ @supported_os_list.each do |os|
+ db.do "INSERT INTO supported_os VALUES('#{os}')"
+ end
+
+ rescue DBI::DatabaseError => e
+ puts "DB update failed!"
+ puts e.errstr
+ result = false
+ ensure
+ db.disconnect if db
+ end
+
+ return result
+ end
+
+
+ # load
+ def load_db()
+
+ result = true
+ # distribution
+ @distmgr.load_db()
+
+ sqlite_db_file = "DBI:SQLite3:#{BuildServer::CONFIG_ROOT}/#{@id}/server.db"
+ begin
+ # open DB
+ db = DBI.connect(sqlite_db_file)
+ #db.results_as_hash = true
+
+ # remote_build_servers
+ @friend_servers = []
+ sth = db.execute "SELECT * FROM remote_build_servers"
+ sth.fetch_hash do |row|
+ add_remote_server(
+ row['svr_addr'].split(":")[0],
+ row['svr_addr'].split(":")[1].to_i)
+ end
+ sth.finish
+ # sync_package_servers
+ @remote_pkg_servers = []
+ sth = db.execute "SELECT * FROM sync_pkg_servers"
+ sth.fetch_hash do |row|
+ add_sync_package_server( row['pkgsvr_url'], row['dist_name'] )
+ @pkg_sync_period = row['period'].to_i
+ end
+ sth.finish
+
+ # supported_os
+ @supported_os_list = []
+ sth = db.execute "SELECT * FROM supported_os"
+ sth.fetch_hash do |row|
+ add_target_os( row['name'] )
+ end
+ sth.finish
+ rescue DBI::DatabaseError => e
+ puts "DB loading failed!"
+ puts e.errstr
+ result = false
+ ensure
+ sth.finish if not sth.finished?
+ db.disconnect if db
+ end
+ return result
+ end
end
@@instance_map[id].log = Log.new( "#{BuildServer::CONFIG_ROOT}/#{id}/log" )
# add distribution
- if not pkgsvr_url.nil? then
+ if not pkgsvr_url.nil? then
@@instance_map[id].distmgr.add_distribution("BASE", pkgsvr_url, pkgsvr_addr, pkgsvr_port)
end
# check instance first
if not @@instance_map[id] == nil
- return @@instance_map[id]
+ return @@instance_map[id]
end
- # check server config
- if not File.exist? "#{BuildServer::CONFIG_ROOT}/#{id}/server.cfg"
+ # check server config
+ if not File.exist? "#{BuildServer::CONFIG_ROOT}/#{id}/server.cfg"
raise RuntimeError, "The server \"#{id}\" does not exist!"
end
def self.start_server( id, port = 2222 )
server = get_server(id)
- # write run port
+ # write run port
server_dir = "#{BuildServer::CONFIG_ROOT}/#{server.id}"
f = File.open( "#{server_dir}/run", "w" )
f.puts port
- f.close
+ f.close
# start
server.port = port
end
end
- # terminate
+ # terminate
client.terminate
- if not stop_ok then
+ if not stop_ok then
puts "Server stop failed!"
end
end
end
- # terminate
+ # terminate
client.terminate
- if not upgrade_ok then
+ if not upgrade_ok then
puts "Server upgrade failed!"
end
client = BuildCommClient.create( ip, port )
if client.nil? then
puts "Friend Server #{ip}:#{port} is not running!"
- next
+ next
end
# send request
upgrade_ok = false
end
end
- # terminate
+ # terminate
client.terminate
- if upgrade_ok then
+ if upgrade_ok then
puts "Friend Server #{ip}:#{port} upgrade requested!"
else
puts "Friend Server #{ip}:#{port} upgrade failed!"
end
else
puts "No Friend Server."
- end
+ end
return true
end
def self.add_sync_package_server(id, url, dist_name)
server = get_server(id)
- # check distribution
+ # check distribution
dist_name = check_distribution_name(dist_name, server)
- if dist_name.nil? then return false end
+ if dist_name.nil? then return false end
# add
if server.add_sync_package_server( url, dist_name ) then
def self.remove_sync_package_server(id, url, dist_name)
server = get_server(id)
- # check distribution
+ # check distribution
dist_name = check_distribution_name(dist_name, server)
- if dist_name.nil? then return false end
+ if dist_name.nil? then return false end
# remove
if server.remove_sync_package_server( url, dist_name ) then
# get server
server = get_server(id)
- # get supported os for project.
+ # get supported os for project.
# if not specified, all supported os of the server will be used
if os_string.nil? or os_string.empty? then
os_list = server.supported_os_list
end
# add
- if git_repos.nil? or git_branch.nil? then
+ if git_repos.nil? or git_branch.nil? then
puts "Git repository or branch must be specified!"
return false
end
- # check distribution
+ # check distribution
dist_name = check_distribution_name(dist_name, server)
- if dist_name.nil? then return false end
+ if dist_name.nil? then return false end
result = server.prjmgr.add_git_project( project_name, git_repos, git_branch, passwd, os_list, dist_name )
if result then
# get server
server = get_server(id)
- # get supported os for project.
+ # get supported os for project.
# if not specified, all supported os of the server will be used
if os_string.nil? or os_string == "default" then
os_list = server.supported_os_list
return false
end
end
- end
+ end
- # check distribution
+ # check distribution
dist_name = check_distribution_name(dist_name, server)
- if dist_name.nil? then return false end
+ if dist_name.nil? then return false end
# add
result = server.prjmgr.add_binary_project( project_name, pkg_name, passwd,
# get server
server = get_server(id)
- # check distribution
+ # check distribution
dist_name = check_distribution_name(dist_name, server)
- if dist_name.nil? then return false end
+ if dist_name.nil? then return false end
result = server.prjmgr.remove_project( project_name, dist_name )
if result then
# server
server = get_server(id)
- # check distribution
+ # check distribution
dist_name = check_distribution_name(dist_name, server)
- if dist_name.nil? then return false end
+ if dist_name.nil? then return false end
client = BuildCommClient.create( "127.0.0.1", server.port )
if client.nil? then
end
end
- # terminate
+ # terminate
client.terminate
- if not fullbuild_ok then
+ if not fullbuild_ok then
puts "Full build failed!"
end
# server
server = get_server(id)
- # check distribution
+ # check distribution
dist_name = check_distribution_name(dist_name, server)
- if dist_name.nil? then return false end
+ if dist_name.nil? then return false end
# check file exist?
if not File.exist? file_path then
end
end
- # terminate
+ # terminate
client.terminate
- if not success then
+ if not success then
puts "Registering package failed!"
end
server.prjmgr.load()
puts "* REMOTE SERVER(S) *"
- server.friend_servers.each { |s|
+ server.friend_servers.each do |s|
puts " * #{s.ip}:#{s.port}"
- }
+ end
puts ""
puts "* SUPPORTED OS *"
- server.supported_os_list.each { |os|
+ server.supported_os_list.each do |os|
puts " * #{os}"
- }
+ end
puts ""
puts "* DISTRIBUTION(S) *"
- server.distmgr.get_all_distributions().each { |d|
+ server.distmgr.get_all_distributions().each do |d|
puts " * #{d.name}"
- }
+ end
puts ""
puts "* SYNC PACKAGE SERVER(S) *"
- server.remote_pkg_servers.each { |s|
+ server.remote_pkg_servers.each do |s|
puts " * [#{s[1]}] #{s[0]}"
- }
+ end
puts ""
puts "* PROJECT(S) *"
- server.prjmgr.projects.each { |p|
+ server.prjmgr.projects.each do |p|
puts " * [#{p.dist_name}] #{p.name}"
- }
+ end
end
when "MAX_WORKING_JOBS"
server.jobmgr.max_working_jobs = value.to_i
when "JOB_LOG_URL"
- server.job_log_url = value
+ server.job_log_url = value
when "SEND_MAIL"
server.send_mail = value
when "TEST_TIME"
# read configuration
server_dir = "#{BuildServer::CONFIG_ROOT}/#{id}"
File.open( "#{server_dir}/server.cfg", "r" ) do |f|
- f.each_line do |l|
+ f.each_line do |l|
idx = l.index("=") + 1
- length = l.length - idx
+ length = l.length - idx
if l.start_with?("PATH=")
path = l[idx,length].strip
elsif l.start_with?("CHANGELOG_CHECK=")
changelog_check = true if l[idx,length].strip =~ /true/i
else
- next
- end
+ next
+ end
end
end
# set send mail
obj.send_mail = send_mail
- # set test time
+ # set test time
obj.test_time = test_time
# set password
when "create"
if options[:name].nil? or options[:name].empty? then
raise ArgumentError, "Usage: " + BuildServerUsage::CREATE
- end
+ end
when "remove"
- if options[:name].nil? or options[:name].empty? then
+ if options[:name].nil? or options[:name].empty? then
raise ArgumentError, "Usage: " + BuildServerUsage::REMOVE
end
end
when "add-svr"
- if options[:name].nil? or options[:name].empty? or
+ if options[:name].nil? or options[:name].empty? or
(options[:domain].nil? or options[:domain].empty?) then
raise ArgumentError, "Usage: " + BuildServerUsage::ADD_SVR
end
if options[:name].nil? or options[:name].empty? or
options[:os].nil? or options[:os].empty? then
raise ArgumentError, "Usage: " + BuildServerUsage::ADD_OS
- end
+ end
when "remove-os"
if options[:name].nil? or options[:name].empty? or
options[:os].nil? or options[:os].empty? then
raise ArgumentError, "Usage: " + BuildServerUsage::REMOVE_OS
- end
+ end
when "add-dist"
if options[:name].nil? or options[:name].empty? or
options[:url].nil? or options[:url].empty? or
options[:domain].nil? or options[:domain].empty? then
raise ArgumentError, "Usage: " + BuildServerUsage::ADD_DIST
- end
+ end
when "remove-dist"
if options[:name].nil? or options[:name].empty? or
options[:dist].nil? or options[:dist].empty? then
raise ArgumentError, "Usage: " + BuildServerUsage::REMOVE_DIST
- end
+ end
when "add-sync"
- if options[:name].nil? or options[:name].empty? or
+ if options[:name].nil? or options[:name].empty? or
(options[:url].nil? or options[:url].empty?) then
raise ArgumentError, "Usage: " + BuildServerUsage::ADD_SYNC
end
when "remove-sync"
- if options[:name].nil? or options[:name].empty? or
+ if options[:name].nil? or options[:name].empty? or
(options[:url].nil? or options[:url].empty?) then
raise ArgumentError, "Usage: " + BuildServerUsage::REMOVE_SYNC
end
if options[:name].nil? or options[:name].empty? or
options[:pid].nil? or options[:pid].empty? then
raise ArgumentError, "Usage: " + BuildServerUsage::ADD_PRJ
- end
+ end
when "remove-prj"
if options[:name].nil? or options[:name].empty? or
options[:pid].nil? or options[:pid].empty? then
raise ArgumentError, "Usage: " + BuildServerUsage::REMOVE_PRJ
- end
+ end
when "fullbuild"
if options[:name].nil? or options[:name].empty? then
else
raise ArgumentError, "Input is incorrect : #{options[:cmd]}"
- end
+ end
end
-def option_parse
+def option_parse
options = {}
banner = "Build-server administer service command-line tool." + "\n" \
+ "\n" + "Usage: build-svr <SUBCOMMAND> [OPTS] or build-svr (-h|-v)" + "\n" \
# Set a banner, displayed at the top
# of the help screen.
- opts.banner = banner
+ opts.banner = banner
opts.on( '-n', '--name <server name>', 'build server name' ) do|name|
- options[:name] = name
+ options[:name] = name
end
opts.on( '-u', '--url <package server url>', 'package server url: http://127.0.0.1/dibs/unstable' ) do|url|
- options[:url] = url
+ options[:url] = url
end
opts.on( '-d', '--address <server address>', 'server address: 127.0.0.1:2224' ) do|domain|
end
- cmd = ARGV[0]
- if cmd.eql? "create" or cmd.eql? "remove" or
+ cmd = ARGV[0]
+ if cmd.eql? "create" or cmd.eql? "remove" or
cmd.eql? "start" or cmd.eql? "upgrade" or
- cmd.eql? "stop" or
+ cmd.eql? "stop" or
cmd.eql? "add-svr" or cmd.eql? "remove-svr" or
- cmd.eql? "add-os" or cmd.eql? "remove-os" or
- cmd.eql? "add-dist" or cmd.eql? "remove-dist" or
- cmd.eql? "add-sync" or cmd.eql? "remove-sync" or
- cmd.eql? "add-prj" or cmd.eql? "remove-prj" or
- cmd.eql? "fullbuild" or cmd.eql? "register" or
+ cmd.eql? "add-os" or cmd.eql? "remove-os" or
+ cmd.eql? "add-dist" or cmd.eql? "remove-dist" or
+ cmd.eql? "add-sync" or cmd.eql? "remove-sync" or
+ cmd.eql? "add-prj" or cmd.eql? "remove-prj" or
+ cmd.eql? "fullbuild" or cmd.eql? "register" or
cmd.eql? "query" or
- cmd.eql? "set-attr" or cmd.eql? "get-attr" or
- cmd =~ /(-v)|(--version)/ or
+ cmd.eql? "set-attr" or cmd.eql? "get-attr" or
+ cmd =~ /(-v)|(--version)/ or
cmd =~ /(help)|(-h)|(--help)/ then
- if cmd.eql? "help" then
- ARGV[0] = "-h"
+ if cmd.eql? "help" then
+ ARGV[0] = "-h"
end
options[:cmd] = ARGV[0]
else
- raise ArgumentError, "Usage: build-svr <SUBCOMMAND> [OPTS] or build-svr -h"
+ raise ArgumentError, "Usage: build-svr <SUBCOMMAND> [OPTS] or build-svr -h"
end
optparse.parse!
option_error_check options
return options
-end
+end
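# Example invocations for the subcommands above (server name, address and
# package-server URL are placeholder values):
#   build-svr create -n myserver
#   build-svr add-svr -n myserver -d 127.0.0.1:2224
#   build-svr add-sync -n myserver -u http://127.0.0.1/dibs/unstable
#   build-svr fullbuild -n myserver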
@server = server
if not server.jobmgr.nil? then
@id = server.jobmgr.get_new_job_id()
- else
+ else
@id = 0
end
end
- # set logger
+ # set logger
public
def set_logger( logger )
@log = logger
# parent job will call sub job's terminate method
if not is_sub_job? then terminate() end
- rescue => e
- @log.error e.message
- @log.error e.backtrace.inspect
- end
+ rescue => e
+ @log.error e.message
+ @log.error e.backtrace.inspect
+ end
end
if sync then
@thread.join
end
- return true
+ return true
end
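# A minimal usage sketch of the execute path above: the optional argument
# controls whether the caller waits for the job thread to finish.
#   job.execute         # asynchronous: returns right after spawning
#   job.execute(true)   # synchronous: joins the job thread before returning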
# PROTECTED METHODS
#
- # main module
+ # main module
protected
def job_main
# do nothing
# write to file
info_file = "#{@path}/extra"
File.open( info_file, "w" ) do |f|
- @extra_infos.each { |key,value|
+ @extra_infos.each do |key,value|
f.puts "#{key} : #{value}"
- }
+ end
end
end
# get distribution
def get_distribution(name)
- @distributions.each { |d|
+ @distributions.each do |d|
if d.name == name then
return d
end
- }
+ end
return nil
end
# check exist?
- def distribution_exist?(name)
+ def distribution_exist?(name)
return get_distribution(name).nil? ? false : true
end
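# A minimal sketch (an assumed helper, not part of the original class):
# register the distribution only when it is not known yet, then return it.
def ensure_distribution( name, pkgsvr_url, pkgsvr_ip, pkgsvr_port )
  if not distribution_exist?(name) then
    add_distribution( name, pkgsvr_url, pkgsvr_ip, pkgsvr_port )
  end
  return get_distribution(name)
end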
- # add
+ # add
def add_distribution(name, pkgsvr_url, pkgsvr_ip, pkgsvr_port)
load_db()
if distribution_exist?(name) then
@distributions.each do |dist|
dist_addr = dist.pkgsvr_ip + ":" + dist.pkgsvr_port.to_s
db.do "INSERT INTO distributions VALUES('#{dist.name}','#{dist.pkgsvr_url}','#{dist_addr}')"
- end
- rescue DBI::DatabaseError => e
- puts "DB update failed!"
- puts e.errstr
- result = false
- ensure
- db.disconnect if db
end
- return result
+ rescue DBI::DatabaseError => e
+ puts "DB update failed!"
+ puts e.errstr
+ result = false
+ ensure
+ db.disconnect if db
+ end
+ return result
end
# if succeeded, register source info and copy pkginfo.manifest
@log.info( "Updating the source info for project \"#{@project.name}\"" , Log::LV_USER)
@project.add_source_info( @pkginfo.get_version(), @git_commit)
- @project.copy_package_info( @pkginfo.get_version(),
+ @project.copy_package_info( @pkginfo.get_version(),
"#{@source_path}/package/pkginfo.manifest")
@project.set_log_cnt( @log.cnt )
@project.write_ext_info
Builder.remove("JB#{@id}")
end
- # send mail
- if ( @server.send_mail.eql? "YES" ) and ( not @pkginfo.nil? ) and ( not @pkginfo.packages.nil? ) then
+ # send mail
+ if ( @server.send_mail.eql? "YES" ) and ( not @pkginfo.nil? ) and ( not @pkginfo.packages.nil? ) then
mail_list = []
contents = []
contents.push " "
contents.push "%-30s| %10s | %10s" % ["package name", "version", "os"]
contents.push "---------------------------------------------------------------"
@pkginfo.packages.each do |pkg|
- if not pkg.os.eql? @os then next end
- mail_list = mail_list | Mail.parse_email( pkg.maintainer )
+ if not pkg.os.eql? @os then next end
+ mail_list = mail_list | Mail.parse_email( pkg.maintainer )
contents.push("%-30s| %10s | %10s" % [ pkg.package_name, pkg.version, pkg.os] )
end
# if internal job, copy external_pkgs
if @is_internal_job then
@log.info( "Copying external dependent pkgs...", Log::LV_USER)
- ext_pkgs_dir = "#{@job_root}/external_pkgs"
+ ext_pkgs_dir = "#{@job_root}/external_pkgs"
incoming_dir = "#{@server.transport_path}/#{@dock_num}"
if File.exist? incoming_dir then
protected
def get_source_code()
- $git_mutex.synchronize {
+ $git_mutex.synchronize do
get_source_code_internal()
- }
+ end
end
if result_line != nil then
result_line.each do |l|
if l.start_with?("commit ") then
- commit_id = l.split(" ")[1].strip
+ commit_id = l.split(" ")[1].strip
end
end
end
build_command = "cd \"#{working_dir}\";#{@server.git_bin_path} #{cmd}"
pid, status = execute_command( build_command )
- if not status.nil? and status.exitstatus != 0 then
- return false
+ if not status.nil? and status.exitstatus != 0 then
+ return false
else
return true
end
# initialize
def initialize( name, server, os_list, dist_name, repos = nil, branch = nil )
super(name, "GIT", server, os_list, dist_name)
- @repository = repos
+ @repository = repos
@branch = branch
@source_infos = {}
@package_infos = {}
return new_job
end
- # get latest package version
+ # get latest package version
def get_latest_version()
versions = @package_infos.keys
if not versions.empty? then
end
- # get all package version
+ # get all package version
def get_all_versions()
return @package_infos.keys
end
# write to file
sources_file = "#{@path}/sources"
File.open( sources_file, "w" ) do |f|
- @source_infos.each { |key,value|
+ @source_infos.each do |key,value|
f.puts "#{key},#{value}"
- }
+ end
end
end
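# A minimal sketch of reading the "sources" file back, assuming the
# "version,info" lines written above (helper name is illustrative).
def read_source_infos( sources_file )
  infos = {}
  if not File.exist? sources_file then return infos end
  File.open( sources_file, "r" ) do |f|
    f.each_line do |l|
      version, info = l.strip.split(",", 2)
      if not version.nil? and not info.nil? then infos[version] = info end
    end
  end
  return infos
end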
repos="none"
branch="master"
File.open( config_file, "r" ) do |f|
- f.each_line do |l|
+ f.each_line do |l|
idx = l.index("=") + 1
- length = l.length - idx
+ length = l.length - idx
if l.start_with?("GIT_REPOSITORY=")
repos = l[idx,length].strip
elsif l.start_with?("GIT_BRANCH=")
branch = l[idx,length].strip
else
- next
- end
+ next
+ end
end
end
@repository = repos
add_source_info( version, info )
end
- end
+ end
end
# read pkginfo
Dir.new(pkginfo_dir).entries.each do |file|
if file.eql? "." or file.eql? ".." then next end
- vlen = file.length - ".manifest".length
+ vlen = file.length - ".manifest".length
version = file[0,vlen]
add_package_info( version, "#{pkginfo_dir}/#{file}" )
end
def init
- $access_listfile.synchronize {
+ $access_listfile.synchronize do
File.open(@list_file, "a") do |f|
- f.puts "#{@job_id},#{time.year},#{time.month},#{time.day},#{time.hour},#{time.min},#{time.sec}"
+ f.puts "#{@job_id},#{time.year},#{time.month},#{time.day},#{time.hour},#{time.min},#{time.sec}"
end
- }
+ end
end
if File.exist? "#{@job_path}/buildroot" then
FileUtils.rm_rf "#{@job_path}/buildroot"
end
- if File.exist? "#{@job_path}/temp" then
+ if File.exist? "#{@job_path}/temp" then
FileUtils.rm_rf "#{@job_path}/temp"
end
- if File.exist? "#{@job_path}/external_pkgs" then
+ if File.exist? "#{@job_path}/external_pkgs" then
FileUtils.rm_rf "#{@job_path}/external_pkgs"
end
# remove line for the job
- $access_listfile.synchronize {
+ $access_listfile.synchronize do
lines = []
# get all lines
if File.exist? @list_file then
f.puts l
end
end
- }
- end
+ end
+ end
end
-class JobCleaner
+class JobCleaner
attr_accessor :quit
# init
FileUtils.mv(@list_file,list_file2)
File.open(list_file2, "r") do |f|
f.each_line do |l|
- id = l.split(",")[0]
- year = l.split(",")[1]
- month = l.split(",")[2]
- day = l.split(",")[3]
- hour = l.split(",")[4]
- min = l.split(",")[5]
+ id = l.split(",")[0]
+ year = l.split(",")[1]
+ month = l.split(",")[2]
+ day = l.split(",")[3]
+ hour = l.split(",")[4]
+ min = l.split(",")[5]
sec = l.split(",")[6]
- # create job and register
- job_path = "#{jobs_path}/#{id}"
- time = Time.mktime(year.to_i, month.to_i, day.to_i, hour.to_i, min.to_i, sec.to_i)
+ # create job and register
+ job_path = "#{jobs_path}/#{id}"
+ time = Time.mktime(year.to_i, month.to_i, day.to_i, hour.to_i, min.to_i, sec.to_i)
@server.log.info "Registered clean-action for the job in list : #{id}"
@handler.register(JobCleanAction.new(time,job_path,@list_file, @server))
# add clean list
- clean_list.push id
+ clean_list.push id
end
end
end
if id.eql? "." or id.eql? ".." then next end
if not clean_list.include? id then
- job_path = "#{jobs_path}/#{id}"
+ job_path = "#{jobs_path}/#{id}"
time = Time.now
@server.log.info "Registered clean-action for old job : #{id}"
@handler.register(JobCleanAction.new(time,job_path,@list_file, @server))
end
# start handler
- @handler.start
+ @handler.start
end
end
- # clean directly
+ # clean directly
def clean(job_id)
time = Time.now
job_path = "#{@server.path}/jobs/#{job_id}"
=begin
- JobLog.rb
+ JobLog.rb
Copyright (c) 2000 - 2011 Samsung Electronics Co., Ltd. All rights reserved.
def init
- # comm-begin
+ # comm-begin
if not @second_out.nil? and not @second_out.closed? then
BuildCommServer.send_begin(@second_out)
end
def close
# close communication
if not @second_out.nil? then
- begin
+ begin
if not @second_out.closed? then
BuildCommServer.send_end(@second_out)
end
BuildCommServer.disconnect(@second_out)
end
- @second_out = nil
+ @second_out = nil
end
- def is_connected?
- if @second_out.nil? or @second_out.closed? then
+ def is_connected?
+ if @second_out.nil? or @second_out.closed? then
return false
else
return true
# overide
def output_extra(msg)
- begin
+ begin
if not @second_out.nil? then
BuildCommServer.send( @second_out, msg )
end
@parent_job.cancel_state = "INIT"
end
end
- end
+ end
end
job.status = "WAITING"
end
@parent.log.info "Checking the job \"#{job.id}\" was finished!"
- rescue => e
- @parent.log.error e.message
- @parent.log.error e.backtrace.inspect
- end
+ rescue => e
+ @parent.log.error e.message
+ @parent.log.error e.backtrace.inspect
+ end
end
- @parent.log.info "Job \"#{job.id}\" entered INITIALIZING status"
+ @parent.log.info "Job \"#{job.id}\" entered INITIALIZING status"
end
# start build
job.execute
- @parent.log.info "Moved the job \"#{job.id}\" to working job list"
+ @parent.log.info "Moved the job \"#{job.id}\" to working job list"
end
if job.execute() then
# status change & job control
job.status = "REMOTE_WORKING"
- @parent.log.info "Moved the job \"#{job.id}\" to remote job list"
+ @parent.log.info "Moved the job \"#{job.id}\" to remote job list"
else
- @parent.log.info "Moving the job \"#{job.id}\" to remote failed"
+ @parent.log.info "Moving the job \"#{job.id}\" to remote failed"
end
end
def cancel_job( job)
job.cancel_state = "WORKING"
- @parent.log.info "Creating thread for canceling the job \"#{job.id}\""
+ @parent.log.info "Creating thread for canceling the job \"#{job.id}\""
Thread.new do
begin
#terminate job thread
# call terminate process for job
job.terminate
- rescue => e
- @parent.log.error e.message
- @parent.log.error e.backtrace.inspect
- end
+ rescue => e
+ @parent.log.error e.message
+ @parent.log.error e.backtrace.inspect
+ end
end
end
job_list.each do |job|
# if "ERROR", "FINISHED", "CANCELED" remove it from list
if job.status == "ERROR"
- @parent.log.info "Job \"#{job.id}\" is stopped by ERROR"
- @reverse_build_jobs.delete job
- elsif job.status == "FINISHED"
- @parent.log.info "Job \"#{job.id}\" is removed by FINISH status"
+ @parent.log.info "Job \"#{job.id}\" is stopped by ERROR"
@reverse_build_jobs.delete job
- elsif job.status == "CANCELED"
- @parent.log.info "Job \"#{job.id}\" is removed by CANCELED status"
+ elsif job.status == "FINISHED"
+ @parent.log.info "Job \"#{job.id}\" is removed by FINISH status"
+ @reverse_build_jobs.delete job
+ elsif job.status == "CANCELED"
+ @parent.log.info "Job \"#{job.id}\" is removed by CANCELED status"
@reverse_build_jobs.delete job
end
job_list.each do |job|
# if "ERROR", "FINISHED", "CANCELED" remove it from list
if job.status == "ERROR"
- @parent.log.info "Job \"#{job.id}\" is stopped by ERROR"
- @internal_jobs.delete job
- elsif job.status == "FINISHED"
- @parent.log.info "Job \"#{job.id}\" is removed by FINISH status"
+ @parent.log.info "Job \"#{job.id}\" is stopped by ERROR"
+ @internal_jobs.delete job
+ elsif job.status == "FINISHED"
+ @parent.log.info "Job \"#{job.id}\" is removed by FINISH status"
@internal_jobs.delete job
- elsif job.status == "CANCELED"
- @parent.log.info "Job \"#{job.id}\" is removed by CANCELED status"
+ elsif job.status == "CANCELED"
+ @parent.log.info "Job \"#{job.id}\" is removed by CANCELED status"
@internal_jobs.delete job
end
job_list.each do |job|
# if "ERROR", "FINISHED", "CANCELED" remove it from list
if job.status == "ERROR"
- @parent.log.info "Job \"#{job.id}\" is stopped by ERROR"
- @jobs.delete job
- elsif job.status == "FINISHED"
- @parent.log.info "Job \"#{job.id}\" is removed by FINISH status"
+ @parent.log.info "Job \"#{job.id}\" is stopped by ERROR"
+ @jobs.delete job
+ elsif job.status == "FINISHED"
+ @parent.log.info "Job \"#{job.id}\" is removed by FINISH status"
@jobs.delete job
- elsif job.status == "CANCELED"
- @parent.log.info "Job \"#{job.id}\" is removed by CANCELED status"
+ elsif job.status == "CANCELED"
+ @parent.log.info "Job \"#{job.id}\" is removed by CANCELED status"
@jobs.delete job
end
end
# check the connection if job is not asynchronous job
- if ( job.status == "WAITING" or job.status == "REMOTE_WORKING" or job.status == "PENDING") and
- not job.is_asynchronous_job? and
+ if ( job.status == "WAITING" or job.status == "REMOTE_WORKING" or job.status == "PENDING") and
+ not job.is_asynchronous_job? and
not job.is_connected? then
job.status = "ERROR"
# otherwise, check remote server
rserver = @parent.get_available_server( job )
if rserver != nil and rserver == @parent then
- execute(job)
+ execute(job)
elsif rserver != nil then
execute_remote(job, rserver)
else
# select reverse build job with round-robin method
selected_job = nil
@reverse_build_jobs.each do |job|
- if job.status == "WAITING" then
+ if job.status == "WAITING" then
selected_job = job
break
end
end
# rotate array
- if @reverse_build_jobs.count > 0 then
+ if @reverse_build_jobs.count > 0 then
@reverse_build_jobs.push @reverse_build_jobs.shift
end
if not selected_job.nil? then return selected_job end
(@jobs + @internal_jobs + @reverse_build_jobs).each do |job|
if job.status == "WORKING" then
working_cnt = working_cnt + 1
- end
+ end
# must exclude parent job
if not job.get_parent_job().nil? then
(@jobs + @internal_jobs + @reverse_build_jobs).each do |job|
if job.status == "WORKING" then
result.push job
- end
+ end
end
- return result
+ return result
end
(@jobs + @internal_jobs + @reverse_build_jobs).each do |job|
if job.status == "WAITING" then
result.push job
- end
+ end
end
- return result
+ return result
end
(@jobs + @internal_jobs + @reverse_build_jobs).each do |job|
if job.status == "REMOTE_WORKING" then
result.push job
- end
+ end
end
- return result
+ return result
end
@jobs.each do |job|
if job.status == "PENDING" then
result.push job
- end
+ end
end
- return result
+ return result
end
elsif cjob.status == "PENDING" and (not job.does_depend_on? cjob) and
(job.has_build_dependency?( cjob ) or job.is_compatible_with?( cjob)) then
pre_jobs.push cjob
- elsif check_dep_wait and cjob.status == "WAITING" and
+ elsif check_dep_wait and cjob.status == "WAITING" and
(job.does_depend_on? cjob or
(job.id > cjob.id and job.is_compatible_with? cjob) ) then
pre_jobs.push cjob
# check pre-requisite jobs are changed, notify to user
is_changed = false
- if pre_jobs.count != job.pre_jobs.count then
- is_changed=true
+ if pre_jobs.count != job.pre_jobs.count then
+ is_changed=true
else
pre_jobs.each do |pjob|
if not job.pre_jobs.include? pjob then
end
job.pre_jobs = pre_jobs
- # no pre-requisite jobs, return its job
+ # no pre-requisite jobs, return its job
if job.pre_jobs.count == 0 then
return job
end
end
end
- #
+ #
def init
# mkdir
- if not File.exist? @job_root then
+ if not File.exist? @job_root then
FileUtils.mkdir_p @job_root
end
# create source path
if not File.exist? @source_path then
FileUtils.mkdir_p @source_path
- end
+ end
# initialize all sub jobs and add them to "internal_jobs"
@sub_jobs.each do |job|
# compare build dependency
get_build_dependencies(@os).each do |dep|
wjob.get_packages().each do |wpkg|
- # dep packages of my job must have same name and target os
+ # dep packages of my job must have same name and target os
# with packages in working job
if dep.package_name == wpkg.package_name and
dep.target_os_list.include? wjob.os then
get_packages().each do |pkg|
wjob.get_build_dependencies(wjob.os).each do |dep|
- # dep package of working job must have same name and target os
+ # dep package of working job must have same name and target os
# with packages in my job
if dep.package_name == pkg.package_name and
dep.target_os_list.include? @os then
#
- # main module
+ # main module
protected
def job_main()
@log.info( "Invoking a thread for MULTI-BUILD Job #{@id}", Log::LV_USER)
end
# add to internal job
- @server.jobmgr.internal_job_schedule.synchronize {
+ @server.jobmgr.internal_job_schedule.synchronize do
@sub_jobs.each do |job|
# init finished, add internal_jobs
@server.jobmgr.add_internal_job(job)
@log.info( " * Log URL : #{@server.job_log_url}/#{job.id}/log", Log::LV_USER)
end
end
- }
+ end
# show job status changes
all_jobs_finished = false
# check there is some error or cancel
if stop_status == "FINISHED" and
(job.status == "ERROR" or job.status == "CANCELED") then
- # write url
+ # write url
write_log_url(job)
# cancel all other un-finished jobs
@sub_jobs.each do |sub|
- if sub.status != "ERROR" and sub.status != "FINISHED" and
+ if sub.status != "ERROR" and sub.status != "FINISHED" and
sub.status != "CANCELED" and sub.cancel_state == "NONE" then
@log.info(" * Sub-Job \"#{sub.get_project().name}(#{sub.os})\" has entered \"CANCELING\" state. (#{sub.id})", Log::LV_USER)
sub.cancel_state = "INIT"
end
end
- #
+ #
sleep 1
end
if stop_status == "ERROR" or stop_status == "CANCELED" then
@status = stop_status
- return
+ return
end
# upload
end
- private
+ private
def upload()
@log.info( "Uploading ...", Log::LV_USER)
def init
# create directory
if File.exist? @download_path then
- FileUtils.rm_rf @download_path
- FileUtils.rm_rf @original_path
+ FileUtils.rm_rf @download_path
+ FileUtils.rm_rf @original_path
else
FileUtils.mkdir_p @download_path
FileUtils.mkdir_p @original_path
# if updates are found, download them
downloaded_files = []
- pkgs.each { |pkg|
+ pkgs.each do |pkg|
pkg_name=pkg[0]; os=pkg[1]
files = @pkgsvr_client.download(pkg_name, os, false)
downloaded_files += files
- }
+ end
# request to register
registered_jobs = []
- downloaded_files.each { |file_path|
+ downloaded_files.each do |file_path|
@server.log.info "Creating new job for registering \"#{file_path}\""
new_job = @server.jobmgr.create_new_register_job( file_path )
logger = JobLog.new( new_job, nil )
# add
@server.jobmgr.add_job( new_job )
registered_jobs.push new_job
- }
+ end
# wait for all jobs to finish
all_jobs_finished = false
while not all_jobs_finished
- unfinished_jobs = registered_jobs.select { |j|
+ unfinished_jobs = registered_jobs.select do |j|
(j.status != "ERROR" and j.status != "FINISHED" and j.status != "CANCELED")
- }
- if unfinished_jobs.empty? then
- all_jobs_finished = true
+ end
+ if unfinished_jobs.empty? then
+ all_jobs_finished = true
else
sleep 10
end
end
- # remove files
- downloaded_files.each { |file_path|
+ # remove files
+ downloaded_files.each do |file_path|
@server.log.info "Removed downloaded file: \"#{file_path}\""
FileUtils.rm_rf file_path
- }
- end
+ end
+ end
protected
# for all BINARY project
bin_prjs = @server.prjmgr.projects.select { |p| (p.type == "BINARY") }
- bin_prjs.each { |p|
+ bin_prjs.each do |p|
pkg_name = p.pkg_name
- p.os_list.each { |os|
+ p.os_list.each do |os|
# get pkg version in server
main_ver = @main_client.get_attr_from_pkg(pkg_name, os, "version")
if main_ver.nil? then next end
if Version.new(main_ver) < Version.new(remote_ver) then
pkgs.push [pkg_name, os]
end
- }
- }
+ end
+ end
return pkgs
end
def start()
time = Time.new + 60
- @server.remote_pkg_servers.each { |entry|
+ @server.remote_pkg_servers.each do |entry|
url=entry[0]; dist_name=entry[1]
@handler.register( PackageSyncAction.new(time, url, dist_name, @server) )
@server.log.info "Registered package-sync action!: #{dist_name} <= \"#{url}\""
- }
+ end
# start handler
- @handler.start
+ @handler.start
end
end
# create sub jobs
@projects.each do |prj|
- if prj.type != "GIT" then next end
+ if prj.type != "GIT" then next end
if prj.dist_name != dist_name then next end
prj.os_list.each do |os|
new_job = create_new_job( prj.name, os, dist_name )
if new_job.nil? then next end
- # This make project to build
+ # This makes the project build
# even though there is a package of the same version on the pkg-server
new_job.set_force_rebuild(true)
# add to multi job
- result.add_sub_job( new_job )
- end
+ result.add_sub_job( new_job )
+ end
end
return result
ver = pkg.version
os = pkg.os
- # check project provide target package
+ # check whether the project provides the target package
if prj.include_package?(name, ver, os) then
result.push [prj, os, ver]
break
end
- end
+ end
end
return result
@projects.each do |prj|
# check project's distribution
if prj.dist_name != dist_name then next end
- # check project provide target package
+ # check whether the project provides the target package
if prj.include_package?(pkg_name) then
return prj
end
@projects.each do |prj|
# check project's distribution
if prj.dist_name != dist_name then next end
- if prj.type == "GIT" and prj.repository == repos then
- return prj
+ if prj.type == "GIT" and prj.repository == repos then
+ return prj
end
end
# add
add_git_project(name , repos, branch, passwd, os_list, dist_name)
# get
- return get_project(name, dist_name)
+ return get_project(name, dist_name)
end
prj_id = db.select_one("select last_insert_rowid()")[0]
prj.os_list.each do |os|
db.do "INSERT INTO project_os VALUES('#{prj_id}','#{os}')"
- end
-
- if not prj.save_db() then raise RuntimeError ,"Save project DB is failed!" end
- end
- rescue DBI::DatabaseError => e
- puts "DB update failed!"
- puts e.errstr
- result = false
- ensure
- db.disconnect if db
end
- return result
+
+ if not prj.save_db() then raise RuntimeError, "Saving the project DB failed!" end
end
+ rescue DBI::DatabaseError => e
+ puts "DB update failed!"
+ puts e.errstr
+ result = false
+ ensure
+ db.disconnect if db
+ end
+ return result
+ end
- # load
- def load_db()
- result = true
- @projects = []
-
- sqlite_db_file = "DBI:SQLite3:#{BuildServer::CONFIG_ROOT}/#{@server.id}/server.db"
- begin
- # open DB
- db = DBI.connect(sqlite_db_file)
-
- # distributions
- rs = db.execute "SELECT * FROM projects"
- rs.fetch_hash do |row|
- prj_id = row['id']
- prj_name = row['name']
- prj_type = row['type']
- prj_passwd = row['password']
- prj_dist = row['dist_name']
-
- @server.log.info "Loading project : #{prj_name}"
- # os
- prj_os_list = []
- rs2 = db.execute "SELECT os_name FROM project_os WHERE prj_id = #{prj_id}"
- rs2.fetch do |row2|
- prj_os_list.push row2[0]
- end
- rs2.finish
- if prj_type == "GIT" then
- new_project = GitBuildProject.new(prj_name, @server, prj_os_list, prj_dist)
- else
- new_project = BinaryUploadProject.new(prj_name, @server, prj_os_list, prj_dist)
- end
-
- if not prj_passwd.empty? then
- new_project.passwd = prj_passwd
- end
-
- if not new_project.load_db() then raise RuntimeError, "Project DB load failed!" end
- @projects.push new_project
+ # load
+ def load_db()
+ result = true
+ @projects = []
+
+ sqlite_db_file = "DBI:SQLite3:#{BuildServer::CONFIG_ROOT}/#{@server.id}/server.db"
+ begin
+ # open DB
+ db = DBI.connect(sqlite_db_file)
+
+ # distributions
+ rs = db.execute "SELECT * FROM projects"
+ rs.fetch_hash do |row|
+ prj_id = row['id']
+ prj_name = row['name']
+ prj_type = row['type']
+ prj_passwd = row['password']
+ prj_dist = row['dist_name']
+
+ @server.log.info "Loading project : #{prj_name}"
+ # os
+ prj_os_list = []
+ rs2 = db.execute "SELECT os_name FROM project_os WHERE prj_id = #{prj_id}"
+ rs2.fetch do |row2|
+ prj_os_list.push row2[0]
end
- rs.finish
- rescue DBI::DatabaseError => e
- puts "DB loading failed!"
- puts e.errstr
- result = false
- ensure
- rs.finish if not rs.finished?
- db.disconnect if db
+ rs2.finish
+ if prj_type == "GIT" then
+ new_project = GitBuildProject.new(prj_name, @server, prj_os_list, prj_dist)
+ else
+ new_project = BinaryUploadProject.new(prj_name, @server, prj_os_list, prj_dist)
+ end
+
+ if not prj_passwd.empty? then
+ new_project.passwd = prj_passwd
+ end
+
+ if not new_project.load_db() then raise RuntimeError, "Project DB load failed!" end
+ @projects.push new_project
end
- return result
+ rs.finish
+ rescue DBI::DatabaseError => e
+ puts "DB loading failed!"
+ puts e.errstr
+ result = false
+ ensure
+ rs.finish if not rs.finished?
+ db.disconnect if db
end
+ return result
+ end
- # write configuration
- def write_configuration(name, repos, branch, passwd, os_list )
- config_file = "#{@project_root}/#{name}/build"
- File.open( config_file, "w" ) do |f|
- f.puts "TYPE=GIT"
- if not passwd.nil? and not passwd.empty? then
- f.puts "PASSWD=#{passwd}"
- end
- f.puts "GIT_REPOSITORY=#{repos}"
- f.puts "GIT_BRANCH=#{branch}"
- f.puts "OS_LIST=#{os_list.join(",")}"
+ # write configuration
+ def write_configuration(name, repos, branch, passwd, os_list )
+ config_file = "#{@project_root}/#{name}/build"
+ File.open( config_file, "w" ) do |f|
+ f.puts "TYPE=GIT"
+ if not passwd.nil? and not passwd.empty? then
+ f.puts "PASSWD=#{passwd}"
end
+ f.puts "GIT_REPOSITORY=#{repos}"
+ f.puts "GIT_BRANCH=#{branch}"
+ f.puts "OS_LIST=#{os_list.join(",")}"
end
+ end
- # write configuration
- def write_configuration_for_binary_project(name, pkg_name, passwd, os_list )
- config_file = "#{@project_root}/#{name}/build"
- File.open( config_file, "w" ) do |f|
- f.puts "TYPE=BINARY"
- if not passwd.nil? and not passwd.empty? then
- f.puts "PASSWD=#{passwd}"
- end
- f.puts "PACKAGE_NAME=#{pkg_name}"
- f.puts "OS_LIST=#{os_list.join(",")}"
+ # write configuration
+ def write_configuration_for_binary_project(name, pkg_name, passwd, os_list )
+ config_file = "#{@project_root}/#{name}/build"
+ File.open( config_file, "w" ) do |f|
+ f.puts "TYPE=BINARY"
+ if not passwd.nil? and not passwd.empty? then
+ f.puts "PASSWD=#{passwd}"
end
+ f.puts "PACKAGE_NAME=#{pkg_name}"
+ f.puts "OS_LIST=#{os_list.join(",")}"
end
+ end
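# A minimal sketch of reading such a "build" file back, assuming the
# KEY=VALUE lines written above (helper name is illustrative).
def read_build_configuration( name )
  config = {}
  config_file = "#{@project_root}/#{name}/build"
  if not File.exist? config_file then return config end
  File.open( config_file, "r" ) do |f|
    f.each_line do |l|
      idx = l.index("=")
      if idx.nil? then next end
      config[ l[0,idx].strip ] = l[idx+1..-1].strip
    end
  end
  return config
end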
end
return false
end
- #
+ #
def init
# mkdir
- if not File.exist? @job_root then
+ if not File.exist? @job_root then
FileUtils.mkdir_p @job_root
end
# create dummy source path
if not File.exist? @source_path then
FileUtils.mkdir_p @source_path
- end
+ end
# copy package file to source path
@file_path = "#{@source_path}/#{File.basename(@local_path)}"
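+		# check whether this job handles the same package as the given working job (wjob)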
case @pkg_type
when "BINARY"
if @pkg_name == wjob.pkg_name and
- @os == wjob.os then
- return true
+ @os == wjob.os then
+ return true
end
when "ARCHIVE"
if @pkg_name == wjob.pkg_name then return true end
if @pkg_type == "BINARY" then
wjob.get_build_dependencies(wjob.os).each do |dep|
- # dep package of working job must have same name and target os
+				# a build dependency of the working job must have the same name
				# and target OS as the package handled by this job
if dep.package_name == @pkg_name and
dep.target_os_list.include? @os then
return true
end
end
- end
+ end
return false
end
#
- # main module
+ # main module
protected
def job_main()
@log.info( "Invoking a thread for REGISTER Job #{@id}", Log::LV_USER)
return
end
- # if this package has compatible OS, check
- if @pkg_type == "BINARY" and
+		# if this binary package supports other compatible OSs, also produce copies for them
+ if @pkg_type == "BINARY" and
@pkginfo.packages[0].os_list.count > 1 then
- pkg = @pkginfo.packages[0]
+ pkg = @pkginfo.packages[0]
pkg.os_list.each do |os|
if @os == os then next end
# skip when there is higher version of the package
ver_svr = @pkgsvr_client.get_attr_from_pkg( pkg.package_name, @os, "version")
- if not ver_svr.nil? and
- Version.new(@pkg_version) <= Version.new(ver_svr) then next end
+ if not ver_svr.nil? and
+ Version.new(@pkg_version) <= Version.new(ver_svr) then next end
# make new package file for compatible OS
- newfile = "#{@pkg_name}_#{@pkg_version}_#{os}.zip"
+ newfile = "#{@pkg_name}_#{@pkg_version}_#{os}.zip"
@log.info( "Copying #{@filename} to #{newfile}" )
FileUtils.cp(@file_path,"#{@source_path}/#{newfile}")
- # reverse check
+ # reverse check
if not ReverseBuildChecker.check( self, true, os ) then
@status = "ERROR"
@log.error( "Reverse-build-check failed!" )
return
end
- end
+ end
end
# upload
@log.info( "Checking reverse build dependency ...", Log::LV_USER)
# get reverse-dependent projects
- rev_pkgs = []
+ rev_pkgs = []
if @pkg_type == "BINARY" then
rev_pkgs += @pkgsvr_client.get_reverse_build_dependent_packages(@pkg_name, target_os)
else
if prj.type != "GIT" then next end
- # create sub jobs for checking
+ # create sub jobs for checking
new_job = prj.create_new_job_from_version(os, version)
new_job.set_rev_build_check_job(self)
# job init
result = new_job.init()
# if init is succeeded!, try to execute
- if result then
+ if result then
# check available server
rserver = @server.get_available_server( new_job )
if rserver != nil and rserver != @server then
- new_job.set_remote_job( rserver )
+ new_job.set_remote_job( rserver )
end
# execute
new_job.execute(true)
# query remote server info & update server state
def update_state
- # send
+ # send
#@status = "DISCONNECTED"
client = BuildCommClient.create( @ip, @port )
if client.nil? then return end
client.terminate
if @status == "DISCONNECTED" then return end
- # send
+ # send
@working_jobs = []
@waiting_jobs = []
client = BuildCommClient.create( @ip, @port )
case job_status
when "WAITING", "JUST_CREATED", "INITIALIZING"
@waiting_jobs.push new_job
- when "WORKING"
+ when "WORKING"
@working_jobs.push new_job
else
#puts "Uncontrolled status"
- end
+ end
end
if not result then @status = "DISCONNECTED" end
else
def add_file_transfer()
- @file_transfer_cnt_mutex.synchronize {
+ @file_transfer_cnt_mutex.synchronize do
@file_transfer_cnt += 1
- }
+ end
end
def remove_file_transfer()
- @file_transfer_cnt_mutex.synchronize {
+ @file_transfer_cnt_mutex.synchronize do
@file_transfer_cnt -= 1
- }
+ end
end
def get_file_transfer_cnt()
require "FileTransferViaFTP"
require "FileTransferViaDirect"
-class RemoteBuilder
+class RemoteBuilder
attr_accessor :id, :log
# initialize
@log = job.log
# build
- ret = build(@job.get_project().repository, @job.source_path, @job.os,
+ ret = build(@job.get_project().repository, @job.source_path, @job.os,
@job.is_rev_build_check_job(), @job.git_commit, @job.no_reverse,
local_pkgs, @job.get_project().dist_name )
if not result then
@log.error( "Building job on remote server failed!", Log::LV_USER )
return false
- end
+ end
# receive binary package
result_files.each do |file_name|
@log.error( "File transferring failed! : #{file_name}", Log::LV_USER )
return false
end
- end
+ end
return true
end
- # upload binary packages that is need to be overwrite
+	# upload binary packages that need to be overwritten
	# before the remote build
protected
def send_file_to_remote(file_path, dock = "0")
# create client
- client = BuildCommClient.create( @addr, @port, @log )
+ client = BuildCommClient.create( @addr, @port, @log )
if client.nil? then
@log.error( "Creating communication client failed!", Log::LV_USER)
- return false
+ return false
end
# upload file
end
- # send build request
+ # send build request
protected
def send_build_request(git_repos, os, is_rev_build, commit, no_reverse, local_pkgs, dock = "0", dist_name="BASE")
result_files = []
- client = BuildCommClient.create( @addr, @port, @log )
+ client = BuildCommClient.create( @addr, @port, @log )
if client.nil? then
@log.error( "Creating communication client failed!", Log::LV_USER)
- return false, result_files
- end
+ return false, result_files
+ end
# get local package names
local_pkg_names = local_pkgs.map { |path| File.basename(path) }
rev = is_rev_build ? "YES":"NO"
msg = "BUILD|GIT|#{git_repos}||#{os}|NO|#{no_reverse}|YES|#{rev}|#{commit}|#{pkg_list}|#{dock}|#{dist_name}"
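+		# used to pull the remote job id out of the server's 'Added new job "..."' response line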
r_job_number = Regexp.new('Added new job "([^"]*)"')
- if client.send( msg ) then
+ if client.send( msg ) then
result = client.read_lines do |l|
# write log first
@log.output( l.strip, Log::LV_USER)
# check build result
if l.include? "Job is stopped by ERROR" or
- l.include? "Error:" then
+ l.include? "Error:" then
result = false
break
end
end
end
- end
+ end
# close socket
client.terminate
protected
def receive_file_from_remote(file_path, dock = "0")
# create client
- client = BuildCommClient.create( @addr, @port, @log )
+ client = BuildCommClient.create( @addr, @port, @log )
if client.nil? then
@log.error( "Creating communication client failed!", Log::LV_USER)
- return false
+ return false
end
# download file
class ReverseBuildChecker
- # check
+ # check
def ReverseBuildChecker.check( job, exit_on_error, override_os = nil )
log = job.log
job_os = (override_os.nil?) ? job.os : override_os
bin_pkg_name_list = []
src_pkg_name_list = []
case job.type
- when "BUILD"
+ when "BUILD"
job.pkginfo.get_target_packages(job_os).each do |pkg|
bin_pkg_name_list.push pkg.package_name
end
- when "REGISTER"
+ when "REGISTER"
if job.pkg_type == "BINARY" then
bin_pkg_name_list.push job.pkg_name
else
end
# get reverse projects from build dependency
- rev_pkgs = []
+ rev_pkgs = []
bin_pkg_name_list.each do |pkg_name|
rev_pkgs += job.pkgsvr_client.get_reverse_build_dependent_packages(pkg_name, job_os)
end
# if not "GIT" project, ignore it
if rev_prj.type != "GIT" then next end
- # if job on resolve process, its unresolved project
+ # if job on resolve process, its unresolved project
#of pending ancestor must be excluded.
if job.type == "BUILD" and not job.pending_ancestor.nil? then
found = false
- job.pending_ancestor.rev_fail_projects.each { |fp|
+ job.pending_ancestor.rev_fail_projects.each do |fp|
f_prj = fp[0]
f_os = fp[1]
- if rev_prj == f_prj and rev_os == f_os then
+ if rev_prj == f_prj and rev_os == f_os then
found = true
break
end
- }
+ end
if found then next end
end
job.get_parent_job().get_sub_jobs().each do |sub_job|
sub_prj = sub_job.get_project()
sub_os = sub_job.os
- if rev_prj == sub_prj and rev_os == sub_os then
+ if rev_prj == sub_prj and rev_os == sub_os then
found = true
break
end
# for all reverse job
rev_build_jobs.each do |rev_job|
- # add to job manager
+ # add to job manager
job.server.jobmgr.add_reverse_build_job(rev_job)
log.info( " * Added new job for reverse-build ... \
#{rev_job.get_project().name}(#{rev_job.os}) (#{rev_job.id})", Log::LV_USER)
success_list = []
failure_list = []
cancel_other_jobs = false
- while not rev_build_finished
+ while not rev_build_finished
rev_build_finished = true
rev_build_jobs.each do |rev_job|
rev_prj = rev_job.get_project()
def self.is_project_included?( prj_list, prj, os )
prj_list.each do |p|
if p[0] == prj and p[1] == os then return true end
- end
+ end
return false
- end
+ end
# write web url for log
# make loop recover when unhandled exception occurred
while not @finish_loop
begin
- main()
+ main()
rescue => e
@log.error e.message
@log.error e.backtrace.inspect
end
- end
+ end
end
end
private
- # thread main
+ # thread main
def main()
# server open
begin
end
# loop
- @log.info "Entering Control Listening Loop ... "
+ @log.info "Entering Control Listening Loop ... "
@finish_loop = false
@comm_server.wait_for_connection(@finish_loop) do |req|
handle_job_request( req )
- end
+ end
# quit
@comm_server.terminate
# wait for job requests
def wait_for_job_requests
req_list = []
- req_list.push @tcp_server.accept
+ req_list.push @tcp_server.accept
return req_list
end
def handle_job_request( req )
# read request
- req_line = req.gets
+ req_line = req.gets
if req_line.nil? then return end
# parse request
cmd = ""
if req_line.split("|").count > 0 then
- cmd = req_line.split("|")[0].strip
+ cmd = req_line.split("|")[0].strip
end
case cmd
Thread.new do
begin
handle_cmd_download( req_line, req )
- rescue => e
- @log.error "Transfering file failed!"
- @log.error e.message
- @log.error e.backtrace.inspect
+ rescue => e
+					@log.error "Transferring file failed!"
+ @log.error e.message
+ @log.error e.backtrace.inspect
+ end
end
- end
when "UPLOAD"
Thread.new do
begin
handle_cmd_upload( req_line, req )
- rescue => e
- @log.error "Transfering file failed!"
- @log.error e.message
- @log.error e.backtrace.inspect
+ rescue => e
+					@log.error "Transferring file failed!"
+ @log.error e.message
+ @log.error e.backtrace.inspect
+ end
end
- end
else
- @log.info "Received Unknown REQ: #{req_line}"
+ @log.info "Received Unknown REQ: #{req_line}"
raise "Unknown request: #{req_line}"
end
end
- # "BUILD"
+ # "BUILD"
def handle_cmd_build( line, req )
tok = line.split("|").map { |x| x.strip }
- if tok.count < 3 then
- @log.info "Received Wrong REQ: #{line}"
+ if tok.count < 3 then
+ @log.info "Received Wrong REQ: #{line}"
raise "Invalid request format is used: #{line}"
end
# check type
- if tok[1] != "GIT" then
+ if tok[1] != "GIT" then
@log.info "Received Wrong REQ: #{line}"
raise "Invalid request format is used: #{line}"
end
if async then
logger.info( "Above job(s) will be processed asynchronously!", Log::LV_USER)
logger.close
- end
+ end
# add to job queue
if new_job.is_rev_build_check_job() then
end
- # "RESOLVE"
+ # "RESOLVE"
def handle_cmd_resolve( line ,req)
tok = line.split("|").map { |x| x.strip }
- if tok.count < 3 then
- @log.info "Received Wrong REQ: #{line}"
+ if tok.count < 3 then
+ @log.info "Received Wrong REQ: #{line}"
raise "Invalid request format is used: #{line}"
end
end
@log.info "Received a request for resolving this project : #{project_name}, #{os}"
- # resolve
+ # resolve
new_job.set_resolve_flag()
# create logger and set
if async then
logger.info( "Above job(s) will be processed asynchronously!", Log::LV_USER)
logger.close
- end
+ end
- @parent_server.jobmgr.add_job( new_job )
+ @parent_server.jobmgr.add_job( new_job )
else
- @log.info "Received Wrong REQ: #{line}"
+ @log.info "Received Wrong REQ: #{line}"
raise "Invalid request format is used: #{line}"
- end
+ end
end
# "QUERY"
def handle_cmd_query( line, req )
tok = line.split("|").map { |x| x.strip }
- if tok.count < 2 then
- @log.info "Received Wrong REQ: #{line}"
+ if tok.count < 2 then
+ @log.info "Received Wrong REQ: #{line}"
raise "Invalid request format is used: #{line}"
end
# QUERY,SYSTEM
when "SYSTEM"
- #puts "Received QUERY SYSTEM"
+ #puts "Received QUERY SYSTEM"
BuildCommServer.send_begin(req)
BuildCommServer.send(req,"#{@parent_server.host_os},#{@parent_server.jobmgr.max_working_jobs}")
# print GIT projects
sorted_list = @parent_server.prjmgr.projects.sort { |x,y| x.name <=> y.name }
sorted_list.each do |prj|
- if prj.type != "GIT" then next end
+ if prj.type != "GIT" then next end
BuildCommServer.send(req,"G,#{prj.name},#{prj.repository},#{prj.branch}")
end
# print BINARY projects
sorted_list.each do |prj|
- if prj.type != "BINARY" then next end
+ if prj.type != "BINARY" then next end
BuildCommServer.send(req,"B,#{prj.name},#{prj.pkg_name}")
end
# print REMOTE project
sorted_list.each do |prj|
- if prj.type != "REMOTE" then next end
+ if prj.type != "REMOTE" then next end
BuildCommServer.send(req,"R,#{prj.name}")
end
BuildCommServer.send_end(req)
BuildCommServer.disconnect(req)
else
- @log.info "Received Wrong REQ: #{line}"
+ @log.info "Received Wrong REQ: #{line}"
raise "Invalid request format is used: #{line}"
end
end
# "CANCEL"
def handle_cmd_cancel( line, req )
tok = line.split("|").map { |x| x.strip }
- if tok.count < 2 then
- @log.info "Received Wrong REQ: #{line}"
+ if tok.count < 2 then
+ @log.info "Received Wrong REQ: #{line}"
raise "Invalid request format is used: #{line}"
end
cancel_job = nil
# "STOP"
def handle_cmd_stop( line, req )
tok = line.split("|").map { |x| x.strip }
- if tok.count < 2 then
- @log.info "Received Wrong REQ: #{line}"
+ if tok.count < 2 then
+ @log.info "Received Wrong REQ: #{line}"
raise "Invalid request format is used: #{line}"
end
# "UPGRADE"
def handle_cmd_upgrade( line, req )
tok = line.split("|").map { |x| x.strip }
- if tok.count < 2 then
- @log.info "Received Wrong REQ: #{line}"
+ if tok.count < 2 then
+ @log.info "Received Wrong REQ: #{line}"
raise "Invalid request format is used: #{line}"
end
# "FULLBUILD"
def handle_cmd_fullbuild( line, req )
tok = line.split("|").map { |x| x.strip }
- if tok.count < 2 then
- @log.info "Received Wrong REQ: #{line}"
+ if tok.count < 2 then
+ @log.info "Received Wrong REQ: #{line}"
raise "Invalid request format is used: #{line}"
end
- server_passwd = tok[1]
- dist_name = tok[2]
+ server_passwd = tok[1]
+ dist_name = tok[2]
if (dist_name.nil? or dist_name.empty?) then
dist_name = @parent_server.distmgr.get_default_distribution_name()
end
# "REGISTER"
def handle_cmd_register( line, req )
tok = line.split("|").map { |x| x.strip }
- if tok.count < 4 then
- @log.info "Received Wrong REQ: #{line}"
+ if tok.count < 4 then
+ @log.info "Received Wrong REQ: #{line}"
raise "Invalid request format is used: #{line}"
end
@log.info "Received File transfer REQ : #{line}"
tok = line.split("|").map { |x| x.strip }
- if tok.count < 2 then
- @log.info "Received Wrong REQ: #{line}"
+ if tok.count < 2 then
+ @log.info "Received Wrong REQ: #{line}"
raise "Invalid request format is used: #{line}"
end
def handle_cmd_download( line, req )
@log.info "Received File transfer REQ : #{line}"
tok = line.split("|").map { |x| x.strip }
- if tok.count < 3 then
- @log.info "Received Wrong REQ: #{line}"
+ if tok.count < 3 then
+ @log.info "Received Wrong REQ: #{line}"
raise "Invalid request format is used: #{line}"
end
if dock_num != "0" and File.exist? "#{outgoing_dir}/#{file_name}" then
@log.info "Removing requested file...: #{file_name}"
FileUtils.rm_rf "#{outgoing_dir}/#{file_name}"
- if Utils.directory_emtpy?(outgoing_dir) then
- FileUtils.rm_rf "#{outgoing_dir}"
+ if Utils.directory_emtpy?(outgoing_dir) then
+ FileUtils.rm_rf "#{outgoing_dir}"
end
end
return nil
end
- return prj
+ return prj
end
private
return nil
end
- return prj
+ return prj
end
result = result + @parent_server.supported_os_list
elsif os == "default" then
- os = @parent_server.supported_os_list[0]
+ os = @parent_server.supported_os_list[0]
result.push os
@log.info "The default OS \"#{os}\" is used as target OS"
result.push svr_os
end
end
- else
+ else
if not @parent_server.supported_os_list.include?(os) then
BuildCommServer.send_begin(req)
req.puts "Error: Unsupported OS name \"#{os}\" is used!"
prj = @parent_server.prjmgr.create_unnamed_git_project( git_repos, dist_name )
end
new_job = prj.create_new_job(os)
- new_job.set_internal_job( dock_num )
+ new_job.set_internal_job( dock_num )
new_job.git_commit = git_commit
incoming_dir = "#{@parent_server.transport_path}/#{dock_num}"
pkg_files.each do |file|
# create buildroot if not set
if buildroot_dir.nil? then
- buildroot_dir = "#{CONFIG_ROOT}/#{id}/buildroot"
+ buildroot_dir = "#{CONFIG_ROOT}/#{id}/buildroot"
if not File.exist? buildroot_dir then
FileUtils.mkdir_p buildroot_dir
end
# create cachedir if not set
if cache_dir.nil? then
- cache_dir = "#{CONFIG_ROOT}/#{id}/build_cache"
+ cache_dir = "#{CONFIG_ROOT}/#{id}/build_cache"
if not File.exist? cache_dir then
FileUtils.mkdir_p cache_dir
end
# check instance first
if not @@instance_map[id] == nil
- return @@instance_map[id]
+ return @@instance_map[id]
end
- # check builder config
- if not File.exist? "#{CONFIG_ROOT}/#{id}/builder.cfg"
+ # check builder config
+ if not File.exist? "#{CONFIG_ROOT}/#{id}/builder.cfg"
raise RuntimeError, "The builder \"#{id}\" does not exist."
end
return false
end
- # read pkginfo
+ # read pkginfo
begin
pkginfo = PackageManifest.new("#{src_path}/package/pkginfo.manifest")
rescue => e
return false
end
- # set default build os
+ # set default build os
build_host_os = @host_os
# check there are packages which can be built
# create client
@log.info( "Download client is initializing...", Log::LV_USER)
cl = Client.new(@pkgserver_url, build_root_dir, @log)
- if clean then
+ if clean then
cl.clean(true)
end
# get local repository path list
repos_paths = []
- local_pkgs.each { |path|
+ local_pkgs.each do |path|
repos_paths.push File.dirname(path)
- }
+ end
repos_paths.uniq!
# install build dependencies
end
@log.info( " * #{dep.package_name}", Log::LV_USER)
- # get local dependent package
+ # get local dependent package
pkgexp = Regexp.new("\/#{dep.package_name}_.*_#{dep_target_os}\.zip$")
local_dep_pkgs = local_pkgs.select{|l| l =~ pkgexp}
# read configuration
builder_dir = "#{CONFIG_ROOT}/#{id}"
log_path = nil
- cache_dir = "#{CONFIG_ROOT}/#{id}/build_cache"
- buildroot_dir = "#{CONFIG_ROOT}/#{id}/buildroot"
+ cache_dir = "#{CONFIG_ROOT}/#{id}/build_cache"
+ buildroot_dir = "#{CONFIG_ROOT}/#{id}/buildroot"
File.open( "#{builder_dir}/builder.cfg", "r" ) do |f|
f.each_line do |l|
if l.start_with?("PSERVER_URL=")
pkgserver_url = l.split("=")[1].strip
elsif l.start_with?("LOG-PATH=")
- log_path = l.split("=")[1].strip
- log_path = nil if log_path == "STDOUT"
+ log_path = l.split("=")[1].strip
+ log_path = nil if log_path == "STDOUT"
elsif l.start_with?("CACHE-DIR=")
- cache_dir = l.split("=")[1].strip
+ cache_dir = l.split("=")[1].strip
elsif l.start_with?("BUILDROOT-DIR=")
- buildroot_dir = l.split("=")[1].strip
+ buildroot_dir = l.split("=")[1].strip
else
- next
- end
+ next
+ end
end
end
# execute build command
def execute_build_command( target, src_path, build_root_dir, os, version )
- # get category
+ # get category
os_category = Utils.get_os_category( os )
# convert directory format when windows
lines = []
f.each_line do |l|
lines.push l
- if l.start_with? "}" then
+ if l.start_with? "}" then
contents = contents + lines
lines = []
end
f.puts "rm -rf ${PKG_CACHE_DIR}/*"
f.puts "CACHEDIR=${PKG_CACHE_DIR}/$(cache_key)"
f.puts "mkdir -p ${CACHEDIR}"
- when "install"
+ when "install"
f.puts " "
else
@log.warn( "Wrong build-target is used: \"#{target}\"", Log::LV_USER)
# create package file
def make_zip(pkginfo,os,src_path)
- # get category
+ # get category
os_category = Utils.get_os_category( os )
pkginfo.packages.each do |pkg|
end
end
- # zip
+ # zip
@log.info( "Creating package file ... #{pkg.package_name}_#{pkg.version}_#{os}.zip", Log::LV_USER)
cmd = "cd \"#{install_dir}\"; zip -r -y #{src_path}/#{pkg.package_name}_#{pkg.version}_#{os}.zip *"
@log.info( cmd )
return false
end
- # get category
+ # get category
# make clean
pkginfo.packages.each do |pkg|
os = pkg.os
FileUtils.rm_rf "#{src_path}/package/#{pkg.package_name}.package.#{os_category}"
end
end
- end
+ end
# execute
return execute_build_command("clean", src_path, build_root_dir, target_os, pkginfo.get_version)
=begin
- CleanOptionParser.rb
+ CleanOptionParser.rb
Copyright (c) 2000 - 2011 Samsung Electronics Co., Ltd. All rights reserved.
def parse()
- #option parsing
+ #option parsing
option = {}
optparse = OptionParser.new(nil, 32, ' '*8) do |opts|
opts.banner = "Clean the package service command-line tool." + "\n" \
opts.on('-h','--help', 'display help') do
puts opts
exit
- end
+ end
- opts.on('-v','--version', 'display version') do
+ opts.on('-v','--version', 'display version') do
puts "DIBS(Distributed Intelligent Build System) version " + Utils.get_version()
exit
- end
- end
+ end
+ end
optparse.parse!
return option
-end
+end
def parse()
- #option parsing
+ #option parsing
option = {}
optparse = OptionParser.new do |opts|
opts.banner = "Build and packaging service command-line tool." + "\n" \
opts.on('-u','--url <package server url>', 'remote package server url: http://127.0.0.1/dibs/unstable') do |url|
option[:url] = url
- end
+ end
option[:os] = nil
opts.on('-o','--os <os>', 'operating system ') do |os|
end
option[:clean] = false
- opts.on('-c','--clean', 'clean build') do
+ opts.on('-c','--clean', 'clean build') do
option[:clean] = true
end
option[:rev] = false
- #opts.on('-r','--rev', 'reverse build dependency check') do
+ #opts.on('-r','--rev', 'reverse build dependency check') do
# option[:rev] = true
- #end
+ #end
opts.on('-h','--help', 'display help') do
puts opts
exit
end
- opts.on('-v','--version', 'display version') do
+ opts.on('-v','--version', 'display version') do
puts "DIBS(Distributed Intelligent Build System) version " + Utils.get_version()
exit
- end
+ end
end
optparse.parse!
end
return option
-end
+end
- S-Core Co., Ltd
=end
-class Action
+class Action
attr_accessor :time, :period
def initialize( time, period )
end
- # execute action
+ # execute action
def execute()
end
end
# create cache dir if not nil
- if not cache_dir.nil? and not File.exist? cache_dir then
- FileUtils.mkdir_p cache_dir
+ if not cache_dir.nil? and not File.exist? cache_dir then
+ FileUtils.mkdir_p cache_dir
end
return new(port, log, ftp_url, cache_dir)
# wait for connection and handle request
def wait_for_connection(quit_loop)
while( not quit_loop )
- req = @tcp_server.accept
+ req = @tcp_server.accept
begin
yield req if block_given?
req.puts "ERROR"
@log.error "Unsupported transporter type! : #{type}"
return false
- end
+ end
req.puts "TRANSPORTER_OK"
puts "[BuildCommServer] Exception"
@log.error e.message
@log.error e.backtrace.inspect
- return false
+ return false
end
return true
file_size = tok[2].to_i
checksum = tok[3]
- # check download cache
- if File.exist? dst_file and File.directory? dst_file then
+ # check download cache
+ if File.exist? dst_file and File.directory? dst_file then
target_file = File.join(dst_file,file_name)
- else
- target_file = dst_file
+ else
+ target_file = dst_file
end
- if not @cache_dir.nil? and
+ if not @cache_dir.nil? and
check_download_cache( target_file, file_size, checksum ) then
@log.info "Download cache hit! Copied from cache.: #{file_name}"
req.puts "ERROR"
@log.error "Unsupported transporter type! : #{type}"
return false
- end
+ end
req.puts "TRANSPORTER_OK"
else
@log.warn "Unhandled message: #{line}"
end
- end
+ end
rescue => e
- puts "[BuildCommServer] Exception"
+ puts "[BuildCommServer] Exception"
@log.error e.message
@log.error e.backtrace.inspect
- return false
+ return false
end
return true
end
rescue Timeout::Error
false
- end
+ end
private
file_name = File.basename(dst_file)
cache_file = "#{@cache_dir}/#{file_name}"
- @download_cache_mutex.synchronize {
+ @download_cache_mutex.synchronize do
found = false
# check file exist
- if File.exist? cache_file and
+ if File.exist? cache_file and
File.size(cache_file) == file_size and
Utils.checksum(cache_file) == checksum then
# refresh cache dir
curr_time = Time.now
- Dir.entries(@cache_dir).each { |fname|
+ Dir.entries(@cache_dir).each do |fname|
if fname == "." or fname == ".." then next end
file_path = "#{@cache_dir}/#{fname}"
if File.mtime(file_path) + 3600 < curr_time then
FileUtils.rm_rf file_path
end
- }
+ end
return found
- }
+ end
end
def add_download_cache(dst_file)
file_name = File.basename(dst_file)
cache_file = "#{@cache_dir}/#{file_name}"
- @download_cache_mutex.synchronize {
+ @download_cache_mutex.synchronize do
# copy & touch
FileUtils.copy_file(dst_file, cache_file)
FileUtils.touch cache_file
- }
+ end
end
end
@socket.puts( msg )
return true
- end
+ end
def print_stream
begin
l = @socket.gets()
- if l.nil? then
+ if l.nil? then
puts "Connection refused"
- return false
+ return false
end
# check protocol
end
- # handle
+ # handle
def read_lines
begin
# get first line
l = nil
- timeout(5) do
+ timeout(5) do
l = @socket.gets()
end
- if l.nil? then
- return false
+ if l.nil? then
+ return false
end
# check protocol
begin
l = @socket.gets()
- if l.nil? then
+ if l.nil? then
puts "Connection refused"
return nil
end
result = true
begin
l = @socket.gets()
- if l.nil? then
+ if l.nil? then
@log.error "[BuildCommClient] Connection refused"
- return false
+ return false
end
# check protocol
@log.info "Server does not have cached file"
send "CHECK_TRANSPORTER,#{transporter.type}"
- when "TRANSPORTER_OK"
+ when "TRANSPORTER_OK"
if not transporter.send_file( src_file, @socket, true ) then
result = false
else
@log.info "Sending file succeeded!"
end
- when "TRANSPORTER_FAIL"
+ when "TRANSPORTER_FAIL"
@log.warn "Server does not support transporter type: #{transporter.type}"
result = false
end
end
rescue => e
- puts "[BuildCommClient] Exception"
+ puts "[BuildCommClient] Exception"
@log.error e.message
@log.error e.backtrace.inspect
- return false
+ return false
end
return result
begin
l = @socket.gets()
- if l.nil? then
+ if l.nil? then
@log.error "[BuildCommClient] Connection refused"
return false
end
@log.info "Server is ready!"
send "CHECK_TRANSPORTER,#{transporter.type}"
- when "TRANSPORTER_OK"
+ when "TRANSPORTER_OK"
if not transporter.receive_file( dst_file, @socket, true ) then
result = false
else
end
end
rescue => e
- puts "[BuildCommServer] Exception"
+ puts "[BuildCommServer] Exception"
@log.error e.message
@log.error e.backtrace.inspect
- return false
+ return false
end
return result
# check protocol
def protocol_matched?(l)
- version = ( l.split(",")[1].nil? ? "1.0.0" : l.split(",")[1] )
+ version = ( l.split(",")[1].nil? ? "1.0.0" : l.split(",")[1] )
if not l.start_with? "=BEGIN" or
version.nil? or version != VERSION then
return false
else
return true
- end
+ end
end
end
+=begin
+
+ FileTransferViaDirect.rb
+
+Copyright (c) 2000 - 2011 Samsung Electronics Co., Ltd. All rights reserved.
+
+Contact:
+Taejun Ha <taejun.ha@samsung.com>
+Jiil Hyoun <jiil.hyoun@samsung.com>
+Donghyuk Yang <donghyuk.yang@samsung.com>
+DongHee Yang <donghee.yang@samsung.com>
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Contributors:
+- S-Core Co., Ltd
+=end
require 'socket'
require 'log'
conn.puts "FILE_INFO,#{filename},#{size},#{checksum}"
# read file contents
# send via tcp/ip
- File.open(src_file, "rb") {|io|
+ File.open(src_file, "rb") do |io|
while size > 0
buf = io.read(size > 1024*1024 ? 1024*1024 : size)
conn.write( buf )
size -= buf.length
end
- }
+ end
- @log.info "Upload is succeeded!"
+		@log.info "Upload succeeded!"
conn.puts "SEND_OK"
# wait for download result
when "RECEIVE_OK"
@log.info "Received download success message from remote site"
- return true
+ return true
when "RECEIVE_FAIL"
@log.info "Received download fail message from remote site"
@log.error "Unhandled message: #{line}"
return false
end
- end
+ end
end
dst_file = File.join(dst_file, filename)
end
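+		# write the incoming payload to disk in chunks of at most 1MB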
- File.open( dst_file, "wb" ) { |io|
+ File.open( dst_file, "wb" ) do |io|
while size > 0
buf = conn.read(size > 1024*1024 ? 1024*1024 : size)
io.write( buf )
size -= buf.length
end
- }
+ end
conn.puts "RECEIVE_OK"
+=begin
+
+ FileTransferViaFTP.rb
+
+Copyright (c) 2000 - 2011 Samsung Electronics Co., Ltd. All rights reserved.
+
+Contact:
+Taejun Ha <taejun.ha@samsung.com>
+Jiil Hyoun <jiil.hyoun@samsung.com>
+Donghyuk Yang <donghyuk.yang@samsung.com>
+DongHee Yang <donghee.yang@samsung.com>
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Contributors:
+- S-Core Co., Ltd
+=end
require 'socket'
require 'log'
if is_client then
# check ftp info
if @ip.nil? or @port.nil? or @username.nil? or @passwd.nil? then
- @log.error "No FTP information!"
- conn.puts "UPLOAD_FAIL"
- return false
+ @log.error "No FTP information!"
+ conn.puts "UPLOAD_FAIL"
+ return false
end
conn.puts "DOWNLOAD_REQ,#{@ip},#{@port},#{@username},#{@passwd}"
end
ftp_filepath = nil
for attempt in ATTEMPTS
ftp_filepath = putfile( src_file, ip, port, username, passwd )
- if !ftp_filepath.nil? then
+ if !ftp_filepath.nil? then
break
- else
- @log.info "The #{attempt} uploading attempt failed!"
+ else
+				@log.info "The #{attempt} upload attempt failed!"
end
end
- if ftp_filepath.nil? then
- conn.puts "UPLOAD_FAIL"
- return false
- else
- @log.info "Upload is succeeded at #{attempt}"
+ if ftp_filepath.nil? then
+ conn.puts "UPLOAD_FAIL"
+ return false
+ else
+			@log.info "Upload succeeded on the #{attempt} attempt"
conn.puts "UPLOAD_OK,#{ftp_filepath}"
end
when "DOWNLOAD_OK"
@log.info "Received download success message from remote site"
# clean
- cleandir( ftp_filepath, ip, port, username, passwd)
+ cleandir( ftp_filepath, ip, port, username, passwd)
@log.info "Cleaned temporary dir on FTP server: #{ftp_filepath}"
- return true
+ return true
when "DOWNLOAD_FAIL"
@log.info "Received download fail message from remote site"
@log.error "Unhandled message: #{line}"
return false
end
- end
+ end
end
if is_client then
# check ftp info
if @ip.nil? or @port.nil? or @username.nil? or @passwd.nil? then
- @log.error "No FTP information!"
- conn.puts "DOWNLOAD_FAIL"
- return false
+ @log.error "No FTP information!"
+ conn.puts "DOWNLOAD_FAIL"
+ return false
end
conn.puts "UPLOAD_REQ,#{@ip},#{@port},#{@username},#{@passwd}"
end
dst_filepath = nil
for attempt in ATTEMPTS
dst_filepath = getfile( filepath, dst_file, ip, port, username, passwd )
- if not dst_filepath.nil? then
+ if not dst_filepath.nil? then
break
- else
- @log.info "The #{attempt} downloading attempt failed!"
- end
- end
- if dst_filepath.nil? then
- conn.puts "DOWNLOAD_FAIL"
- return false
- else
- @log.info " Server is the #{attempt} successful attempt to download"
+ else
+				@log.info "The #{attempt} download attempt failed!"
+ end
+ end
+ if dst_filepath.nil? then
+ conn.puts "DOWNLOAD_FAIL"
+ return false
+ else
+			@log.info "Download succeeded on the #{attempt} attempt"
conn.puts "DOWNLOAD_OK"
return true
end
if port.nil? or port == "" then
ftp.connect(ip)
else
- ftp.connect(ip, port)
+ ftp.connect(ip, port)
end
@log.info "[FTP log] Connected FTP server (#{ip}:#{port})"
ftp.login(username, passwd)
ftp.binary = true
- ftp.passive = true
+ ftp.passive = true
ftp.mkdir(uniqdir)
ftp.chdir(uniqdir)
ftp.put(bpath)
@log.info "[FTP log] Put a file"
- @log.info "[FTP log] from \"#{bpath}\" to \"#{ftp_filepath}\""
+ @log.info "[FTP log] from \"#{bpath}\" to \"#{ftp_filepath}\""
files = ftp.list(filename)
- if files.empty? then
+ if files.empty? then
@log.error "[FTP log] Failed to upload file (#{filename} does not exist)"
- return nil
+ return nil
end
ftp.quit
@log.info "[FTP log] Disconnected FTP server"
@log.error "[FTP log] Exception"
@log.error e.message
@log.error e.backtrace.inspect
- return nil
+ return nil
end
return ftp_filepath
end
ftp = Net::FTP.new
if port.nil? or port == "" then
ftp.connect(ip)
- else
+ else
ftp.connect(ip, port)
- end
+ end
@log.info "[FTP log] Connected FTP server (#{ip}:#{port})"
ftp.login(username, passwd)
ftp.binary = true
@log.error "[FTP log] Exception"
@log.error e.message
@log.error e.backtrace.inspect
- return nil
+ return nil
end
if not File.exist? dst_file then
@log.error "[FTP log] Failed to download file (#{dst_file} does not exist)"
return nil
- end
+ end
return bpath
end
if port.nil? or port == "" then
ftp.connect(ip)
else
- ftp.connect(ip, port)
- end
+ ftp.connect(ip, port)
+ end
@log.info "[FTP log] Connected FTP server (#{ip}:#{port})"
ftp.login(username, passwd)
old_dir = ftp.pwd
end
ftp.chdir(old_dir)
ftp.rmdir(dirname)
- @log.info "[FTP log] Clean dir (#{dirname})"
+ @log.info "[FTP log] Clean dir (#{dirname})"
ftp.quit
@log.info "[FTP log] Disconnected FTP server"
rescue => e
@log.error "[FTP log] Exception"
@log.error e.message
@log.error e.backtrace.inspect
- return nil
+ return nil
end
return true
list.uniq!
return list
- end
+ end
# scan all source dependencies
list.uniq!
return list
- end
+ end
# scan all install dependencies
list.uniq!
return list
- end
+ end
def package_exist?(target_os, host_os)
@packages.each do |pkg|
# only package that used in target os
- if pkg.os_list.include?(target_os) and
+ if pkg.os_list.include?(target_os) and
pkg.build_host_os.include?(host_os)
return true
end
end
return false
- end
+ end
def get_version()
$LOAD_PATH.unshift File.dirname(__FILE__)
-class ScheduledActionHandler
+class ScheduledActionHandler
attr_accessor :quit
# init
# start thread
def start()
- @thread = Thread.new {
+ @thread = Thread.new do
# main
thread_main()
- # close
+ # close
terminate()
- }
+ end
end
# if periodic action, renew the time
# else remove it from list
if action.period != 0 then
- while current_time > action.time
+ while current_time > action.time
action.time = action.time + action.period
end
else
- @actions.delete(action)
+ @actions.delete(action)
end
end
end
# sleep 10 sec
- sleep 10
+ sleep 10
end
end
class Version < Array
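+	# a version is stored as an array of integers, so Array#<=> compares components numerically ("1.10" > "1.9")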
def initialize s
- super(s.split('.').map { |e| e.to_i })
- end
- def < x
- (self <=> x) < 0
- end
- def <= x
- (self <=> x) <= 0
- end
- def > x
- (self <=> x) > 0
- end
- def >= x
- (self <=> x) >= 0
- end
- def == x
- (self <=> x) == 0
+ super(s.split('.').map { |e| e.to_i })
+ end
+ def < x
+ (self <=> x) < 0
+ end
+ def <= x
+ (self <=> x) <= 0
+ end
+ def > x
+ (self <=> x) > 0
+ end
+ def >= x
+ (self <=> x) >= 0
+ end
+ def == x
+ (self <=> x) == 0
end
def compare x
if self < x then return -1
elsif self == x then return 0
else return 1 end
- end
-end
+ end
+end
=end
$LOAD_PATH.unshift File.dirname(__FILE__)
-require "Version"
+require "Version"
class Dependency
attr_accessor :package_name, :comp, :base_version, :target_os_list
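+	# one parsed dependency: package name plus an optional version constraint and target OS list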
@comp = comp
@base_version = base_version
@target_os_list = target_os_list
- end
+ end
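+	# e.g. "libfoo ( >= 1.0.0 ) [ ubuntu-32|ubuntu-64 ]"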
def to_s
- string = @package_name
- if not @comp.nil? and not @base_version.nil? then
- string = string + " ( #{@comp} #{@base_version} )"
+ string = @package_name
+ if not @comp.nil? and not @base_version.nil? then
+ string = string + " ( #{@comp} #{@base_version} )"
end
- if not @target_os_list.empty? then
+ if not @target_os_list.empty? then
string = string + " [ #{@target_os_list.join("|")} ]"
- end
+ end
return string
- end
+ end
def match? ver
- if @base_version.nil?
+ if @base_version.nil?
return true
end
return Version.new(ver) < Version.new(@base_version)
else
return true
- end
+ end
end
-end
+end
#!/usr/bin/ruby
=begin
- execute_with_log.rb
+ execute_with_log.rb
Copyright (c) 2000 - 2011 Samsung Electronics Co., Ltd. All rights reserved.
cmd = Utils.generate_shell_command(cmd, nil)
# execute and write log
-IO.popen("#{cmd} 2>&1") { |io|
- io.each { |line|
+IO.popen("#{cmd} 2>&1") do |io|
+ io.each do |line|
log.info line
- }
-}
+ end
+end
# return exit code
exit $?.exitstatus
$LOAD_PATH.unshift File.dirname(__FILE__)
require "mailConfig"
-class Mail
+class Mail
- def Mail.send_mail( mail_to, subject, contents )
+ def Mail.send_mail( mail_to, subject, contents )
if mail_to.nil? or mail_to.empty? \
or subject.nil? or subject.empty? \
- or contents.nil? or contents.empty? then
+ or contents.nil? or contents.empty? then
return false
- end
+ end
message = <<MESSAGE_END
From: #{SENDER}
end
def Mail.send_mail2( mail_to_list, message )
- if mail_to_list.empty? then
+ if mail_to_list.empty? then
puts "There is no maintainer email address "
else
- begin
+ begin
Net::SMTP.start('localhost') do |smtp|
- smtp.send_message( message, SENDER, mail_to_list)
- end
- rescue => e
+ smtp.send_message( message, SENDER, mail_to_list)
+ end
+ rescue => e
puts "Can't send result email"
puts e.message
end
def Mail.parse_email( low_email_list )
mail_list = []
- low_email_list.split(",").each do | low_email |
+ low_email_list.split(",").each do | low_email |
ms = low_email.index('<')
me = low_email.index('>')
- if ms.nil? or me.nil? then
- next
- else
- mail = low_email[(ms+1)..(me-1)]
+ if ms.nil? or me.nil? then
+ next
+ else
+ mail = low_email[(ms+1)..(me-1)]
end
if mail.include?("@") then mail_list.push mail end
- end
+ end
- return mail_list
+ return mail_list
end
end
#string trim
dependency = dep.tr " \t\r\n", ""
#version extract
- vs = dependency.index('(')
- ve = dependency.index(')')
- if not vs.nil? and not ve.nil? then
+ vs = dependency.index('(')
+ ve = dependency.index(')')
+ if not vs.nil? and not ve.nil? then
comp = dependency[(vs+1)..(vs+2)]
base_version = dependency[(vs+3)..(ve-1)]
- end
+ end
#os list extract
- os = dependency.index('[')
- oe = dependency.index(']')
- if not os.nil? and not oe.nil? then
+ os = dependency.index('[')
+ oe = dependency.index(']')
+ if not os.nil? and not oe.nil? then
target_os_list = dependency[(os+1)..(oe-1)].split("|")
- end
+ end
# package_name extract
pe = dependency.index(/[\]\[\)\(]/)
if pe.nil?
package_name = dependency[0..pe-1]
end
#package_name check
- if not package_name.empty? then
+ if not package_name.empty? then
dependency_list.push Dependency.new(package_name,comp,base_version,target_os_list)
- end
- end
+ end
+ end
return dependency_list
- end
-end
+ end
+end
=begin
- utils.rb
+ utils.rb
Copyright (c) 2000 - 2011 Samsung Electronics Co., Ltd. All rights reserved.
os = "windows-#{arch}"
when "Darwin"
os = "macos-64"
- end
+ end
- return os
+ return os
end
def Utils.get_all_OSs()
- return ["ubuntu-32","ubuntu-64","windows-32","windows-64","macos-64","opensuse-32", "opensuse-64"]
+ return ["ubuntu-32","ubuntu-64","windows-32","windows-64","macos-64","opensuse-32", "opensuse-64"]
end
- def Utils.create_uniq_name
- time = Time.new
+ def Utils.create_uniq_name
+ time = Time.new
- # uniq snapshot_name name is year_month_day_hour_min_sec_microsec
+		# unique snapshot name: month_day_hour_min_sec_microsec
return time.strftime("%m%d%H%M%S") + time.usec.to_s.rjust(6, '0')
end
case protocol
when "http" then
return true
- else
+ else
return false
end
end
slen = arr_sver.length
tlen = arr_tver.length
- len = tlen
+ len = tlen
if slen > tlen then
gap = slen - tlen
arr_sver.push("0")
end
len = tlen
- end
+ end
len.times do |i|
if arr_sver[i].to_i < arr_tver[i].to_i then
cmd = generate_shell_command(cmd, os_category)
`#{cmd}`
- if $?.to_i == 0 then ret = true else ret = false end
+ if $?.to_i == 0 then ret = true else ret = false end
return ret
end
cmd = generate_shell_command(cmd, os_category)
# get result
- IO.popen("#{cmd} 2>&1") { |io|
+ IO.popen("#{cmd} 2>&1") do |io|
io.each do |line|
result_lines.push line
end
- }
+ end
- if $?.to_i == 0 then
+ if $?.to_i == 0 then
return result_lines
- else
+ else
return nil
- end
+ end
end
def Utils.execute_shell_return_ret(cmd, os_category = nil)
cmd = generate_shell_command(cmd, os_category)
# print log
- pipe = IO.popen("#{cmd} 2>&1") { |io|
+ pipe = IO.popen("#{cmd} 2>&1") do |io|
io.each do |line|
logger.info line
end
- }
+ end
return [nil, nil]
end
save_stderr.close if save_stderr
($0 == __FILE__ ) ? processInfo : processInfo.unpack("LLLL")[2]
- end
+ end
def Utils.is_absolute_path(path)
- if is_unix_like_os( HOST_OS ) then
+ if is_unix_like_os( HOST_OS ) then
# if path start "/" then absoulte path
if path.start_with?("/") then
return true
else
- return false
+ return false
end
- elsif is_windows_like_os( HOST_OS ) then
+ elsif is_windows_like_os( HOST_OS ) then
# if path start "c:/" or "D:/" or ... then absoulte path
if path =~ /^[a-zA-Z]:[\/]/ then
return true
else
return false
end
- else
+ else
puts "HOST_OS is invalid"
end
end
# this will be used on MinGW/MSYS
def Utils.get_unix_path(path)
- if is_unix_like_os( HOST_OS ) then
+ if is_unix_like_os( HOST_OS ) then
return path
elsif is_windows_like_os( HOST_OS ) then
new_path = path
new_path = "/" + new_path[0,1] + new_path[2..-1]
end
return new_path
- else
+ else
puts "HOST_OS is invalid"
return path
end
end
def Utils.file_lock(lock_file_name)
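+		# take an exclusive (blocking) advisory lock on the given lock file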
- lock_file = File.new(lock_file_name, File::RDWR|File::CREAT, 0644)
+ lock_file = File.new(lock_file_name, File::RDWR|File::CREAT, 0644)
lock_file.flock(File::LOCK_EX)
lock_file.rewind
lock_file.flush
ip = parse1[0]
port = 21
else
- return nil
- end
+ return nil
+ end
return [ip, port, id, passwd]
end
filename = File.basename(file_path)
ext = File.extname(filename)
- # path should be unix path if it is used in tar command
+ # path should be unix path if it is used in tar command
_package_file_path = Utils.get_unix_path(file_path)
_path = Utils.get_unix_path(path)
# check exit code
ret = execute_shell "#{extract_file_command}"
- if not ret then return false end
+ if not ret then return false end
- # check result file
+ # check result file
if not path.nil? then
target_file_path = File.join(path, target_file)
else
# check if the os is unix-like
def Utils.is_unix_like_os(os_name)
- if os_name.start_with? "ubuntu-" or
- os_name.start_with? "opensuse-" or
+ if os_name.start_with? "ubuntu-" or
+ os_name.start_with? "opensuse-" or
os_name.start_with?"macos-" then
return true
else
pids += get_sub_processes(base_pid)
# kill&wait
- pids.each { |pid|
+ pids.each do |pid|
begin
os_category = get_os_category(HOST_OS)
- if os_category != "windows" then
+ if os_category != "windows" then
Process.kill("TERM", pid)
else
Process.kill(9, pid)
# On windows, SIGTERM does not working
Utils.execute_shell("kill #{pid}")
end
- }
+ end
Process.waitpid2(base_pid)
end
# generate pid => ppid hash
# NOTE. MinGW does not support "-o" option and has different output format
os_category = get_os_category(HOST_OS)
- if os_category != "windows" then
- Hash[*`ps -eo pid,ppid`.scan(/\d+/).map{|x| x.to_i}].each{|pid,ppid|
+ if os_category != "windows" then
+ Hash[*`ps -eo pid,ppid`.scan(/\d+/).map{|x| x.to_i}].each do |pid,ppid|
descendants[ppid] << descendants[pid]
- }
+ end
return descendants[base].flatten - [base]
else
result = []
require 'rubygems'
require 'sys/proctable'
win_pids = [base]
- Sys::ProcTable.ps { |proc|
+ Sys::ProcTable.ps do |proc|
win_pids << proc.pid if win_pids.include?(proc.ppid)
- }
+ end
result += win_pids
- # gather MinGW/MSYS process id
- Hash[*`ps -e`.scan(/^[\s]*(\d+)[\s]+(\d+)/).flatten.map{|x| x.to_i}].each {|pid,ppid|
+ # gather MinGW/MSYS process id
+ Hash[*`ps -e`.scan(/^[\s]*(\d+)[\s]+(\d+)/).flatten.map{|x| x.to_i}].each do |pid,ppid|
descendants[ppid] << descendants[pid]
- }
+ end
# get sub processes of windows pids
- win_pids.each { |pid|
+ win_pids.each do |pid|
if not descendants[pid].nil? then
result += descendants[pid].flatten
end
- }
+ end
result.uniq!
- result = result - [base]
+ result = result - [base]
return result
end
- end
+ end
HOST_OS = Utils.identify_current_OS()
end
- # set static variable in WORKING_DIR, HOME
- if defined?(WORKING_DIR).nil? then WORKING_DIR = Dir.pwd end
- if defined?(HOME).nil? then
+ # set static variable in WORKING_DIR, HOME
+ if defined?(WORKING_DIR).nil? then WORKING_DIR = Dir.pwd end
+ if defined?(HOME).nil? then
# get home directory, using Dir.chdir
Dir.chdir
HOME = Dir.pwd
- Dir.chdir WORKING_DIR
+ Dir.chdir WORKING_DIR
end
end
- private
+ private
def execute_internal()
# update pkg info
- @pkgserver.reload_dist_package
+ @pkgserver.reload_dist_package
- # sync
+ # sync
@pkgserver.sync( @dist_name, false )
- end
+ end
end
# start thread
def start()
# scan all sync distribution
- @server.distribution_list.each do |dist|
+ @server.distribution_list.each do |dist|
# if dist does not have parent server then skip sync
if dist.server_url.empty? then next end
end
# start handler
- @handler.start
+ @handler.start
end
end
require 'socket'
require 'thread'
-$LOAD_PATH.unshift File.dirname(__FILE__)
-$LOAD_PATH.unshift File.dirname(File.dirname(__FILE__))+"/common"
-$LOAD_PATH.unshift File.dirname(File.dirname(__FILE__))+"/build_server"
+$LOAD_PATH.unshift File.dirname(__FILE__)
+$LOAD_PATH.unshift File.dirname(File.dirname(__FILE__))+"/common"
+$LOAD_PATH.unshift File.dirname(File.dirname(__FILE__))+"/build_server"
require "packageServerConfig"
require "BuildComm"
require "net/ftp"
# mutax for register operation
$register_mutex = Mutex.new
-class SocketRegisterListener
+class SocketRegisterListener
# initialize
- def initialize (parent)
- @parent_server = parent
+ def initialize (parent)
+ @parent_server = parent
@thread = nil
@finish_loop = false
@log = @parent_server.log
end
# start listening
- def start()
+ def start()
@log.info "SocketRegisterListener start"
- @thread = Thread.new {
- main()
- }
+ @thread = Thread.new do
+ main()
+ end
end
# quit listening
private
- # thread main
+ # thread main
def main()
@log.info "SocketRegisterListener entering main loop"
# server open
begin
@comm_server = BuildCommServer.create(@parent_server.port, @log)
rescue => e
- @log.info "Server creation failed"
- @log.error e.message
+ @log.info "Server creation failed"
+ @log.error e.message
@log.error e.backtrace.inspect
return
end
# loop
- @log.info "Entering Control Listening Loop ... "
+ @log.info "Entering Control Listening Loop ... "
@finish_loop = false
@comm_server.wait_for_connection(@finish_loop) do |req|
begin
handle_job_request( req )
rescue => e
@log.info "Error occurred in handle_job_request function"
- @log.error e.message
+ @log.error e.message
@log.error e.backtrace.inspect
end
- end
+ end
# quit
@comm_server.terminate
# wait for job requests
def wait_for_job_requests
req_list = []
- req_list.push @tcp_server.accept
+ req_list.push @tcp_server.accept
return req_list
end
def handle_job_request( req )
# read request
- req_line = req.gets
+ req_line = req.gets
if req_line.nil? then return end
# parse request
cmd = ""
if req_line.split("|").count > 0 then
- cmd = req_line.split("|")[0].strip
+ cmd = req_line.split("|")[0].strip
end
case cmd
when "UPLOAD"
- Thread.new {
+ Thread.new do
handle_cmd_upload( req_line, req )
- }
+ end
when "REGISTER"
- Thread.new {
+ Thread.new do
handle_cmd_register( req_line, req )
- }
+ end
when "STOP"
handle_cmd_stop( req_line, req )
else
- @log.error "Received Unknown REQ: #{req_line}"
+ @log.error "Received Unknown REQ: #{req_line}"
end
- @log.info "REQ processing done"
- end
+ @log.info "REQ processing done"
+ end
- # "UPLOAD"
- def handle_cmd_upload( line, req )
+ # "UPLOAD"
+ def handle_cmd_upload( line, req )
@log.info "Received File transfer REQ : #{line}"
BuildCommServer.send_begin(req)
tok = line.split("|").map { |x| x.strip }
- if tok.count > 1 then
- dock_name = tok[1].strip
+ if tok.count > 1 then
+ dock_name = tok[1].strip
incoming_dir = "#{@parent_server.incoming_path}/#{dock_name}"
FileUtils.mkdir_p(incoming_dir)
else
incoming_dir = "#{@parent_server.incoming_path}"
- end
+ end
file_path_list = []
begin
BuildCommServer.send_end(req)
end
- # "Register"
+ # "Register"
def handle_cmd_register( line, req )
@log.info "Received register REQ : #{line}"
BuildCommServer.send_begin(req)
tok = line.split("|").map { |x| x.strip }
- if tok.count < 3 then
- @log.error "Received Wrong REQ : #{line}"
+ if tok.count < 3 then
+ @log.error "Received Wrong REQ : #{line}"
BuildCommServer.send(req, "ERROR|Invalid REQ format")
return
end
- dist_name = tok[1].strip
+ dist_name = tok[1].strip
if tok[2].start_with? "DOCK" then
dock_name = tok[3]
else
file_path_list.push "#{@parent_server.incoming_path}/#{dock_name}/#{tok[idx]}"
end
- idx = idx + 1
- end
+ idx = idx + 1
+ end
# register mutex
- $register_mutex.synchronize {
+ $register_mutex.synchronize do
begin
@parent_server.reload_dist_package()
- snapshot_name = @parent_server.register( file_path_list, dist_name, true, false, true)
- BuildCommServer.send(req,"SUCC|#{snapshot_name}")
+ snapshot_name = @parent_server.register( file_path_list, dist_name, true, false, true)
+ BuildCommServer.send(req,"SUCC|#{snapshot_name}")
rescue => e
@log.error "register failed"
@log.error e.message
@parent_server.release_lock_file
return
end
- }
+ end
if not dock_name.empty? then
FileUtils.rm_rf "#{@parent_server.incoming_path}/#{dock_name}"
BuildCommServer.send_begin(req)
tok = line.split("|").map { |x| x.strip }
- if tok.count < 2 then
- @log.error "Received Wrong REQ : #{line}"
+ if tok.count < 2 then
+ @log.error "Received Wrong REQ : #{line}"
BuildCommServer.send(req, "ERROR|Invalid REQ format")
return
end
- passwd = tok[1].strip
+ passwd = tok[1].strip
- if @parent_server.passwd.eql? passwd then
+ if @parent_server.passwd.eql? passwd then
@parent_server.finish = true
@log.info "Package server stop flag set"
BuildCommServer.send(req,"SUCC")
BuildCommServer.send_end(req)
BuildCommServer.disconnect(req)
end
-end
+end
=begin
- client.rb
+ client.rb
Copyright (c) 2000 - 2011 Samsung Electronics Co., Ltd. All rights reserved.
DEFAULT_INSTALL_DIR = "#{Utils::HOME}/build_root"
DEFAULT_SERVER_ADDR = "http://172.21.17.55/dibs/unstable"
OS_INFO_FILE = "os_info"
- ARCHIVE_PKG_LIST_FILE = "archive_pkg_list"
+ ARCHIVE_PKG_LIST_FILE = "archive_pkg_list"
attr_accessor :server_addr, :location, :pkg_hash_os, :is_server_remote, :installed_pkg_hash_loc, :archive_pkg_list, :all_dep_list, :log, :support_os_list, :config_dist_path, :download_path, :tmp_path, :snapshot_path, :snapshots_path, :snapshot_url
if server_addr.nil? then server_addr = get_default_server_addr() end
if location.nil? then location = get_default_inst_dir() end
- # chop server address, if end with "/"
+ # chop server address, if end with "/"
if server_addr.strip.end_with? "/" then server_addr = server_addr.chop end
@snapshot_path = nil
@snapshot_url = false
if is_snapshot_url(server_addr) then
- @snapshot_url = true
- @server_addr, @snapshot_path = split_addr_and_snapshot(server_addr)
- else
+ @snapshot_url = true
+ @server_addr, @snapshot_path = split_addr_and_snapshot(server_addr)
+ else
@server_addr = server_addr
- end
+ end
@location = location
@pkg_hash_os = {}
# read installed pkg list, and create hash
if not File.exist? @location then FileUtils.mkdir_p "#{@location}" end
- @log.info "Update local package list.. [#{@location}]"
+ @log.info "Update local package list.. [#{@location}]"
read_installed_pkg_list()
# read remote pkg list, and hash list
- @log.info "Update remote package list and supported os list.."
+ @log.info "Update remote package list and supported os list.."
update()
@log.info "Initialize - #{server_addr}, #{location}"
end
public
# update package list from server
def update()
- if not @snapshot_url then
- $get_snapshot_mutex.synchronize {
+ if not @snapshot_url then
+ $get_snapshot_mutex.synchronize do
@snapshot_path = get_lastest_snapshot(@is_server_remote)
- }
+ end
end
@log.info "The latest snapshot : #{@snapshot_path}"
- if @snapshot_path.nil? then
+ if @snapshot_path.nil? then
@log.warn "Failed to get the latest package list"
@snapshot_path = ""
end
exists_snapshot = false
- if is_snapshot_exist(@snapshot_path) then
- @log.info "Snapshot information is already cached [#{get_pkglist_path()}]"
+ if is_snapshot_exist(@snapshot_path) then
+ @log.info "Snapshot information is already cached [#{get_pkglist_path()}]"
exists_snapshot = true
- else
- @log.info "Snapshot information is not cached"
+ else
+ @log.info "Snapshot information is not cached"
end
list_path = get_pkglist_path()
- if list_path.nil? then
- @log.error "Failed to get package list path"
- return false
+ if list_path.nil? then
+ @log.error "Failed to get package list path"
+ return false
end
- clean_list()
+ clean_list()
if exists_snapshot then
read_supported_os_list(list_path)
read_remote_pkg_list(list_path)
read_archive_pkg_list(list_path)
else
- $update_mutex.synchronize {
+ $update_mutex.synchronize do
uniq_name = Utils.create_uniq_name
tmp_dir = File.join(@config_dist_path, uniq_name)
FileUtils.mkdir_p tmp_dir
if not download_os_list(@is_server_remote, tmp_dir) then
@log.error "\"#{@server_addr}\" does not have supported os list file properly."
- Utils.execute_shell("rm -rf #{tmp_dir}")
+ Utils.execute_shell("rm -rf #{tmp_dir}")
return false
- else read_supported_os_list(tmp_dir) end
+ else read_supported_os_list(tmp_dir) end
if not download_pkg_list(@is_server_remote, tmp_dir) then
- @log.error "\"#{@server_addr}\" does not have package list file properly."
- Utils.execute_shell("rm -rf #{tmp_dir}")
+ @log.error "\"#{@server_addr}\" does not have package list file properly."
+ Utils.execute_shell("rm -rf #{tmp_dir}")
return false
- else read_remote_pkg_list(tmp_dir) end
+ else read_remote_pkg_list(tmp_dir) end
if not download_archive_pkg_list(@is_server_remote, tmp_dir) then
- @log.error "\"#{@server_addr}\" does not have archive package list file properly. This error can be ignored"
- else read_archive_pkg_list(tmp_dir) end
+ @log.error "\"#{@server_addr}\" does not have archive package list file properly. This error can be ignored"
+ else read_archive_pkg_list(tmp_dir) end
Utils.execute_shell("mv #{tmp_dir} #{list_path}")
@log.info "Moved \"#{tmp_dir}\" to"
@log.info " \"#{list_path}\""
# tmp_dir should be removed whether mv command is failed
Utils.execute_shell("rm -rf #{tmp_dir}")
- remove_snapshots()
- }
- end
+ remove_snapshots()
+ end
+ end
- $update_mutex.synchronize {
+ $update_mutex.synchronize do
create_default_config(@server_addr)
@log.info "Update package list from \"#{@server_addr}\".. OK"
- }
+ end
return true
end
@archive_pkg_list.clear
@support_os_list.clear
@log.info "Cleared package list and supported OS list.. OK"
- end
+ end
public
# download package
# download files
file_local_path = []
dependent_pkg_list.each do |p|
- pkg_name = get_attr_from_pkg(p, os, "name")
+ pkg_name = get_attr_from_pkg(p, os, "name")
pkg_path = get_attr_from_pkg(p, os, "path")
pkg_ver = get_attr_from_pkg(p, os, "version")
pkg_checksum = get_attr_from_pkg(p, os, "checksum")
pkg_file_prefix = "#{@download_path}/#{pkg_name}_*_#{os}.zip"
pkg_files = Dir[pkg_file_prefix].sort_by { |f| File.mtime(f) }.reverse
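+			# prune old downloads, keeping only the three newest files for this package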
- if not pkg_files.nil? and pkg_files.length >= 4 then
+ if not pkg_files.nil? and pkg_files.length >= 4 then
Utils.execute_shell("rm -rf #{pkg_files[3..-1].join(" ")}")
@log.info "Removed old package files.."
- @log.info " * #{pkg_files[3..-1].join(", ")}"
+ @log.info " * #{pkg_files[3..-1].join(", ")}"
end
- end
+ end
private
def move_downloaded_pkg(filepath, distpath)
- if filepath.nil? or filepath == "" then return nil end
+ if filepath.nil? or filepath == "" then return nil end
filename = filepath.split('/')[-1]
if not File.exist? distpath then FileUtils.mkdir_p "#{distpath}" end
distfile = File.join(distpath, filename)
@log.info "Moving \"#{filename}\" to download cache directory"
@log.info " [path: #{distpath}]"
- $filemove_mutex.synchronize {
+ $filemove_mutex.synchronize do
if not File.exist? distfile then
Utils.execute_shell("mv #{filepath} #{distfile}")
- else
- Utils.execute_shell("rm -f #{filepath}")
- return distfile
+ else
+ Utils.execute_shell("rm -f #{filepath}")
+ return distfile
end
- }
+ end
if File.exist? distfile then return distfile
- else
+ else
@log.info "Failed to move [#{filename}] to "
@log.info " [#{distpath}]"
- return nil
+ return nil
end
- end
+ end
private
def remove_snapshots()
- listing_prefix = "#{@snapshots_path}/*"
+ listing_prefix = "#{@snapshots_path}/*"
dirs = Dir[listing_prefix].sort_by { |f| File.mtime(f) }.reverse
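+			# prune old snapshot info directories, keeping only the 19 newest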
if not dirs.nil? and dirs.length >= 20 then
- Utils.execute_shell("rm -rf #{dirs[19..-1].join(" ")}")
+ Utils.execute_shell("rm -rf #{dirs[19..-1].join(" ")}")
@log.info "Removed old snapshots.."
- @log.info " * #{dirs[19]} ~ "
- end
- end
+ @log.info " * #{dirs[19]} ~ "
+ end
+ end
private
def get_cached_filepath(pkg_filename, pkg_checksum, pkg_size)
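+		# use the cached package file only when both its checksum and size match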
checksum = `sha256sum #{cached_filepath}`.split(" ")[0]
size = `du -b #{cached_filepath}`.split[0].strip
if checksum.eql? pkg_checksum and size.eql? pkg_size then
- return cached_filepath
- end
- end
+ return cached_filepath
+ end
+ end
return nil
- end
+ end
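# Illustrative sketch, not part of the original source: get_cached_filepath
# only trusts a cached file when both its SHA-256 checksum and its byte size
# match the values published for the package. The same check in pure Ruby
# (the original shells out to sha256sum and "du -b"):
require "digest"

def cache_entry_valid?(path, expected_checksum, expected_size)
  return false if not File.file? path
  Digest::SHA256.file(path).hexdigest.eql?(expected_checksum) and
    File.size(path).to_s.eql?(expected_size.to_s)
end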
public
# download dependent source
end
# register file
- if not binary_list.empty? then
+ if not binary_list.empty? then
client = BuildCommClient.create(ip, port, @log)
dist = get_distribution
if dist.empty? then
@log.error "Failed to register"
return nil
end
- end
+ end
client.terminate
snapshot = @server_addr + "/snapshots/" + snapshot
list = get_all_reverse_install_dependent_packages_remote(pkg_name, os, true)
- if not list.nil? then
+ if not list.nil? then
list.each do |p|
ilist = get_attr_from_pkg(p, os, "install_dep_list")
if ilist.nil? then next end
return true
end
- private
+ private
# get distribution
def get_distribution()
server = @server_addr
server = server.delete ".:/@"
return server
- end
+ end
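# Illustrative example, not part of the original source: get_distribution
# flattens the server address into an identifier by deleting '.', ':', '/'
# and '@'. For a hypothetical address:
#
#   "http://127.0.0.1/dibs/unstable".delete ".:/@"
#   #=> "http127001dibsunstable"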
public
# install package
def install(pkg_name, os, trace, force)
ret = install_internal( pkg_name, os, trace, force )
- return ret
+ return ret
end
# TODO: need to compare dependent package version
# install packages including dependent packages
dependent_pkg_list.each do |pkg|
- if not install_pkg(pkg, os, force) then
+ if not install_pkg(pkg, os, force) then
@log.error "#{pkg} does not exist"
return false
end
if not repos_paths.nil? then
# search
binpkgs = []
- repos_paths.each { |repos_path|
+ repos_paths.each do |repos_path|
binpkgs += Dir.glob("#{repos_path}/#{p.package_name}_*_#{new_pkg_os}.zip")
- }
+ end
if not binpkgs.empty? then
if not install_local_pkg_internal(binpkgs[0], true, false, repos_paths) then
@log.warn "#{p} package is not installed"
end
- else
+ else
if not install_internal(p.package_name, new_pkg_os, true, false) then
@log.warn "#{p} package is not installed"
end
end
- else
+ else
if not install_internal(p.package_name, new_pkg_os, true, false) then
@log.warn "#{p} package is not installed"
end
- end
+ end
end
end
case compare_result
when -1 then next
when 0 then next
- when 1 then
+ when 1 then
@log.output "\"#{k}\" package : #{installed_ver} -> #{remote_ver}"
- update_pkgs.push(k)
+ update_pkgs.push(k)
end
end
@log.info "Checked packages for upgrading.. OK"
- return update_pkgs
+ return update_pkgs
end
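# Illustrative sketch, not part of the original source: the case statement
# above assumes a -1/0/1 version comparison (Utils.compare_version is defined
# elsewhere in dibs/common and its exact call is not shown here); only a
# result of 1, i.e. a newer remote version, queues the package for upgrade.
# A minimal dotted-version comparator with that -1/0/1 contract:
def compare_dotted_versions(a, b)
  return a.split(".").map { |x| x.to_i } <=> b.split(".").map { |x| x.to_i }
end
# compare_dotted_versions("1.2.0", "1.10.0")   #=> -1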
public
public
# get default path for installing
def get_default_inst_dir()
- return Dir.pwd
+ return Dir.pwd
end
private
if not check_installed_pkg(pkg_name) then
@log.error "\"#{pkg_name}\" package is not installed."
- return false
+ return false
end
pkg_ver = get_attr_from_installed_pkg(pkg_name, "version")
return true
end
- public
+ public
# clean
def clean(force)
if not force then
pkg_list.each do |pkg|
pkg.build_dep_list.each do |dep|
if dep.package_name.eql? pkg_name and
- not dep.target_os_list.nil? and
+ not dep.target_os_list.nil? and
dep.target_os_list.include? os then
result.push(pkg)
end
if reverse.nil? then reverse = true end
@all_dep_list.clear
- begin
+ begin
get_build_dependency_arr(pkg_name, os, 0)
# in case of cross build dependency
rescue SystemStackError
if p[0].to_i.eql? i then
d = p[1]
remote_os = get_attr_from_pkg(d.package_name, os, "os")
- remote_ver = get_attr_from_pkg(d.package_name, os, "version")
+ remote_ver = get_attr_from_pkg(d.package_name, os, "version")
if not d.target_os_list.include? remote_os then
@log.error "\"#{pkg_name}\" package needs \"#{d.package_name}\" #{d.target_os_list.to_s}, but \"#{d.package_name}\" (#{remote_os}) package is in server"
return nil
if reverse.nil? then reverse = true end
@all_dep_list.clear
- begin
+ begin
get_install_dependency_arr(pkg_name, os, force, 0)
# in case of cross build dependency
rescue SystemStackError
@all_dep_list.each do |p|
if p[0].to_i.eql? i then
d = p[1]
- remote_ver = get_attr_from_pkg(d.package_name, os, "version")
+ remote_ver = get_attr_from_pkg(d.package_name, os, "version")
if not d.match? remote_ver then
@log.error "\"#{pkg_name}\" package needs \"#{d.package_name}\" #{d.comp} #{d.base_version}, but \"#{d.package_name}\" (#{remote_ver}) package is in server"
return nil
end
public
- # show all packages information
+ # show all packages information
def show_pkg_list(os)
pkg_hash = @pkg_hash_os[os]
if pkg_hash.nil? then
end
public
- # show all installed packages information
+ # show all installed packages information
def show_installed_pkg_list()
file_path = get_installed_pkg_list_file_path()
@log.error "Installed package list does not exist"
return nil
end
- pkg_all_list = []
+ pkg_all_list = []
pkg_list = pkg_hash.values
pkg_list.each do |p|
pkg_all_list.push([p.package_name, p.version, p.description])
get_install_dependency_arr(l.package_name, os, force, n+1)
end
- return
+ return
end
private
s = "#{n}:#{pkg_name}"
installed_pkg_hash_key = get_installed_pkg_list_file_path()
- pkg_hash = @installed_pkg_hash_loc[installed_pkg_hash_key]
+ pkg_hash = @installed_pkg_hash_loc[installed_pkg_hash_key]
pkg_list = pkg_hash.values
pkg_list.each do |pkg|
pkg.install_dep_list.each do |l|
def get_all_reverse_install_dependency_arr_remote(pkg_name, os, n)
s = "#{n}:#{pkg_name}"
- pkg_hash = @pkg_hash_os[os]
+ pkg_hash = @pkg_hash_os[os]
pkg_list = pkg_hash.values
pkg_list.each do |pkg|
pkg.install_dep_list.each do |l|
@log.error "Failed to uninstall \"#{pkg_name}\""
return false
end
- end
+ end
# install package
cached_filepath = nil
if Utils.is_linux_like_os( Utils::HOST_OS ) then
cached_filepath = get_cached_filepath(filename, pkg_checksum, pkg_size)
- end
+ end
if not cached_filepath.nil? then
- @log.info "Cached #{pkg_name} package file.. OK"
+ @log.info "Cached #{pkg_name} package file.. OK"
ret = FileInstaller.install(pkg_name, cached_filepath, type, @location, @log)
else
filepath = download(pkg_name, os, false, @tmp_path)
if filepath.nil? then
return false
- end
- filepath = move_downloaded_pkg(filepath[0], @download_path)
+ end
+ filepath = move_downloaded_pkg(filepath[0], @download_path)
if filepath.nil? then
- return false
- end
+ return false
+ end
ret = FileInstaller.install(pkg_name, filepath, type, @location, @log)
- remove_downloaded_pkgs(pkg_name, os)
- end
+ remove_downloaded_pkgs(pkg_name, os)
+ end
return ret
end
pkg_hash = {}
installed_pkg_hash_key = get_installed_pkg_list_file_path()
if @installed_pkg_hash_loc.has_key? installed_pkg_hash_key then
- pkg_hash = @installed_pkg_hash_loc[installed_pkg_hash_key]
+ pkg_hash = @installed_pkg_hash_loc[installed_pkg_hash_key]
pkg_hash[pkg_name] = get_pkg_from_list(pkg_name, os)
else pkg_hash[pkg_name] = get_pkg_from_list(pkg_name, os) end
@installed_pkg_hash_loc[installed_pkg_hash_key] = pkg_hash
return nil
end
- pkg_hash = {}
+ pkg_hash = {}
installed_pkg_hash_key = get_installed_pkg_list_file_path()
if @installed_pkg_hash_loc.has_key? installed_pkg_hash_key then
- pkg_hash = @installed_pkg_hash_loc[installed_pkg_hash_key]
+ pkg_hash = @installed_pkg_hash_loc[installed_pkg_hash_key]
pkg_hash[pkg_name] = pkg
else pkg_hash[pkg_name] = pkg end
@installed_pkg_hash_loc[installed_pkg_hash_key] = pkg_hash
return pkg
end
- # get the latest snapshot
+ # get the latest snapshot
# from_server : if true, update from server
def get_lastest_snapshot(from_server)
ssinfo_file = "snapshot.info"
@log.warn "Server does not have \"#{ssinfo_file}\" file. This error can be ignored."
end
else
- if File.exist? file_url then FileUtils.cp(file_url, @config_dist_path)
+ if File.exist? file_url then FileUtils.cp(file_url, @config_dist_path)
else @log.warn "Server does not have \"#{ssinfo_file}\" file. This error can be ignored." end
end
end
def get_pkglist_path()
- return File.join(@config_dist_path, @snapshot_path)
+ return File.join(@config_dist_path, @snapshot_path)
end
# if the url includes snapshot information, return true
def is_snapshot_url(addr = nil)
- if addr.nil? then addr = @server_addr end
+ if addr.nil? then addr = @server_addr end
addr_arr = addr.split('/')
if addr_arr[-2].eql? "snapshots" then
- return true
- else
- return false
+ return true
+ else
+ return false
end
- end
+ end
def split_addr_and_snapshot(addr = nil)
- if addr.nil? then addr = @server_addr end
+ if addr.nil? then addr = @server_addr end
addr_arr = addr.split('/')
- if addr_arr[-2].eql? "snapshots" then
+ if addr_arr[-2].eql? "snapshots" then
return addr_arr[0..-3].join("/"), addr_arr[-2..-1].join("/")
- else
- return nil
+ else
+ return nil
end
- end
+ end
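# Illustrative example, not part of the original source: both helpers above
# rely on the ".../snapshots/<name>" URL convention, so a snapshot URL splits
# into the base server address and the snapshot path. For a hypothetical
# address:
#
#   addr = "http://127.0.0.1/dibs/unstable/snapshots/20120101"
#   arr  = addr.split('/')
#   arr[-2].eql? "snapshots"                          #=> true
#   [arr[0..-3].join("/"), arr[-2..-1].join("/")]
#   #=> ["http://127.0.0.1/dibs/unstable", "snapshots/20120101"]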
def is_snapshot_exist(ss_path = nil)
if ss_path.nil? then ss_path = @snapshot_path
if File.exist? local_file_path then
begin
pkg_hash = Parser.read_repo_pkg_list_from local_file_path
- @pkg_hash_os[os] = pkg_hash
+ @pkg_hash_os[os] = pkg_hash
@log.info "Get package information for #{os}.. OK"
rescue => e
@log.error( e.message, Log::LV_USER)
@pkg_hash_os[os] = {}
end
else
- @log.warn "Failed to read pkg_list_#{os} file"
+ @log.warn "Failed to read pkg_list_#{os} file"
@pkg_hash_os[os] = {}
end
end
end
end
@log.info "Get supported os information.. OK"
- else
+ else
@log.warn "Failed to get supported os information"
end
end
def download_os_list(from_server, dist = nil)
- if dist.nil? then dist = get_pkglist_path end
+ if dist.nil? then dist = get_pkglist_path end
file_url = File.join(@server_addr, OS_INFO_FILE)
if from_server then
if not FileDownLoader.download(file_url, dist, @log) then return false end
else
- if File.exist? file_url then FileUtils.cp(file_url, dist)
+ if File.exist? file_url then FileUtils.cp(file_url, dist)
else return false end
end
return true
- end
+ end
def read_archive_pkg_list(list_path)
local_file_path = File.join(list_path, ARCHIVE_PKG_LIST_FILE)
if File.exist? local_file_path then
File.open(local_file_path, "r") do |f|
f.each_line do |l|
- pkg = l.strip
+ pkg = l.strip
if @archive_pkg_list.index(pkg).nil? then @archive_pkg_list.push(pkg) end
end
end
@log.info "Get archive package information.. OK"
- else
+ else
@log.warn "Failed to get archive package information"
end
end
def download_archive_pkg_list(from_server, dist = nil)
- if dist.nil? then dist = get_pkglist_path end
+ if dist.nil? then dist = get_pkglist_path end
file_url = File.join(@server_addr, @snapshot_path, ARCHIVE_PKG_LIST_FILE)
if from_server then
if not FileDownLoader.download(file_url, dist, @log) then return false end
else
- if File.exist? file_url then FileUtils.cp(file_url, dist)
+ if File.exist? file_url then FileUtils.cp(file_url, dist)
else return false end
end
return true
- end
+ end
def download_pkg_list(from_server, dist = nil)
- if dist.nil? then dist = get_pkglist_path end
+ if dist.nil? then dist = get_pkglist_path end
@support_os_list.each do |os|
filename = PKG_LIST_FILE_PREFIX + os
file_url = File.join(@server_addr, @snapshot_path, filename)
if from_server then
- if not FileDownLoader.download(file_url, dist, @log) then return false end
+ if not FileDownLoader.download(file_url, dist, @log) then return false end
else
if File.exist? file_url then FileUtils.cp(file_url, dist)
- else return false end
- end
+ else return false end
+ end
end
- return true
- end
+ return true
+ end
private
# create installed package hash
# get installed package list file path
def get_installed_pkg_list_file_path()
- file_full_path = File.join(@location, PACKAGE_INFO_DIR, INSTALLED_PKG_LIST_FILE)
+ file_full_path = File.join(@location, PACKAGE_INFO_DIR, INSTALLED_PKG_LIST_FILE)
return file_full_path
end
$LOAD_PATH.unshift File.dirname(File.dirname(__FILE__))+"/common"
require "utils"
-def set_default( options )
+def set_default( options )
if options[:t].nil? then options[:t] = false end
if options[:f].nil? then options[:f] = false end
if options[:v].nil? then options[:v] = false end
end
-def option_error_check( options )
+def option_error_check( options )
case options[:cmd]
when "check-upgrade" then
when "download" then
- if options[:pkg].nil? or options[:pkg].empty? then
+ if options[:pkg].nil? or options[:pkg].empty? then
raise ArgumentError, "Usage: pkg-cli download -P <package name> [-o <os>] [-l <location>] [-u <package server url>] [--trace]"
end
when "install" then
- if options[:pkg].nil? or options[:pkg].empty? then
+ if options[:pkg].nil? or options[:pkg].empty? then
raise ArgumentError, "Usage: pkg-cli install -P <package name> [-o <os>] [-l <location>] [-u <package server url>] [--trace] [--force]"
end
when "install-file" then
- if options[:pkg].nil? or options[:pkg].empty? then
+ if options[:pkg].nil? or options[:pkg].empty? then
raise ArgumentError, "Usage: pkg-cli install-lpkg -P <package file> [-l <location>] [-u <package server url>] [--trace] [--force]"
end
when "uninstall" then
- if options[:pkg].nil? or options[:pkg].empty? then
+ if options[:pkg].nil? or options[:pkg].empty? then
raise ArgumentError, "Usage: pkg-cli uninstall -P <package name> [-l <location>] [--trace]"
end
end
end
-def option_parse
+def option_parse
options = {}
banner = "Request service to package-server and control packages service command-line tool." + "\n" \
+ "\n" + "Usage: pkg-cli <SUBCOMMAND> [OPTS] or pkg-cli (-h|-v)" + "\n" \
opts.banner = banner
opts.on( '-P', '--pkg <package name/file>', 'package name or package file name' ) do |name|
- options[:pkg] = name
+ options[:pkg] = name
end
opts.on( '-o', '--os <operating system>', 'target operating system: ubuntu-32/ubuntu-64/windows-32/windows-64/macos-64' ) do |os|
end
opts.on( '-u', '--url <server url>', 'package server url: http://127.0.0.1/dibs/unstable' ) do |url|
- options[:url] = url
+ options[:url] = url
end
opts.on( '-l', '--loc <location>', 'install/download location' ) do |loc|
- options[:loc] = loc
+ options[:loc] = loc
end
opts.on( '--trace', 'enable trace dependent packages' ) do
end
end
- cmd = ARGV[0]
+ cmd = ARGV[0]
if cmd.eql? "update" or cmd.eql? "download" or
cmd.eql? "install" or cmd.eql? "show-rpkg" or
cmd.eql? "list-rpkg" or
cmd =~ /(-v)|(--version)/ or
cmd =~ /(help)|(-h)|(--help)/ then
- if cmd.eql? "help" then
- ARGV[0] = "-h"
+ if cmd.eql? "help" then
+ ARGV[0] = "-h"
end
options[:cmd] = ARGV[0]
else
set_default options
- # option error check
+ # option error check
option_error_check options
return options
-end
+end
require "parser"
require "installer"
-class Distribution
+class Distribution
attr_accessor :name, :location, :server_url, :lock_file_path, :last_sync_changes
# constant
@location = location
@server_url = server_url
@log = pkg_server.log
- @integrity = pkg_server.integrity
+ @integrity = pkg_server.integrity
@lock_file_path = "#{location}/#{LOCK_FILE}"
@pkg_hash_os = {}
@archive_pkg_list = []
end
# modified pkg class
- pkg.origin = "local"
+ pkg.origin = "local"
pkg.source = ""
pkg.path = "/binary/" + File.basename( file_path )
if pkg.checksum.empty? then
# TODO: windows and mac : sha256sum
if Utils.is_unix_like_os( Utils::HOST_OS ) then
pkg.checksum = `sha256sum #{file_path}`.split(" ")[0]
- end
+ end
end
if pkg.size.empty? then
pkg.size = `du -b #{file_path}`.split[0].strip
end
- @pkg_hash_os[pkg.os][pkg.package_name] = pkg
+ @pkg_hash_os[pkg.os][pkg.package_name] = pkg
return pkg
end
end
# modified pkg class
- pkg.origin = "local"
+ pkg.origin = "local"
pkg.source = ""
pkg.path = "/temp/" + File.basename( file_path )
# TODO: windows and mac : sha256sum
name = Utils.create_uniq_name
end
- # check base snapshot exist
- if File.exist? "#{@location}/snapshots/#{name}" then
+ # check base snapshot exist
+ if File.exist? "#{@location}/snapshots/#{name}" then
raise "Snapshot already exists: #{name}"
end
FileUtils.mkdir "#{@location}/changes" if not File.exists? "#{@location}/changes"
File.open( "#{@location}/changes/#{name}.log","w") { |f| f.puts change_log_string }
- # base_snapshot_path
+ # base_snapshot_path
if base_snapshot.empty? then
snapshot_path = @location
else
# copy package list
@support_os_list.each do |os|
- FileUtils.copy_file( "#{snapshot_path}/#{PKG_LIST_FILE_PREFIX}#{os}",
+ FileUtils.copy_file( "#{snapshot_path}/#{PKG_LIST_FILE_PREFIX}#{os}",
"#{@location}/snapshots/#{name}/#{PKG_LIST_FILE_PREFIX}#{os}" )
- end
+ end
# copy archive package list
- FileUtils.copy_file( "#{snapshot_path}/#{ARCHIVE_PKG_FILE}",
+ FileUtils.copy_file( "#{snapshot_path}/#{ARCHIVE_PKG_FILE}",
"#{@location}/snapshots/#{name}/#{ARCHIVE_PKG_FILE}" )
- # copy os info file
- FileUtils.copy_file( "#{snapshot_path}/#{OS_INFO_FILE}",
+ # copy os info file
+ FileUtils.copy_file( "#{snapshot_path}/#{OS_INFO_FILE}",
"#{@location}/snapshots/#{name}/#{OS_INFO_FILE}" )
# generate temp file
while ( tmp_file_name.empty? )
tmp_file_name = @location + "/temp/." + Utils.create_uniq_name
- if File.exist? tmp_file_name then
+ if File.exist? tmp_file_name then
tmp_file_name = ""
- end
+ end
end
FileUtils.copy_file( "#{@location}/#{SNAPSHOT_INFO_FILE}", tmp_file_name )
File.open( tmp_file_name, "a" ) do |f|
f.puts "name : #{name}"
f.puts "time : #{Time.now.strftime("%Y%m%d%H%M%S")}"
- if from_cmd then
+ if from_cmd then
f.puts "type : manual"
else
f.puts "type : auto"
# snapshot is generated
@log.output( "snapshot is generated : #{@location}/snapshots/#{name}", Log::LV_USER)
return name
- end
+ end
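# Illustrative sketch, not part of the original source: each generated
# snapshot appends a small record to the snapshot info file in the format
# written above (the original appends to a temp copy and then moves it back).
# Appending one record boils down to:
def append_snapshot_record(info_file, name, from_cmd)
  File.open(info_file, "a") do |f|
    f.puts "name : #{name}"
    f.puts "time : #{Time.now.strftime("%Y%m%d%H%M%S")}"
    f.puts "type : #{from_cmd ? "manual" : "auto"}"
  end
end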
- def sync(force, snapshot = "")
+ def sync(force, snapshot = "")
pkg_list_update_flag = false
archive_update_flag = false
distribution_update_flag = false
reload_distribution_information()
# check distribution's server_url
- if @server_url.empty? then
+ if @server_url.empty? then
@log.error("This distribution has no remote server", Log::LV_USER)
return false
end
end
if force then
- remove_os_list = @support_os_list - client.support_os_list
+ remove_os_list = @support_os_list - client.support_os_list
remove_os_list.each do |os|
remove_os(os)
changes.push "Remove OS #{os}"
update_pkg_list = []
@support_os_list.each do |os|
- # error check
- if client.pkg_hash_os[os].nil? then
+ # error check
+ if client.pkg_hash_os[os].nil? then
@log.error("package server does not have os : #{os}", Log::LV_USER)
next
- end
+ end
server_pkg_name_list = client.pkg_hash_os[os].keys
- local_pkg_name_list = @pkg_hash_os[os].keys
+ local_pkg_name_list = @pkg_hash_os[os].keys
full_pkg_name_list = server_pkg_name_list + local_pkg_name_list
full_pkg_name_list.uniq!
- full_pkg_name_list.each do |pkg_name|
+ full_pkg_name_list.each do |pkg_name|
ret = sync_package( pkg_name, client, os, force )
- if not ret.nil? then
+ if not ret.nil? then
update_pkg_list.push(ret)
- pkg_list_update_flag = true
+ pkg_list_update_flag = true
end
end
end
# if the package is updated at sync time then skip
if Utils.compare_version(local_pkg.version, pkg.version) == -1 then
next
- else
+ else
@log.info( "update package [#{pkg.package_name}] in #{pkg.os}", Log::LV_USER)
end
end
changes.push pkg.get_changes if pkg.does_change_exist?
when "REMOVE"
if not force then
- if @pkg_hash_os[os][pkg.package_name].origin.eql? "local" then
- else
+ if @pkg_hash_os[os][pkg.package_name].origin.eql? "local" then
+ else
@log.info( "remove package [#{pkg.package_name}] in #{pkg.os}", Log::LV_USER)
next
end
def add_os(os)
if @support_os_list.include? os then
@log.error("#{os} already exists", Log::LV_USER)
- return
+ return
end
# update os information
@support_os_list.push os
- @pkg_hash_os[os] = {}
+ @pkg_hash_os[os] = {}
File.open("#{@location}/#{OS_INFO_FILE}", "a") do |f|
f.puts os
end
- # create pkg_list_#{os} file
+ # create pkg_list_#{os} file
File.open( "#{@location}/#{PKG_LIST_FILE_PREFIX}#{os}", "w" ) do |f| end
end
# collect remaining file's name from current package server version
@support_os_list.each do |os|
- @pkg_hash_os[os].each_value{ |pkg|
+ @pkg_hash_os[os].each_value do |pkg|
file_list.push(pkg.path.sub("/binary/",""))
pkg.source_dep_list.each do |source_dep|
@log.error("Can't find dependency source package : #{source_dep.package_name}")
end
end
- }
- end
+ end
+ end
# remain only used archive package
@archive_pkg_list = used_archive_list.uniq
os_list.each do |os|
begin
- info_file = "#{@location}/snapshots/#{snapshot}/#{PKG_LIST_FILE_PREFIX}#{os}"
+ info_file = "#{@location}/snapshots/#{snapshot}/#{PKG_LIST_FILE_PREFIX}#{os}"
if not File.exist? info_file then
@log.error( "pkg list file does not exist : #{info_file}", Log::LV_USER)
next
end
- pkg_list = Parser.read_repo_pkg_list_from(info_file)
+ pkg_list = Parser.read_repo_pkg_list_from(info_file)
- pkg_list.each_value{ |pkg|
+ pkg_list.each_value do |pkg|
file_list.push(pkg.path.sub("/binary/",""))
- }
+ end
rescue => e
@log.error( e.message, Log::LV_USER)
end
end
used_archive_list = used_archive_list + read_archive_pkg_list( snapshot )
- end
+ end
file_list.uniq!
used_archive_list.uniq!
- # remove unused binary file
+ # remove unused binary file
Dir.new( @location + "/binary" ).each do |file|
if file.start_with? "." then next end
if not file_list.include? file then
FileUtils.rm "#{@location}/binary/#{file}"
end
- end
+ end
- # remove unused archive file
+ # remove unused archive file
Dir.new( @location + "/source" ).each do |file|
if file.start_with? "." then next end
if not used_archive_list.include? file then
FileUtils.rm "#{@location}/source/#{file}"
end
- end
+ end
- # remove unused snapshot
+ # remove unused snapshot
Dir.new( @location + "/snapshots" ).each do |snapshot|
if snapshot.start_with? "." then next end
- if not remain_snapshot_list.include? snapshot then
+ if not remain_snapshot_list.include? snapshot then
FileUtils.rm_rf "#{@location}/snapshots/#{snapshot}"
end
end
def write_pkg_list( os )
# if input os is empty then return
- if os.nil? or os.empty? then return end
+ if os.nil? or os.empty? then return end
# generate temp file
tmp_file_name = ""
while ( tmp_file_name.empty? )
tmp_file_name = @location + "/temp/." + Utils.create_uniq_name
- if File.exist? tmp_file_name then
+ if File.exist? tmp_file_name then
tmp_file_name = ""
- end
+ end
end
- File.open( tmp_file_name, "w" ) do |f|
+ File.open( tmp_file_name, "w" ) do |f|
@pkg_hash_os[os].each_value do |pkg|
- # insert package information to file
+ # insert package information to file
pkg.print_to_file(f)
- # insert empty line to file
- f.puts
+ # insert empty line to file
+ f.puts
end
- end
+ end
- FileUtils.mv( tmp_file_name, "#{@location}/#{PKG_LIST_FILE_PREFIX}#{os}", :force => true )
+ FileUtils.mv( tmp_file_name, "#{@location}/#{PKG_LIST_FILE_PREFIX}#{os}", :force => true )
end
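# Illustrative sketch, not part of the original source: write_pkg_list and the
# other writers in this class follow the same pattern: write the new content
# to a hidden temp file and then FileUtils.mv it over the real file, so
# readers never observe a half-written list. The pattern in isolation (the
# original derives the temp name from Utils.create_uniq_name):
require "fileutils"

def atomic_rewrite(target, temp_dir)
  FileUtils.mkdir_p temp_dir
  tmp = File.join(temp_dir, "." + rand(36 ** 8).to_s(36))
  File.open(tmp, "w") { |f| yield f }
  FileUtils.mv(tmp, target, :force => true)
end
# atomic_rewrite("#{location}/pkg_list_#{os}", "#{location}/temp") do |f|
#   pkg_hash.each_value { |pkg| pkg.print_to_file(f); f.puts }
# end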
- # input: package file path(zip file)
- # return: pkg
+ # input: package file path(zip file)
+ # return: pkg
def get_package_from_file(file_path)
tmp_dir = @location + "/" + Utils.create_uniq_name
- #if file extension is .zip then check pkginfo.manifest
+ #if file extension is .zip then check pkginfo.manifest
if File.extname(file_path).eql? ".zip" then
FileUtils.mkdir tmp_dir
FileUtils.rm_rf tmp_dir
return nil
end
- end
+ end
- def remove_pkg( pkg_name_list, os )
+ def remove_pkg( pkg_name_list, os )
if os.eql? "all" then os_list = @support_os_list
else os_list = [ os ]
end
pkg_name_list.each do |package_name|
- removed_flag = false
+ removed_flag = false
os_list.each do |o|
if not @support_os_list.include? o then
next
end
- if @pkg_hash_os[o].key?(package_name) then
+ if @pkg_hash_os[o].key?(package_name) then
@log.info( "remove package [#{package_name}] in #{o}", Log::LV_USER)
- @pkg_hash_os[o].delete(package_name)
+ @pkg_hash_os[o].delete(package_name)
removed_flag = true
end
- end
+ end
- if not removed_flag then
+ if not removed_flag then
if @archive_pkg_list.include? package_name then
@archive_pkg_list.delete package_name
else
@log.error( "Can't find package: [#{package_name}]", Log::LV_USER)
end
end
- end
+ end
# check install dependency integrity
if @integrity.eql? "YES" then
@log.info "integrity check"
- check_integrity
+ check_integrity
else
@log.info "skip integrity check"
- end
+ end
# update pkg_list file
os_list.each do |o|
- write_pkg_list(o)
+ write_pkg_list(o)
end
write_archive_pkg_list
- end
+ end
def remove_snapshot( snapshot_list )
remain_snapshot = []
removed_snapshot = []
- # remove unused snapshot
+ # remove unused snapshot
Dir.new( @location + "/snapshots" ).each do |snapshot|
if snapshot.start_with? "." then next end
- if snapshot_list.include? snapshot then
+ if snapshot_list.include? snapshot then
FileUtils.rm_rf "#{@location}/snapshots/#{snapshot}"
snapshot_list.delete snapshot
removed_snapshot.push snapshot
if not snapshot_list.empty? then
@log.output( "snapshot does not exist : #{snapshot_list.join(",")}", Log::LV_USER )
- end
+ end
- if not removed_snapshot.empty? then
+ if not removed_snapshot.empty? then
@log.output( "snapshot removed: #{removed_snapshot.join(",")}", Log::LV_USER )
end
update_snapshot_info_file(remain_snapshot)
end
- def check_integrity
- @log.info "check server pkg's install dependency integrity"
+ def check_integrity
+ @log.info "check server pkg's install dependency integrity"
@support_os_list.each do |os|
@pkg_hash_os[os].each_value.each do |pkg|
check_package_integrity(pkg)
end
- end
+ end
end
def check_package_integrity(pkg)
- error_msg = "[[#{pkg.package_name}] in #{pkg.os}]'s install dependency not matched in "
+ error_msg = "[[#{pkg.package_name}] in #{pkg.os}]'s install dependency not matched in "
os = pkg.os
pkg.install_dep_list.each do |dep|
- if @pkg_hash_os[os].has_key? dep.package_name then
+ if @pkg_hash_os[os].has_key? dep.package_name then
target_pkg = @pkg_hash_os[os][dep.package_name]
- else
- raise RuntimeError,(error_msg + dep.to_s)
+ else
+ raise RuntimeError,(error_msg + dep.to_s)
end
- # check package's version
- if not dep.match? target_pkg.version then
+ # check package's version
+ if not dep.match? target_pkg.version then
raise RuntimeError,(error_msg + dep.to_s)
- end
+ end
- end
+ end
- error_msg = "[[#{pkg.package_name}] in #{pkg.os}]'s build dependency not matched in "
+ error_msg = "[[#{pkg.package_name}] in #{pkg.os}]'s build dependency not matched in "
pkg.build_dep_list.each do |dep|
if dep.target_os_list.length == 0 then
- build_dep_os = os
+ build_dep_os = os
else
build_dep_os = dep.target_os_list[0]
end
- if @pkg_hash_os[build_dep_os].has_key? dep.package_name then
+ if @pkg_hash_os[build_dep_os].has_key? dep.package_name then
target_pkg = @pkg_hash_os[build_dep_os][dep.package_name]
- else
- raise RuntimeError,(error_msg + dep.to_s)
+ else
+ raise RuntimeError,(error_msg + dep.to_s)
end
- # check package's version
- if not dep.match? target_pkg.version then
+ # check package's version
+ if not dep.match? target_pkg.version then
raise RuntimeError,(error_msg + dep.to_s)
- end
- end
+ end
+ end
- error_msg = "[[#{pkg.package_name}] in #{pkg.os}]'s source dependency not matched in "
+ error_msg = "[[#{pkg.package_name}] in #{pkg.os}]'s source dependency not matched in "
pkg.source_dep_list.each do |dep|
if not @archive_pkg_list.include? dep.package_name then
- raise RuntimeError,(error_msg + dep.to_s)
+ raise RuntimeError,(error_msg + dep.to_s)
end
- end
+ end
end
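# Illustrative sketch, not part of the original source: check_package_integrity
# enforces three rules: every install dependency must exist with a matching
# version in the same OS, every build dependency must exist in its first
# target OS (or the package's own OS when none is given), and every source
# dependency must appear in the archive package list. Condensed into a
# predicate over plain hashes (dep objects are assumed to respond to
# package_name, match? and target_os_list as above):
def deps_satisfied?(pkg, pkg_hash_os, archive_pkg_list)
  os = pkg.os
  install_ok = pkg.install_dep_list.all? do |d|
    (t = pkg_hash_os[os][d.package_name]) and d.match? t.version
  end
  build_ok = pkg.build_dep_list.all? do |d|
    dep_os = d.target_os_list.empty? ? os : d.target_os_list[0]
    (h = pkg_hash_os[dep_os]) and (t = h[d.package_name]) and d.match? t.version
  end
  source_ok = pkg.source_dep_list.all? do |d|
    archive_pkg_list.include? d.package_name
  end
  return install_ok && build_ok && source_ok
end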
def read_archive_pkg_list( snapshot_name )
file_name = @location + "/" + ARCHIVE_PKG_FILE
else
file_name = @location + "/snapshots/" + snapshot_name + "/" + ARCHIVE_PKG_FILE
- end
+ end
- if File.exist? file_name
- File.open(file_name, "r") do |f|
- f.each_line do |l|
+ if File.exist? file_name
+ File.open(file_name, "r") do |f|
+ f.each_line do |l|
pkg_list.push(l.strip)
- end
- end
- end
+ end
+ end
+ end
return pkg_list
- end
+ end
def write_archive_pkg_list()
File.open( "#{@location}/#{ARCHIVE_PKG_FILE}", "w" ) do |f|
- @archive_pkg_list.each do |pkg|
- f.puts(pkg)
+ @archive_pkg_list.each do |pkg|
+ f.puts(pkg)
end
end
end
def initialize_pkg_list
- if not File.exist? "#{@location}/#{OS_INFO_FILE}" then
+ if not File.exist? "#{@location}/#{OS_INFO_FILE}" then
return
- end
+ end
- # get support_os_list
+ # get support_os_list
@support_os_list = []
File.open( "#{@location}/#{OS_INFO_FILE}", "r" ) do |f|
f.each_line do |l|
# read package_list file
@support_os_list.each do |os|
@pkg_hash_os[os] = {}
- pkg_list_file = "#{@location}/#{PKG_LIST_FILE_PREFIX}#{os}"
+ pkg_list_file = "#{@location}/#{PKG_LIST_FILE_PREFIX}#{os}"
- if File.exist? pkg_list_file then
+ if File.exist? pkg_list_file then
begin
@pkg_hash_os[os] = Parser.read_repo_pkg_list_from( pkg_list_file )
rescue => e
def get_link_package(pkg, pkg_os)
pkg.os_list.each do |os|
- # skip in same os for origin package
- if pkg_os.eql? os then next end
- # skip in unsupported os
- if not @support_os_list.include? os then next end
+ # skip in same os for origin package
+ if pkg_os.eql? os then next end
+ # skip in unsupported os
+ if not @support_os_list.include? os then next end
- exist_pkg = @pkg_hash_os[os][pkg.package_name]
+ exist_pkg = @pkg_hash_os[os][pkg.package_name]
if exist_pkg.nil? then next end
compare_version = Utils.compare_version(pkg.version, exist_pkg.version)
- # if version same then compatible package
- if compare_version == 0 then
+ # if version same then compatible package
+ if compare_version == 0 then
return exist_pkg
end
end
end
# PRIVATE METHODS/VARIABLES
- private
+ private
def sync_package( pkg_name, client, os, force )
server_pkg = client.pkg_hash_os[os][pkg_name]
- local_pkg = @pkg_hash_os[os][pkg_name]
+ local_pkg = @pkg_hash_os[os][pkg_name]
# if server and local has package
if ( not server_pkg.nil? ) and ( not local_pkg.nil? ) then
return nil
end
- if ( local_pkg.origin.eql? "local" ) and (not force) then
+ if ( local_pkg.origin.eql? "local" ) and (not force) then
# local_pkg is generated from local and not force mode then skip update
return nil
- end
+ end
pkg = sync_package2( server_pkg, client, os )
return ["ADD", os, pkg]
pkg = sync_package2( server_pkg, client, os )
return ["ADD", os, pkg]
# if package exist only local
- elsif ( not local_pkg.nil? ) then
+ elsif ( not local_pkg.nil? ) then
# if local pkg is generated from local then skip
- if local_pkg.origin.eql? "local" and (not force) then
+ if local_pkg.origin.eql? "local" and (not force) then
return nil
end
end
return nil
- end
+ end
def sync_package2( pkg, client, os )
pkg_name = pkg.package_name
# package update
file_path_list = client.download( pkg_name, os, false )
- # file download error check
- if file_path_list.nil? or file_path_list.empty? then
+ # file download error check
+ if file_path_list.nil? or file_path_list.empty? then
@log.error("Can't download package file [#{pkg_name}]", Log::LV_USER)
return nil
else
file_path = file_path_list[0]
- end
+ end
- # update pkg class
+ # update pkg class
pkg.path = "/binary/#{File.basename(file_path)}"
pkg.origin = client.server_addr
- return pkg
+ return pkg
end
if not File.exist? "#{@location}/#{SNAPSHOT_INFO_FILE}"
@log.error "Cannot find snapshot info file"
return
- end
+ end
# generate temp file
tmp_file_name = ""
while ( tmp_file_name.empty? )
tmp_file_name = @location + "/temp/." + Utils.create_uniq_name
- if File.exist? tmp_file_name then
+ if File.exist? tmp_file_name then
tmp_file_name = ""
- end
+ end
end
# modify snapshot info File
info_file = File.readlines("#{@location}/#{SNAPSHOT_INFO_FILE}")
File.open(tmp_file_name, 'w') do |f|
save_flag = false
- info_file.each { |line|
- if line =~ /name :/ then
+ info_file.each do |line|
+ if line =~ /name :/ then
if remain_snapshot_list.include? line.split(':')[1].strip then
save_flag = true
else
end
- if save_flag then
+ if save_flag then
f.puts line
end
- }
+ end
end
FileUtils.mv( tmp_file_name, "#{@location}/#{SNAPSHOT_INFO_FILE}", :force => true )
depends_list = []
@support_os_list.each do |os|
- @pkg_hash_os[os].each_value{ |dpkg|
+ @pkg_hash_os[os].each_value do |dpkg|
if dpkg.install_dep_list.include? pkg or \
dpkg.build_dep_list.include? pkg then
- depends_list.push dpkg
+ depends_list.push dpkg
end
- }
- end
+ end
+ end
depends_list.each do |dpkg|
checked_list.push dpkg
end
return rdepends_list
- end
+ end
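# Illustrative sketch, not part of the original source: a reverse dependent of
# a package is any package, in any supported OS, whose install or build
# dependency list contains it. The single-level lookup in isolation (the
# method above then walks the result transitively via checked_list):
def direct_reverse_dependents(pkg, pkg_hash_os, support_os_list)
  support_os_list.map { |os|
    pkg_hash_os[os].values.select { |dpkg|
      dpkg.install_dep_list.include? pkg or dpkg.build_dep_list.include? pkg
    }
  }.flatten
end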
def reload_distribution_information
if not File.exist?("#{@location}/#{OS_INFO_FILE}") then
return
- end
+ end
- # get support_os_list
+ # get support_os_list
@support_os_list = []
File.open( "#{@location}/#{OS_INFO_FILE}", "r" ) do |f|
f.each_line do |l|
# read binary package_list file
@support_os_list.each do |os|
@pkg_hash_os[os] = {}
- pkg_list_file = "#{@location}/#{PKG_LIST_FILE_PREFIX}#{os}"
+ pkg_list_file = "#{@location}/#{PKG_LIST_FILE_PREFIX}#{os}"
- if File.exist? pkg_list_file then
+ if File.exist? pkg_list_file then
begin
@pkg_hash_os[os] = Parser.read_repo_pkg_list_from( pkg_list_file )
rescue => e
while ( tmp_file_name.empty? )
tmp_file_name = @location + "/temp/." + Utils.create_uniq_name
- if File.exist? tmp_file_name then
+ if File.exist? tmp_file_name then
tmp_file_name = ""
- end
+ end
end
info_file = File.readlines("#{@location}/#{OS_INFO_FILE}")
File.open(tmp_file_name, "w") do |f|
info_file.each do |line|
- if not line.strip.eql? os then
- f.puts line
+ if not line.strip.eql? os then
+ f.puts line
end
end
end
FileUtils.mv( tmp_file_name, "#{@location}/#{OS_INFO_FILE}", :force => true )
- # delete pkg_list_#{os} file
- File.delete( "#{@location}/#{PKG_LIST_FILE_PREFIX}#{os}" )
+ # delete pkg_list_#{os} file
+ File.delete( "#{@location}/#{PKG_LIST_FILE_PREFIX}#{os}" )
end
def sync_archive_pkg
updated_file_list = []
# if update list is empty then return empty array
- if download_list.empty? then return updated_file_list end
+ if download_list.empty? then return updated_file_list end
download_list.each do |pkg|
file = client.download_dep_source(pkg)
=begin
- downloader.rb
+ downloader.rb
Copyright (c) 2000 - 2011 Samsung Electronics Co., Ltd. All rights reserved.
# need verify
if ret then
logger.info "Downloaded #{filename}.. OK"
- else
- logger.info "Failed to download #{filename}"
+ else
+ logger.info "Failed to download #{filename}"
logger.info " [dist: #{path}]"
- end
+ end
return ret
end
end
=begin
- installer.rb
+ installer.rb
Copyright (c) 2000 - 2011 Samsung Electronics Co., Ltd. All rights reserved.
else
logger.error "Failed to create temporary dir"
logger.info " [path: #{path}]"
- return false
+ return false
end
begin
logger.info "Installing \"#{package_name}\" package.."
- logger.info " [file: #{package_file_path}]"
+ logger.info " [file: #{package_file_path}]"
log = log + "## Extract file : #{package_file_path}\n"
result = extract_file(package_name, package_file_path, path, target_path, logger)
- if result == "" or result.nil? then
+ if result == "" or result.nil? then
write_log(target_path, package_name, log)
- return false
+ return false
else log = log + result end
log = log + "## Move files : \"#{path}\" to \"#{target_path}\"\n"
result = move_dir(package_name, path, target_path, logger)
- if result.nil? then
+ if result.nil? then
write_log(target_path, package_name, log)
return false
- else log = log + result end
+ else log = log + result end
log = log + "## Execute install script\n"
result = execute_install_script(package_name, path, target_path, logger)
if result.nil? then
write_log(target_path, package_name, log)
return false
- else log = log + result end
+ else log = log + result end
log = log + "## Move remove script\n"
result = move_remove_script(package_name, path, target_path, logger)
if result.nil? then
write_log(target_path, package_name, log)
return false
- else log = log + result end
+ else log = log + result end
log = log + "## Remove temporary dir : #{path} #####\n"
result = Utils.execute_shell_return("rm -rf #{path}")
if result.nil? then
logger.warn "Failed to remove temporary path"
logger.info " [path: #{path}]"
- end
+ end
rescue Interrupt
logger.error "FileInstaller: Interrupted.."
Utils.execute_shell("rm -rf #{path}")
File.open(pkg_inst_log_path, "a+") do |f|
f.puts log
end
- end
+ end
def FileInstaller.move_remove_script(package_name, path, target_path, logger)
target_path = target_path + "/#{PACKAGE_INFO_DIR}/#{package_name}"
logger.info " [file: #{script_file}]"
logger.info " [from: #{path}]"
logger.info " [to: #{target_path}]"
- return nil
+ return nil
else log = result.join("") end
logger.info "Moved remove script file.. OK"
log = log + "[file: #{script_file}]\n"
end
return log
- end
+ end
# Does not verify that the script execution is successful.
logger.info "Execute \"#{script_file}\" file"
if Utils.is_windows_like_os( Utils::HOST_OS ) then
target_path = target_path.gsub("/","\\")
- cmd = "set INSTALLED_PATH=\"#{target_path}\"& #{script_file}"
+ cmd = "set INSTALLED_PATH=\"#{target_path}\"& #{script_file}"
else
cmd = "INSTALLED_PATH=\"#{target_path}\" #{script_file}"
end
def FileInstaller.execute_remove_script(package_name, target_path, logger)
info_path = target_path + "/#{PACKAGE_INFO_DIR}/#{package_name}"
if not File.directory? info_path then
- logger.error "\"#{info_path}\" does not exist."
- return nil
+ logger.error "\"#{info_path}\" does not exist."
+ return nil
end
script_file_prefix = "#{info_path}/remove.*"
list_path = target_path + "/#{PACKAGE_INFO_DIR}/#{package_name}"
if not File.directory? list_path then
- logger.error "\"#{list_path}\" does not exist."
- return false
+ logger.error "\"#{list_path}\" does not exist."
+ return false
end
list_file_name = "#{list_path}/#{package_name}.list"
if f.nil? or f.empty? then next end
file_path = File.join(target_path, f)
if File.directory? file_path then
- if File.symlink? file_path then
+ if File.symlink? file_path then
File.unlink file_path
next
end
Dir.rmdir(file_path)
rescue SystemCallError
logger.warn "\"#{file_path}\" directory is not empty"
- end
+ end
else directories.push(file_path) end
elsif File.file? file_path then FileUtils.rm_f(file_path)
- elsif File.symlink? file_path then File.unlink file_path
+ elsif File.symlink? file_path then File.unlink file_path
# if files are already removed by remove script,
else logger.warn "\"#{file_path}\" does not exist" end
end
Dir.rmdir(path)
rescue SystemCallError
logger.warn "\"#{file_path}\" directory is not empty"
- end
+ end
else next end
end
end
case type
when "binary" then
result = execute_remove_script(package_name, target_path, logger)
- if result.nil? then return false end
+ if result.nil? then return false end
if not remove_pkg_files(package_name, target_path, logger) then return false end
when "source" then
end
if not File.exist? pkginfo_path then
logger.error "#{PACKAGE_MANIFEST} file does not exist. Check #{source_path}"
- return nil
+ return nil
else FileUtils.cp pkginfo_path, config_path end
if File.exist? data_path then
- # if os is linux, use cpio. it is faster than cp
+ # if os is linux, use cpio. it is faster than cp
if Utils.is_linux_like_os( Utils::HOST_OS ) then
absolute_path = `readlink -f #{target_path}`
result = Utils.execute_shell_return("cd #{data_path}; find . -depth | cpio -pldm #{absolute_path}")
- else
+ else
result = Utils.execute_shell_return("cp -r #{data_path}/* #{target_path}")
end
if result.nil? then
show_file_list_command = "zip -sf #{package_file_path}"
extract_file_list_command = "unzip -o \"#{package_file_path}\" -d \"#{path}\""
when ".tar" then
- # path should be unix path if it is used in tar command
+ # path should be unix path if it is used in tar command
_package_file_path = Utils.get_unix_path(package_file_path)
_path = Utils.get_unix_path(path)
show_file_list_command = "tar -tf #{_package_file_path}"
extract_file_list_command = "tar xf \"#{_package_file_path}\" -C \"#{_path}\""
else
logger.error "\"#{filename}\" is not supported."
- return nil
+ return nil
end
system "#{show_file_list_command} > #{temp_pkg_file_list_path}"
log = unzip_file(package_file_path, path)
else
#result = Utils.execute_shell_return(extract_file_list_command)
- #if result.nil? then log = nil
- #else log = result.join("") end
+ #if result.nil? then log = nil
+ #else log = result.join("") end
log = `#{extract_file_list_command}`
end
when ".tar" then
#result = Utils.execute_shell_return(extract_file_list_command)
#if result.nil? then log = nil
- #else log = result.join("") end
+ #else log = result.join("") end
log = `#{extract_file_list_command}`
end
logger.info " [to: #{target_path}]"
logger.info " [cmd: #{extract_file_list_command}]"
return nil
- end
+ end
logger.info "Extracted \"#{filename}\" file.. OK"
log = log + "[file: #{package_file_path}]\n"
extract_file_command = "unzip -x #{package_file_path} #{target_file}"
end
when ".tar" then
- # path should be unix path if it is used in tar command
+ # path should be unix path if it is used in tar command
_package_file_path = Utils.get_unix_path(package_file_path)
_path = Utils.get_unix_path(path)
if not path.nil? then
end
def FileInstaller.unzip_file(zipfile, dest)
- log = ""
+ log = ""
Zip::ZipFile.open(zipfile) do |zip_file|
zip_file.each do |f|
f_path = File.join(dest, f.name)
=begin
- packageServer.rb
+ packageServer.rb
Copyright (c) 2000 - 2011 Samsung Electronics Co., Ltd. All rights reserved.
require "DistSync"
class PackageServer
- attr_accessor :id, :location, :log, :integrity
+ attr_accessor :id, :location, :log, :integrity
attr_accessor :finish, :port
attr_accessor :incoming_path
attr_accessor :distribution_list
@integrity = "YES"
@auto_sync_flag = "NO"
@finish = false
- @port = 3333
+ @port = 3333
@test_time=0 #test time in mili-seconds
@lock_file= nil
@sync_interval = 3600
update_config_information(id)
if loc.nil? or loc.empty? then
- @location = Dir.pwd + "/" + @id
+ @location = Dir.pwd + "/" + @id
else
if Utils.is_absolute_path(loc) then
@location = File.join(loc, @id)
end
end
- # error check : check whether server @id already exists
+ # error check : check whether server @id already exists
if File.exist? @config_dir
raise RuntimeError, "Server creation failed. Server id [#{@id}] already exists"
end
- # name check
- if dist_name.strip.eql? "distribution.info" then
+ # name check
+ if dist_name.strip.eql? "distribution.info" then
raise RuntimeError, "id \"distribution.info\" is not available"
- end
+ end
# create locking file
lock_file = Utils.file_lock(DIBS_LOCK_FILE_PATH)
- # create server config directory
+ # create server config directory
FileUtils.mkdir_p @config_dir
FileUtils.mkdir_p @incoming_path
if (not server_url.empty?) and \
(not Utils.is_url_remote(server_url)) and \
(not Utils.is_absolute_path(server_url)) then
- # if server_url is a local server address then generate an absolute path
+ # if server_url is a local server address then generate an absolute path
server_url = File.join(Utils::WORKING_DIR, server_url)
- end
+ end
# create server configure file
File.open( @config_file_path, "w" ) do |f|
@log.info "package register in server"
distribution = get_distribution( dist_name )
- # distribution lock
+ # distribution lock
@lock_file = Utils.file_lock(distribution.lock_file_path)
updated_os_list = []
snapshot_name = ""
file_path_list.each do |f|
- # error check for file exist
+ # error check for file exist
if not File.exist? f
raise RuntimeError, "package file does not exist [#{f}]"
- end
+ end
- pkg = distribution.get_package_from_file(f)
+ pkg = distribution.get_package_from_file(f)
# binary package
if not pkg.nil? then
- # find link package
+ # find link package
pkg_os = Utils.get_os_from_package_file(f)
link_pkg = distribution.get_link_package(pkg, pkg_os)
if link_pkg.nil? then
updated_pkg = register_package(distribution, pkg, f, test_flag, internal_flag)
- updated_os_list.push updated_pkg.os
+ updated_os_list.push updated_pkg.os
registed_package_list.push updated_pkg
# archive package
else
- if test_flag then
+ if test_flag then
@log.error("archive package does not support test mode", Log::LV_USER)
return
- end
+ end
file_name = File.basename(f)
distribution.register_archive_pkg(file_name)
archive_pkg_file_path_list.push f
- end
+ end
end
# check install dependency integrity
- if not test_flag and @integrity.eql? "YES" then
+ if not test_flag and @integrity.eql? "YES" then
registed_package_list.each do |pkg|
distribution.check_package_integrity(pkg)
end
# move file to package server
binary_pkg_file_path_list.each do |l|
- if test_flag then
+ if test_flag then
FileUtils.copy_file( l, "#{distribution.location}/temp/#{File.basename(l)}" )
- else
+ else
FileUtils.copy_file( l, "#{distribution.location}/binary/#{File.basename(l)}" )
- end
- end
+ end
+ end
- # link to package server
+ # link to package server
link_pkg_file_path_list.each do |l|
- if test_flag then
+ if test_flag then
src_file = File.join(distribution.location, l[0])
dest_file = File.join(distribution.location, "temp", l[1])
FileUtils.ln( src_file, dest_file, :force => true )
archive_pkg_file_path_list.each do |l|
FileUtils.mv( l, "#{distribution.location}/source/" )
- end
+ end
# write package list for updated os
updated_os_list.uniq!
updated_os_list.each do |os|
- distribution.write_pkg_list(os)
+ distribution.write_pkg_list(os)
end
- # register archive package list.
+ # register archive package list.
distribution.write_archive_pkg_list()
# send email
- if test_flag then
+ if test_flag then
msg_list = []
- registed_package_list.each { |p|
+ registed_package_list.each do |p|
msg_list.push("%-30s: %08s" % [ p.package_name.strip, p.version.strip ] )
- }
+ end
# email just remote package server
- # Mail.send_package_registe_mail( msg_list, @id )
+ # Mail.send_package_registe_mail( msg_list, @id )
end
# if snapshot mode is true then generate snapshot
def sync( dist_name, mode, snapshot = "" )
@log.info "sync from server"
- distribution = get_distribution( dist_name )
+ distribution = get_distribution( dist_name )
- if distribution.server_url.empty? then
- @log.error( "This distribution has no remote server", Log::LV_USER)
+ if distribution.server_url.empty? then
+ @log.error( "This distribution has no remote server", Log::LV_USER)
return
end
def add_distribution( dist_name, server_url, clone )
lock_file = Utils.file_lock(@server_lock_file_path)
- # error check : check whether the distribution already exists in the server directory
+ # error check : check whether the distribution already exists in the server directory
if @dist_to_server_url.keys.include? dist_name.strip then
Utils.file_unlock(@lock_file)
raise RuntimeError, "distribution already exist : #{dist_name}"
end
- # name check
- if dist_name.strip.eql? "distribution.info" then
+ # name check
+ if dist_name.strip.eql? "distribution.info" then
Utils.file_unlock(@lock_file)
raise RuntimeError, "id \"distribution.info\" is not available"
- end
+ end
# modify server url
if (not server_url.empty?) and (not Utils.is_url_remote(server_url))
- # if server_url is a local server address then generate an absolute path
- if not Utils.is_absolute_path( server_url ) then
+ # if server_url is a local server address then generate an absolute path
+ if not Utils.is_absolute_path( server_url ) then
if server_url.end_with?("/") then
- server_url = Utils::WORKING_DIR + server_url
+ server_url = Utils::WORKING_DIR + server_url
else
- server_url = Utils::WORKING_DIR + "/" + server_url
+ server_url = Utils::WORKING_DIR + "/" + server_url
end
end
- end
+ end
add_dist_for_config_file(dist_name, server_url, clone)
create_distribution_struct( dist_name, server_url )
def add_os(dist_name, os)
dist = get_distribution(dist_name)
- # distribution lock
+ # distribution lock
@lock_file = Utils.file_lock(dist.lock_file_path)
dist.add_os(os)
if File.exist? @config_file_path then
File.open @config_file_path do |f|
f.each_line do |l|
- if l.start_with?( "location : ") then
+ if l.start_with?( "location : ") then
location= l.split(" : ")[1]
FileUtils.rm_rf l.split(" : ")[1].strip
@log.info( "server location removed : #{location}", Log::LV_USER)
end
else
@log.error( "Can't find server information : #{@id}", Log::LV_USER)
- end
+ end
FileUtils.rm_rf @config_dir
FileUtils.rm_rf @log_file_path
lock_file = Utils.file_lock(@server_lock_file_path)
- # modify config file
+ # modify config file
config_file = File.readlines(@config_file_path)
File.open(@config_file_path, 'w') do |f|
- config_file.each { |line|
+ config_file.each do |line|
f.puts(line) if not line =~ /server_url : #{dist_name} ->/
- }
+ end
end
- # modify info file
+ # modify info file
config_file = File.readlines("#{@location}/distribution.info")
File.open("#{@location}/distribution.info", 'w') do |f|
remove_flag = false
- config_file.each { |line|
- if line.start_with? "name :" then
+ config_file.each do |line|
+ if line.start_with? "name :" then
if line.split(':')[1].strip.eql? dist_name then
remove_flag = true
else
if not remove_flag then
f.puts line
end
- }
+ end
end
- # remove distribution directory
+ # remove distribution directory
FileUtils.rm_rf distribution.location
# remove distribution struct
- @distribution_list.delete distribution
+ @distribution_list.delete distribution
Utils.file_unlock(lock_file)
end
distribution.remove_pkg(pkg_name_list, os)
- # generate snapshot
+ # generate snapshot
@log.info "generating snapshot"
distribution.generate_snapshot("", "", false, "SYSTEM: Package \"#{pkg_name_list.join(", ")}\" is(are) removed in #{os} server")
lock_file = Utils.file_lock(@server_lock_file_path)
- distribution.clean( snapshot_list )
+ distribution.clean( snapshot_list )
- # remove incoming dir
+ # remove incoming dir
FileUtils.rm_rf incoming_path
FileUtils.mkdir incoming_path
end
# start server daemon
- def start( port, passwd )
+ def start( port, passwd )
@log.info "Package server Start..."
# set port number. default port is 3333
- @port = port
+ @port = port
# set job request listener
@log.info "Setting listener..."
autosync.start
end
- # set password
+ # set password
@passwd = passwd
# main loop
curr_time = Time.now
if (curr_time - start_time).to_i > @test_time then
puts "Test time is elapsed!"
- break
+ break
end
else
sleep 1
end
end
- end
+ end
- # stop server daemon
+ # stop server daemon
def stop( port, passwd )
# set port number. default port is 3333
- @port = port
+ @port = port
@finish = false
client = BuildCommClient.create("127.0.0.1", @port, @log)
- if client.nil? then
+ if client.nil? then
raise RuntimeError, "Server is not listening on port #{@port}"
end
client.send("STOP|#{passwd}")
ret = client.receive_data
- if ret[0].strip.eql? "SUCC" then
+ if ret[0].strip.eql? "SUCC" then
@log.output( "Package server is stopped", Log::LV_USER)
else
@log.output( "Package server return error message : #{ret}", Log::LV_USER)
end
client.terminate
- end
+ end
- def self.list_id
- @@log = PackageServerLog.new("#{SERVER_ROOT}/.log")
+ def self.list_id
+ @@log = PackageServerLog.new("#{SERVER_ROOT}/.log")
d = Dir.new( SERVER_ROOT )
- s = d.select {|f| not f.start_with?(".") }
+ s = d.select {|f| not f.start_with?(".") }
s.sort!
server_list = []
server_list.push id
@@log.output( id, Log::LV_USER)
- end
+ end
@@log.close
FileUtils.rm_rf("#{SERVER_ROOT}/.log")
end
def self.list_dist( id )
- @@log = PackageServerLog.new( "#{SERVER_ROOT}/.log" )
+ @@log = PackageServerLog.new( "#{SERVER_ROOT}/.log" )
@@log.output( "=== ID [#{id}]'s distribution list ===", Log::LV_USER)
config_file_path = "#{SERVER_ROOT}/#{id}/config"
if not File.exist? config_file_path
raise RuntimeError, "[#{id}] is not server ID"
- end
+ end
File.open config_file_path do |f|
f.each_line do |l|
return dist_list
end
- def get_default_dist_name()
- if @distribution_list.empty? then
+ def get_default_dist_name()
+ if @distribution_list.empty? then
raise RuntimeError,"Server [#{@id}] does not have distribution"
- end
- return @distribution_list[0].name
+ end
+ return @distribution_list[0].name
end
def reload_dist_package()
lock_file = Utils.file_lock(@server_lock_file_path)
@distribution_list.each do |dist|
dist.initialize_pkg_list
- end
+ end
Utils.file_unlock(lock_file)
- end
+ end
def release_lock_file
if not @lock_file.nil? then
Utils.file_unlock(@lock_file)
- end
+ end
end
# PRIVATE METHODS/VARIABLES
- private
+ private
- def server_information_initialize
+ def server_information_initialize
# if id is nil or empty then find default id
- if @id.nil? or @id.empty?
+ if @id.nil? or @id.empty?
d = Dir.new( SERVER_ROOT )
s = d.select {|f| not f.start_with?(".") }
if s.length.eql? 1 then
if File.exist? @config_file_path
File.open @config_file_path do |f|
f.each_line do |l|
- if l.start_with?( "location :") then
+ if l.start_with?( "location :") then
@location = l.split(" :")[1].strip
- elsif l.start_with?( "integrity check :") then
+ elsif l.start_with?( "integrity check :") then
@integrity = l.split(" :")[1].strip.upcase
- elsif l.start_with?( "auto sync :" ) then
+ elsif l.start_with?( "auto sync :" ) then
@auto_sync_flag = l.split(" :")[1].strip.upcase
- elsif l.start_with?( "sync interval :" ) then
+ elsif l.start_with?( "sync interval :" ) then
@sync_interval = l.split(" :")[1].strip.to_i
elsif l.start_with?( "server_url :" ) then
info = l.split(" :")[1].split("->")
- @dist_to_server_url[info[0].strip] = info[1].strip
+ @dist_to_server_url[info[0].strip] = info[1].strip
else
@log.error "server config file has invalid information [#{l}]"
end
end
def get_distribution( dist_name )
- if dist_name.nil? or dist_name.empty? then
- dist_name = get_default_dist_name()
+ if dist_name.nil? or dist_name.empty? then
+ dist_name = get_default_dist_name()
+ end
+ if dist_name.empty? then
+ raise RuntimeError,"Can't find distribution information"
end
- if dist_name.empty? then
- raise RuntimeError,"Can't find distribution information"
- end
@distribution_list.each do |dist|
if dist.name.eql? dist_name.strip
- return dist
+ return dist
end
end
def create_distribution_struct( dist_name, server_url )
if File.exist? "#{@location}/#{dist_name}"
- raise RuntimeError, "distribution directory already exist [#{@location}/#{dist_name}]"
+ raise RuntimeError, "distribution directory already exist [#{@location}/#{dist_name}]"
end
FileUtils.mkdir "#{@location}/#{dist_name}"
if not server_url.empty? then
@log.info "generate package server using remote package server [#{server_url}]"
- if Utils.is_url_remote(server_url) then
+ if Utils.is_url_remote(server_url) then
@log.info "[#{dist_name}] distribution creation. using remote server [#{server_url}]"
- else
+ else
@log.info "[#{dist_name}] distribution creation. using local server [#{server_url}]"
end
distribution.write_archive_pkg_list()
end
- # add dist information to distribution.info file
+ # add dist information to distribution.info file
File.open("#{@location}/distribution.info", "a") do |f|
f.puts "name : #{dist_name}"
f.puts "time : #{Time.now.strftime("%Y%m%d%H%M%S")}"
end
def register_package(distribution, pkg, file_path, test_flag, internal_flag)
- # get package class using binary file
+ # get package class using binary file
if pkg.nil? or pkg.package_name.empty? then
raise "[#{file_path}]'s pkginfo.manifest file is incomplete."
- end
+ end
- if not test_flag then
+ if not test_flag then
# error check
if pkg.package_name.empty? or pkg.version.empty? \
or pkg.os.empty? or pkg.maintainer.empty? then
raise "[#{file_path}]'s pkginfo.manifest file is incomplete."
- end
+ end
updated_pkg = distribution.register(file_path, pkg, internal_flag )
else
updated_pkg = distribution.register_for_test(file_path, pkg )
- end
+ end
return updated_pkg
- end
+ end
def add_dist_for_config_file(dist_name, server_url, clone)
File.open( @config_file_path, "a" ) do |f|
- if clone then
+ if clone then
@log.info "add distribution using [#{server_url}] in clone mode"
f.puts "server_url : #{dist_name} -> "
else
def update_config_information(id)
@id = id
@config_dir = "#{SERVER_ROOT}/#{@id}"
- @log_file_path = "#{SERVER_ROOT}/#{@id}.log"
- @config_file_path = "#{@config_dir}/config"
- @incoming_path = "#{@config_dir}/incoming"
+ @log_file_path = "#{SERVER_ROOT}/#{@id}.log"
+ @config_file_path = "#{@config_dir}/config"
+ @incoming_path = "#{@config_dir}/incoming"
@server_lock_file_path = "#{@config_dir}/.server_lock"
- end
+ end
end
=begin
- serverConfig.rb
+ serverConfig.rb
Copyright (c) 2000 - 2011 Samsung Electronics Co., Ltd. All rights reserved.
=begin
- packageServerLog.rb
+ packageServerLog.rb
Copyright (c) 2000 - 2011 Samsung Electronics Co., Ltd. All rights reserved.
protected
def output_extra(msg)
@second_out.puts msg
- end
+ end
end
=begin
- serverOptParser.rb
+ serverOptParser.rb
Copyright (c) 2000 - 2011 Samsung Electronics Co., Ltd. All rights reserved.
$LOAD_PATH.unshift File.dirname(File.dirname(__FILE__))+"/common"
require "utils"
-def set_default( options )
- options[:id] = ""
- options[:dist] = ""
- options[:url] = ""
- options[:os] = "all"
- options[:pkgs] = []
- options[:snaps] = []
- options[:bsnap] = ""
- options[:port] = "3333"
+def set_default( options )
+ options[:id] = ""
+ options[:dist] = ""
+ options[:url] = ""
+ options[:os] = "all"
+ options[:pkgs] = []
+ options[:snaps] = []
+ options[:bsnap] = ""
+ options[:port] = "3333"
options[:gensnap] = true
- options[:force] = false
- options[:test] = false
- options[:clone] = false
+ options[:force] = false
+ options[:test] = false
+ options[:clone] = false
options[:origin_pkg_name] = ""
options[:origin_pkg_os] = ""
options[:passwd] = ""
def option_error_check( options )
- case options[:cmd]
- when "create"
+ case options[:cmd]
+ when "create"
if options[:id].empty? or options[:dist].empty? then
raise ArgumentError, "Usage: pkg-svr create -n <server name> -d <distribution> [-u <remote server url>] [-l <location>] "
end
when "gen-snapshot"
if options[:snaps].empty? then
raise ArgumentError, "Usage: pkg-svr gen-snapshot -n <server name> -d <distribution> -s <snapshot name> [-b <base snapshot name>]"
- end
+ end
when "start"
if options[:port].empty? then
raise ArgumentError, "Usage: pkg-svr start -n <server name> -p <port> [-w <password>]"
end
end
-def option_parse
+def option_parse
options = {}
banner = "Package-server administer service command-line tool." + "\n" \
+ "\n" + "Usage: pkg-svr <SUBCOMMAND> [OPTS] or pkg-svr (-h|-v)" + "\n" \
# Set a banner, displayed at the top
# of the help screen.
- opts.banner = banner
+ opts.banner = banner
opts.on( '-n', '--name <server name>', 'package server name' ) do|name|
- options[:id] = name
+ options[:id] = name
end
opts.on( '-d', '--dist <distribution>', 'package server distribution' ) do|dist|
- options[:dist] = dist
+ options[:dist] = dist
end
opts.on( '-u', '--url <server url>', 'remote server url: http://127.0.0.1/dibs/unstable' ) do|url|
- options[:url] = url
+ options[:url] = url
end
opts.on( '-o', '--os <operating system>', 'target operating system' ) do|os|
list = pkgs.tr(" \t","").split(",")
list.each do |l|
if l.start_with? "~" then l = Utils::HOME + l.delete("~") end
- options[:pkgs].push l
- end
+ options[:pkgs].push l
+ end
end
opts.on( '-s', '--snapshot <snapshot>', 'a snapshot name or snapshot list' ) do|snaplist|
end
opts.on( '-b', '--base <base snapshot>', 'base snapshot name' ) do|bsnap|
- options[:bsnap] = bsnap
+ options[:bsnap] = bsnap
end
opts.on( '-l', '--loc <location>', 'server location' ) do|loc|
- options[:loc] = loc
+ options[:loc] = loc
end
opts.on( '-p', '--port <port>', 'port number' ) do|port|
end
end
- cmd = ARGV[0]
+ cmd = ARGV[0]
if cmd.eql? "create" or cmd.eql? "sync" \
or cmd.eql? "register" \
raise ArgumentError, "Usage: pkg-svr <SUBCOMMAND> [OPTS] or pkg-svr -h"
end
- # default value setting
+ # default value setting
set_default options
optparse.parse!
- # option error check
+ # option error check
option_error_check options
return options
-end
+end
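# --- Illustrative sketch only, not part of this change: a minimal, self-contained
# --- OptionParser script following the same pattern as option_parse above
# --- (sub-command taken from ARGV[0], defaults filled in, then parse!).
# --- All names and values here are hypothetical.
require "optparse"

example_options = { :id => "", :dist => "" }
example_parser = OptionParser.new(nil, 32, ' '*8) do |opts|
  opts.banner = "Usage: example <SUBCOMMAND> [OPTS]"
  opts.on( '-n', '--name <server name>', 'package server name' ) do |name|
    example_options[:id] = name
  end
  opts.on( '-d', '--dist <distribution>', 'package server distribution' ) do |dist|
    example_options[:dist] = dist
  end
end
example_options[:cmd] = ARGV[0]   # sub-command is the first bare argument
example_parser.parse!             # consumes the recognized options from ARGV
p example_options
# e.g. "example create -n myserver -d unstable"
#   => {:id=>"myserver", :dist=>"unstable", :cmd=>"create"}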
#!/usr/bin/ruby
=begin
-
+
upgrade
Copyright (c) 2000 - 2011 Samsung Electronics Co., Ltd. All rights reserved.
end
else
if options[:locate].nil? or options[:url].nil? then
- raise ArgumentError, "upgrade -u <package server url> [-l <dibs path>]" + "\n"
+ raise ArgumentError, "upgrade -u <package server url> [-l <dibs path>]" + "\n"
end
end
end
-def option_parse
+def option_parse
options = {}
banner = "DIBS upgrade service command-line tool." + "\n" \
+ "\n" + "Usage: upgrade -u <package server url> [-l <dibs path>]" + "\n" \
+ "\n" + "Options:" + "\n"
optparse = OptionParser.new(nil, 32, ' '*8) do|opts|
- opts.banner = banner
+ opts.banner = banner
options[:locate] = File.dirname(__FILE__)
opts.on( '-l', '--locate <dibs path>', 'located dibs path' ) do|locate|
end
opts.on( '-u', '--url <package server url>', 'package server url: http://127.0.0.1/dibs/unstable' ) do|url|
- options[:url] = url
+ options[:url] = url
end
opts.on( '-I', '--install', 'install, internal option' ) do
end
opts.on( '-n', '--name <server name>', 'build server name or package server name' ) do|name|
- options[:name] = name
+ options[:name] = name
end
options[:port] = 2222
optparse.parse!
option_error_check options
-
+
return options
-end
+end
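# --- Illustrative sketch only, not part of this change: a typical invocation of
# --- this upgrade tool and the options it should yield (URL taken from the help
# --- text above; the dibs path is hypothetical).
#   upgrade -u http://127.0.0.1/dibs/unstable -l /opt/dibs
#   => options[:url]    == "http://127.0.0.1/dibs/unstable"
#      options[:locate] == "/opt/dibs"
#      options[:port]   == 2222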
-#option parsing
+#option parsing
begin
option = option_parse
rescue => e
# Download DIBS Package
client = Client.new( pkg_svr_url, NEW_VER_PATH, log)
client.update()
- client.install( DIBS_PKG_NAME, Utils::HOST_OS, true, true)
+ client.install( DIBS_PKG_NAME, Utils::HOST_OS, true, true)
# Copy Current path
- if File.exist? "#{dibs_path}" then
- FileUtils.rm_rf("#{dibs_path}")
+ if File.exist? "#{dibs_path}" then
+ FileUtils.rm_rf("#{dibs_path}")
#FileUtils.mkdir_p("#{dibs_path}")
end
if File.exist? "#{NEW_VER_PATH}/tools/dibs" then
end
# Execute start command
- if start_opt
+ if start_opt
if not build_server.nil? and svr_type.eql? "BUILDSERVER" then
# get friends server information
build_server.friend_servers.each do |svr|
ip = svr.ip
port = svr.port
-
+
build_client = BuildCommClient.create( ip, port )
if build_client.nil? then
log.info("Friend Server #{ip}:#{port} is not running!", Log::LV_USER)
- next
+ next
end
# send request
end
end
end
-
- # terminate
+
+ # terminate
build_client.terminate
end
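# --- Illustrative sketch only, not part of this change: the connect/notify/close
# --- pattern used for each friend server above, assuming dibs' BuildCommClient
# --- class is loadable (ip and port values are hypothetical).
# build_client = BuildCommClient.create( "127.0.0.1", 2223 )
# if build_client.nil? then
#     log.info("Friend Server 127.0.0.1:2223 is not running!", Log::LV_USER)
# else
#     # ... send the upgrade request here, as above ...
#     build_client.terminate
# end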